initial commit

This commit is contained in:
root
2025-10-11 17:03:02 +02:00
commit 08dbb6210e
51 changed files with 3420 additions and 0 deletions

2
.gitattributes vendored Normal file
View File

@@ -0,0 +1,2 @@
# Convert line endings of all text files to \n (as opposed to \r\n as on Windows for example)
* text=auto

160
.gitignore vendored Normal file
View File

@@ -0,0 +1,160 @@
########################################################
### Python .gitignore from https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
########################################################
# Byte-compiled / optimized / DLL files
**/__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
junittest.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
*.ipynb
# IPython
profile_default/
ipython_config.py
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
########################################################
### IDEs & Jupyter
########################################################
.idea
target
.metadata
.settings
.classpath
.project
.idea
*.iml
.ipynb_checkpoints
*/.ipynb_checkpoints/*
########################################################
### Logs & System
########################################################
pylintout
sonar_report.json
mlruns/
.DS_Store
out.csv
*.tmp
dependency-check-report.*

64
.pre-commit-config.yaml Normal file
View File

@@ -0,0 +1,64 @@
repos:
- repo: local
hooks:
- id: poetry-check
name: poetry-check
description: run poetry check to validate config
entry: poetry check
language: system
pass_filenames: false
files: (^|/)pyproject.toml$
- id: trailing-whitespace-fixer
name: trailing-whitespace-fixer
entry: trailing-whitespace-fixer
language: python
types: [text]
- id: end-of-file-fixer
name: end-of-file-fixer
entry: end-of-file-fixer
language: python
types: [text]
- id: ruff_formatter
name: ruff_formatter
description: "Run 'ruff format' for extremely fast Python formatting"
entry: ruff format
args: ["--force-exclude"]
types_or: [python]
language: python
- id: ruff_linter
name: ruff_linter
description: "Run 'ruff' for extremely fast Python linting"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
]
types_or: [python, pyi]
language: python
- id: ruff_sonar_report
name: ruff_sonar_report
description: "Generate a report of all ruff warnings for sonar"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
"--output-file",
"sonar_report.json",
"--output-format",
"json",
]
types_or: [python, pyi]
language: python
- id: unittest-run
name: unittest-run
entry: coverage run
pass_filenames: false
language: system

1
.python-version Normal file
View File

@@ -0,0 +1 @@
3.12

18
.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,18 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"ms-vscode-remote.remote-wsl", // WSL
"ms-python.python", // Python
"ms-azuretools.vscode-docker", // Docker
"tamasfe.even-better-toml", // Even Better TOML
"redhat.vscode-yaml", // Yaml plugin
"ms-vscode.makefile-tools", // Makefile plugin
"ms-python.mypy-type-checker", // mypy extension
"charliermarsh.ruff" // Python linter & formatter
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []
}

24
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"python.linting.mypyEnabled": true,
"python.testing.unittestArgs": [
"-v",
"-s",
".",
"-p",
"test*.py"
],
"python.defaultInterpreterPath": "${workspaceFolder}/.venv",
"python.envFile": "${workspaceFolder}/.env",
"python.analysis.ignore": ["*"],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit",
}
},
"python.terminal.activateEnvironment": true,
"python.terminal.activateEnvInCurrentTerminal": true
}

38
Makefile Normal file
View File

@@ -0,0 +1,38 @@
PYTHON_VERSION := 3.12
GIT_BRANCH = $(shell git branch --show-current)
all: setup
clean:
rm -rf .venv
setup: check
pyenv install ${PYTHON_VERSION} --skip-existing \
&& pyenv local ${PYTHON_VERSION} \
&& poetry env use ${PYTHON_VERSION} \
&& poetry install \
&& poetry run pre-commit install
check: pyenv-exists poetry-exists
pyenv-exists: ; @which pyenv > /dev/null
poetry-exists: ; @which poetry > /dev/null
render-in-esta-python:
git clone ssh://git@codessh.sbb.ch:7999/kd_esta_blueprints/esta-python.git /tmp/esta-python
(cd /tmp/esta-python && git branch ${GIT_BRANCH})
(cd /tmp/esta-python && git checkout ${GIT_BRANCH})
poetry run copier copy --force \
--data project_name="ESTA Python" \
--data author_first_name="Your First Name" \
--data author_last_name="Your Last Name" \
--data bitbucket_organization=KD_ESTA_BLUEPRINTS \
--data pypi_repository="esta.pypi" \
--data docker_repository="esta.docker" \
--data helm_repository="esta.helm.local" \
. /tmp/esta-python
(cd /tmp/esta-python && git add -A)
remove-rendered-template:
rm -rf /tmp/esta-python

101
README.md Normal file
View File

@@ -0,0 +1,101 @@
# ESTA Python Template
This is a template for Python-based projects. It allows to generate an exemplary setup and project structure including neat stuff like pre-commit-hooks, automated testing, type checking, CI/CD using Tekton, etc. The generated code is intended to be tailored to project needs.
## Usage
### Creating new projects
The expected way to use this template is to install the necessary prerequisites using [Automated WSL and Docker Setup](https://confluence.sbb.ch/display/EAPKB/Automated+WSL+and+Docker+Setup). You can then just run the following in WSL to generate a new ESTA Python project:
```shell
copier copy ssh://git@codessh.sbb.ch:7999/kd_esta_blueprints/esta-python-template.git my_project
```
This will create a new directory called `my_project` and generate a new project inside it.
If you already have an empty directory - for example from a new git repository - you can change into that directory and run:
```shell
copier copy ssh://git@codessh.sbb.ch:7999/kd_esta_blueprints/esta-python-template.git .
```
### Updating existing projects
In order to receive updates provided by this template, you can run the following command in your project directory:
```shell
copier update --skip-answered
```
### Install Python
If not already installed, install Python. The recommended way is to use [pyenv](https://github.com/pyenv/pyenv), which allows multiple parallel Python installations which can be automatically selected per project you're working on.
```shell
# Install Python if necessary
pyenv install 3.12
pyenv shell 3.12
```
> **_Note_**: If you start from scratch with Python development you might find [Automated WSL and Docker Setup](https://confluence.sbb.ch/spaces/WSL/pages/2260505495/Automated+WSL+and+Docker+Setup) useful.
### Install Copier
If not already installed get [Copier](https://copier.readthedocs.io/en/stable/).
The easiest way to install it in an existing WSL installation is using `pipx`:
```shell
pipx install copier
```
### Order a Bitbucket Repository
Order a Bitbucket repository from the CLEW Portal <https://self.sbb-cloud.net/tools/bitbucket/repository> and clone it.
### (Optional) Order a Docker Repository
If you want to package your code in a Docker container order a "Docker" repository on Artifactory from the CLEW Portal: <https://self.sbb-cloud.net/tools/artifactory>.
### (Optional) Order a PyPI Repository
If you want to share your code as Python library order a "Python" repository on Artifactory from the CLEW Portal: <https://self.sbb-cloud.net/tools/artifactory>.
### (Optional) Order a Helm Repository
If you want to deploy your code using Helm, order a "Helm" repository on Artifactory from the CLEW Portal: <https://self.sbb-cloud.net/tools/artifactory>.
**Note**: When rendering the template, use the "local" Helm repository, e.g. "my-project.helm.local"!
### Render Template
Now you can go to the freshly cloned repository and render the template by running Copier. You will be prompted to provide the required information. Once you rendered the template, follow the setup instructions in the rendered template.
```shell
copier copy ssh://git@codessh.sbb.ch:7999/kd_esta_blueprints/esta-python-template.git .
```
## Developer's Guide
- Development of esta-python happens in this repository following a standard lean gitflow.
- Release: a release is a merge on main. This will trigger the corresponding Tekton-Pipeline which pushes a rendered version of the template to <https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python/browse>
### Setup
- Install Python (see instructions above).
- Install Poetry according to <https://python-poetry.org/docs/#installation>.
- Setup your environment:
```shell
# Create venv and install all dependencies
make
# Cleanup venv
make clean
# Render template into current HEAD of esta-python
# Helpful when you want to see the diff to esta-python.
make render-in-esta-python
# Cleanup
make remove-rendered-template
```

32
changelog.md Normal file
View File

@@ -0,0 +1,32 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [1.1.0] - 2025-09-30
### Changed
- Update development dependencies
- Replaced the pre-commit tools pylint, black, isort and pydocstyle with ruff
- The [OWASP dependency check](https://confluence.sbb.ch/spaces/CYBERSEC/pages/1585571010/OWASP+Dependency-Check) which is run as part of the build now includes suppressions for the standard dependencies of ESTA Python. Also, the check has been set to fail the build if there are critical vulnerabilities in your dependencies.
### Added
- Support Python version 3.12 and set it as the default
## [1.0.1] - 2025-08-18
### Added
- README.md: Add info on how to use copier update
## [1.0.0] - 2025-08-18
### Added
- Initial release: Migrating from the now-deprecated [esta-python-cookiecutter](https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python-cookiecutter/browse).

58
copier.yml Normal file
View File

@@ -0,0 +1,58 @@
_min_copier_version: "9.9.0"
_subdirectory: template
project_name:
type: str
name:
type: str
default: "{{ project_name | lower | replace(' ', '-') }}"
package_name:
type: str
default: "{{ name | replace('-', '_') }}"
description:
type: str
default: "A project created with esta-python-template"
author_first_name:
type: str
author_last_name:
type: str
author_email:
type: str
default: "{{ author_first_name | lower }}.{{ author_last_name | lower }}@sbb.ch"
authors:
type: str
default: "[\n {name = \"{{ author_first_name }} {{ author_last_name }}\", email = \"{{ author_email }}\"}\n]"
bitbucket_organization:
type: str
pypi_repository:
type: str
default: ""
docker_repository:
type: str
default: ""
helm_repository:
type: str
default: ""
python_version:
type: str
choices:
- "3.12"
- "3.11"
- "3.9"
default: "3.12"
use_ggshield:
type: bool
default: false

96
estaTektonPipeline.yaml Normal file
View File

@@ -0,0 +1,96 @@
# yaml-language-server: $schema=https://clew-resources.sbb-cloud.net/tekton-schema.json
productName: esta-python
pipelines:
# This is the continuous build pipeline which runs on every commit on a feature branch or if triggered in the tekton-ui.
- name: continuous
triggerType:
- GITEVENT
- USER
branchNamePrefixes:
- feature
- renovate
build: {}
tasks:
- name: pre-commit-run
# we are using a custom version of the esta-python build (see https://code.sbb.ch/projects/KD_ESTA/repos/esta-tekton-pipeline-templates/browse/tasks/esta-python-build.yaml)
# here, because we can't, and don't want to build a package (using `poetry build`) from this repository.
taskRef: esta-custom-exec
params:
RUN_IMAGE: esta.docker.bin.sbb.ch/esta/esta-tekton-pipeline-python-builderimage:latest
SCRIPT: |
apply-version python 3.12
eval "$(pyenv init -)"
# Suppress fatal: detected dubious ownership in repository
git config --global --add safe.directory $(workspaces.source.path)
poetry config virtualenvs.in-project false --local
poetry env use `pyenv version-name`
poetry install
# run code-quality check using pre-commit
poetry run pre-commit run --all-files
# This is the snapshot build pipeline which is triggered when pushing commits to main or if triggered in the tekton ui.
- name: snapshot
triggerType:
- USER
- GITEVENT
branchNamePrefixes:
- main
build: {}
tasks:
- name: pre-commit-run
# we are using a custom version of the esta-python build (see https://code.sbb.ch/projects/KD_ESTA/repos/esta-tekton-pipeline-templates/browse/tasks/esta-python-build.yaml)
# here, because we can't, and don't want to build a package (using `poetry build`) from this repository.
taskRef: esta-custom-exec
params:
RUN_IMAGE: esta.docker.bin.sbb.ch/esta/esta-tekton-pipeline-python-builderimage:latest
SCRIPT: |
apply-version python 3.12
eval "$(pyenv init -)"
# Suppress fatal: detected dubious ownership in repository
git config --global --add safe.directory $(workspaces.source.path)
poetry config virtualenvs.in-project false --local
poetry env use `pyenv version-name`
poetry install
# run code-quality check using pre-commit
poetry run pre-commit run --all-files
# The sync-to-esta-python task will render the copier template, create an automatic commit,
# and force-push changes to esta-python's main branch.
- name: sync-to-esta-python
taskRef: esta-custom-exec
params:
RUN_IMAGE: esta.docker.bin.sbb.ch/esta/esta-tekton-pipeline-python-builderimage:latest
SCRIPT: |
poetry config virtualenvs.in-project false --local
poetry env use `pyenv version-name`
poetry install
GIT_SSL_NO_VERIFY=true git clone https://code.sbb.ch/scm/kd_esta_blueprints/esta-python.git /tmp/esta-python
# Suppress fatal: detected dubious ownership in repository
git config --global --add safe.directory $(workspaces.source.path)
(cd /tmp/esta-python && git rm -r --ignore-unmatch '*')
poetry run copier copy --force \
--data project_name="ESTA Python" \
--data author_first_name="Your First Name" \
--data author_last_name="Your Last Name" \
--data bitbucket_organization=KD_ESTA_BLUEPRINTS \
--data pypi_repository="esta.pypi" \
--data docker_repository="esta.docker" \
--data helm_repository="esta.helm.local" \
. /tmp/esta-python/
cd /tmp/esta-python
git add -A
git config --global user.email "esta@sbb.ch"
git config --global user.name "esta-build-clew"
git commit --message "Automatic release commit." --allow-empty
git push --force origin

1169
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

3
poetry.toml Normal file
View File

@@ -0,0 +1,3 @@
[virtualenvs]
create = true
in-project = true

94
pyproject.toml Normal file
View File

@@ -0,0 +1,94 @@
[project]
name = "esta-python-template"
description = "Template for ESTA Python projects. Can be used to generate a new ESTA Python project."
authors = [
{name = "Ferdinand Niedermann", email="<ferdinand.niedermann@sbb.ch>"}
]
readme = "README.md"
license = "Proprietary"
dynamic = ["version", "dependencies"]
requires-python = "~=3.12.0" # = fix minor version to 3.12.*
[project.urls]
repository = "https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python-template"
documentation = "https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python-template/browse/README.md"
[tool.poetry]
version = "0.0.0" # Version is ignored and set by Tekton. Use pipeline to increase.
requires-poetry = ">=2.0"
package-mode = false
[tool.poetry.dependencies]
python = "~3.12"
copier = "^9.7.1"
[tool.poetry.group.dev.dependencies]
coverage = {extras = ["toml"], version = "~7.10.0"} # Code coverage measurement for Python
unittest-xml-reporting = "~3.2" # unittest-based test runner with Ant/JUnit like XML reporting.
pre-commit = "~4.3.0" # A framework for managing and maintaining multi-language pre-commit hooks.
pre-commit-hooks = "~6.0.0" # Some out-of-the-box hooks for pre-commit.
tomli = "~2.2.0" # A lil' TOML parser
pyyaml = "~6.0" # YAML parser and emitter for Python
ruff = "~0.13.0" # Extremely fast python linter and formatter
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.coverage.run]
command_line = "-m xmlrunner discover --output-file junittest.xml"
[tool.ruff]
line-length = 140
indent-width = 4
exclude = [
".git",
".git-rewrite",
".ipynb_checkpoints",
".mypy_cache",
".pyenv",
".pytype",
".ruff_cache",
".venv",
".vscode",
"**/*.yaml",
"**/*.yml"
]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.lint]
select = [
"D", "E", "W", # Pydocstyle rules
"I", # Isort rules
"PL", # Pylint rules
"F", # Pyflake
"N", # Pep8-naming
"G", # Flake8-logging-format
"ARG", # Flake8-unused-arguments
"RUF100", # fail on unused noqa
]
ignore=[]
fixable = ["I"]
[tool.ruff.lint.per-file-ignores]
"tests/**/*.py" = ["D", "E", "W"] # Ignore Pydocstyle warnings & errors in "test/*" directory
"template/tests/**/*.py" = ["D", "E", "W"] # Ignore Pydocstyle warnings & errors for tests in the template
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.ruff.lint.pylint]
max-args = 8

32
renovate.json Normal file
View File

@@ -0,0 +1,32 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"local>KD_ESTA/esta-renovate-preset"
],
"branchPrefix": "feature/renovate/",
"dependencyDashboard": false,
"includeForks": true,
"ignoreDeps": ["python"],
"packageRules": [
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": [
"*"
],
"matchUpdateTypes": [
"minor",
"patch"
]
}
],
"customManagers": [
{
"customType": "regex",
"fileMatch": [".*Dockerfile.*"],
"matchStrings": ["POETRY_VERSION=\"(?<currentValue>.+)\""],
"depNameTemplate": "poetry",
"datasourceTemplate": "pypi"
}
]
}

8
sonar-project.properties Normal file
View File

@@ -0,0 +1,8 @@
# This file should be deleted and configuration for sonar done by pipeline helper.
sonar.projectKey=esta-python-template
sonar.sources=src
sonar.tests=tests
sonar.python.ruff.reportPaths=sonar_report.json
sonar.python.version={{ python_version }}
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.xunit.reportPath=junittest.xml

View File

@@ -0,0 +1,2 @@
# Changes here will be overwritten by Copier; NEVER EDIT MANUALLY
{{_copier_answers | to_nice_yaml -}}

View File

@@ -0,0 +1,7 @@
# Example environment variable that controls the logging level of the application
LOG_LEVEL="INFO"
{%- if use_ggshield %}
# Template for GGShield
GITGUARDIAN_API_KEY=""
GITGUARDIAN_INSTANCE="https://gitguardian.sbb.ch"
{%- endif %}

2
template/.gitattributes vendored Normal file
View File

@@ -0,0 +1,2 @@
# Convert line endings of all text files to \n (as opposed to \r\n as on Windows for example)
* text=auto

163
template/.gitignore vendored Normal file
View File

@@ -0,0 +1,163 @@
########################################################
### Python .gitignore from https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
########################################################
# Byte-compiled / optimized / DLL files
**/__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
junittest.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
*.ipynb
# IPython
profile_default/
ipython_config.py
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# GGShield cache file
.cache_ggshield
########################################################
### IDEs & Jupyter
########################################################
.idea
target
.metadata
.settings
.classpath
.project
.idea
*.iml
.ipynb_checkpoints
*/.ipynb_checkpoints/*
########################################################
### Logs & System
########################################################
pylintout
sonar_report.json
mlruns/
.DS_Store
out.csv
*.tmp
dependency-check-report.*

View File

@@ -0,0 +1,93 @@
repos:
- repo: local
hooks:
- id: poetry-check
name: poetry-check
description: run poetry check to validate config
entry: poetry check
language: system
pass_filenames: false
files: (^|/)pyproject.toml$
- id: trailing-whitespace-fixer
name: trailing-whitespace-fixer
entry: trailing-whitespace-fixer
language: python
types: [text]
- id: end-of-file-fixer
name: end-of-file-fixer
entry: end-of-file-fixer
language: python
types: [text]
- id: ruff_formatter
name: ruff_formatter
description: "Run 'ruff format' for extremely fast Python formatting"
entry: ruff format
args: ["--force-exclude"]
types_or: [python]
language: python
- id: ruff_linter
name: ruff_linter
description: "Run 'ruff' for extremely fast Python linting"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
]
types_or: [python, pyi]
language: python
- id: ruff_sonar_report
name: ruff_sonar_report
description: "Generate a report of all ruff warnings for sonar"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
"--output-file",
"sonar_report.json",
"--output-format",
"json",
]
types_or: [python, pyi]
language: python
- id: mypy
name: mypy
entry: mypy
language: python
"types_or": [python, pyi]
args: ["--scripts-are-modules"]
require_serial: true
additional_dependencies: []
minimum_pre_commit_version: "2.9.2"
- id: unittest-run
name: unittest-run
entry: coverage run
pass_filenames: false
language: system
- id: unittest-coverage-report
name: unittest-coverage-report
entry: coverage xml
pass_filenames: false
language: system
{%- if use_ggshield %}
- id: ggshield
name: ggshield
entry: bash
description: Runs ggshield to detect hardcoded secrets, security vulnerabilities and policy breaks.
stages: [pre-commit]
# Skip this check if running on tekton, because there is a separate build step for it.
args: ["-c", "[ -n \"$CI\" ] || ggshield secret scan pre-commit"]
language: system
pass_filenames: true
{%- endif %}

View File

@@ -0,0 +1 @@
{{ python_version }}

18
template/.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,18 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"ms-vscode-remote.remote-wsl", // WSL
"ms-python.python", // Python
"ms-azuretools.vscode-docker", // Docker
"tamasfe.even-better-toml", // Even Better TOML
"redhat.vscode-yaml", // Yaml plugin
"ms-vscode.makefile-tools", // Makefile plugin
"ms-python.mypy-type-checker", // mypy extension
"charliermarsh.ruff" // Python linter & formatter
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []
}

45
template/.vscode/launch.json.jinja vendored Normal file
View File

@@ -0,0 +1,45 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Current File",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": true
},
{
"name": "Python: Run Main with args",
"type": "debugpy",
"request": "launch",
"module": "{{ package_name }}.main",
"args": ["-w", "world"],
"console": "integratedTerminal",
"justMyCode": true
},
{
// This example launch-config prompts for arguments.
// You can tinker with it to specifically ask for arguments you have defined.
// To do so, create appropriate inputs and reference them here.
"name": "Python: Main, prompt for args",
"type": "debugpy",
"request": "launch",
"module": "{{ package_name }}.main",
"args": ["${input:args}"],
"console": "integratedTerminal",
"justMyCode": true
},
],
"inputs": [
{
"id": "args",
"type":"promptString",
"description": "Arguments for main",
"default": "-w word"
},
]
}

24
template/.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"python.linting.mypyEnabled": true,
"python.testing.unittestArgs": [
"-v",
"-s",
".",
"-p",
"test*.py"
],
"python.defaultInterpreterPath": "${workspaceFolder}/.venv",
"python.envFile": "${workspaceFolder}/.env",
"python.analysis.ignore": ["*"],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit",
}
},
"python.terminal.activateEnvironment": true,
"python.terminal.activateEnvInCurrentTerminal": true
}

28
template/Makefile.jinja Normal file
View File

@@ -0,0 +1,28 @@
all: setup_environment
clean:
poetry run pre-commit uninstall
rm -rf .venv
setup_environment: check
pyenv install {{ python_version }} --skip-existing \
&& pyenv local {{ python_version }} \
&& poetry env use {{ python_version }} \
&& poetry install \
&& poetry run pre-commit install
check: pyenv_exists poetry_exists is_git {{- " ggshield_has_key" if use_ggshield else ""}}
pyenv_exists: ; @which pyenv > /dev/null
poetry_exists: ; @which poetry > /dev/null
is_git: ; @git rev-parse --git-dir > /dev/null
{%- if use_ggshield %}
ggshield_has_key:
ifeq ($(GITGUARDIAN_API_KEY),)
$(warning No API-Key for GitGuardian was set!)
endif
{%- endif %}

132
template/README.md.jinja Normal file
View File

@@ -0,0 +1,132 @@
> ***Note:*** **This repository was generated by the [🖨️ ESTA Python Template](https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python-template/).**
# {{ project_name }}
This is a blueprint for Python based projects. It exemplifies a setup and project structure including neat things such as pre-commit-hooks, automated testing, type checking, CI/CD using Tekton, etc. It is intended to be tailored to project needs.
## Getting Started
- Create a new project on [SonarQube](https://codequality.sbb.ch/projects/create) with the "Project key" `{{ name }}` and "Display name" `{{ project_name }}`.
- Configure pylint profile for sonarproject by making [a pull-request at the sonarqube-config repo](https://code.sbb.ch/projects/KD_WZU/repos/sonarqube-config/browse/QualityProfiles/pylint) and adding the `sonar.projectKey` into the files.
## Setup
### Install Pyenv and Poetry
- If not already installed, install [pyenv](https://github.com/pyenv/pyenv).
- If not already installed, get Poetry according to <https://python-poetry.org/docs/#installation>. If you are new to Poetry, you may find <https://python-poetry.org/docs/basic-usage/> interesting.
### Create Environment
Execute the following in a terminal:
```shell
# Create virtualenv, install all dependencies and pre-commit hooks in one command:
make
```
### Set Environment Variables for Local Development
Environment Variables defined in the `.env`-File are automatically loaded. To get started:
- make a copy of `env.template` and rename it to `.env`
- fill out any missing values (e.g. for passwords)
- close and re-open the terminal
See the Environment Variables section below for a list of all variables used by this application.
{%- if use_ggshield %}
### Setup GGShield (GitGuardian)
The ggshield secret scanner requires an API-token to access the GitGuardian server. This token needs to be set both on your local machine and in your tekton build environment. The `make` command will warn you if the token is not set:
> Makefile:24: No API-Key for GitGuardian was set!
1. Go to the [GitGuardian](https://gitguardian.sbb.ch/workspace/1/settings/api/personal-access-tokens) and create a new *Personal Access Token* with the *Scan* permission.
2. Set API-token locally
Add the environment variable GITGUARDIAN_API_KEY to the `.env`-file and reopen your terminal.
```.env
GITGUARDIAN_API_KEY="YourTokenGoesHere"
```
3. Set API-token in tekton
1. On your tekton-instance navigate to "pipeline secrets"
2. Edit the secret "esta-tekton-pipeline-env"
3. Add a new entry with the key `GITGUARDIAN_API_KEY` and as value the previously generated access token
You can run `poetry run ggshield api-status` locally to check your setup. Consult the [guide on confluence](https://confluence.sbb.ch/x/0QSFlw) if you have any problems.
{% endif %}
{%- if pypi_repository or docker_repository %}
## Usage
{% endif %}
{%- if pypi_repository -%}
### As Shared Library
Available versions can be seen in artifactory: <https://bin.sbb.ch/ui/repos/tree/General/{{ pypi_repository }}/{{ name }}>
Execute the following command to install the latest package:
```shell
pip install {{ name }} --no-cache-dir --index-url https://bin.sbb.ch/artifactory/api/pypi/{{ pypi_repository }}/simple/ --trusted-host bin.sbb.ch
```
{% endif %}
{%- if docker_repository -%}
### As Docker Container
If you take the docker image as is it will enable you to use defined entrypoints directly. The command's name defined in the `[tool.poetry.scripts]` section of pyproject.toml can be passed to the docker run command like this:
```shell
# docker run <path_to_image> <poetry command>
docker run docker.bin.sbb.ch/{{ docker_repository|replace('.docker', '') }}/{{ name }}:<tag> entrypoint
```
Here, 'entrypoint' is the command name.
{% endif %}
## Contents and Concepts
At first glance one may be overwhelmed by the amount of files and folders present in this directory. This is mainly due to the fact, that each tool uses its own configuration file. The situation has improved with more and more tools adding support for pyproject.toml. The following two tables describe the main structure of the project:
| Folder | Purpose |
|---------------|-----|
| `.venv` | This is where the Poetry-managed venv lives. |
| `.vscode` | This is where settings for vscode live. Some useful defaults are added in case you use vscode in your project. If not, this can safely be deleted.|
{%- if helm_repository %}
| `charts` | This directory contains the Helm charts of the project. Helm is used to deploy this application to OpenShift. |
{%- endif %}
| `src` | Main directory for the python code. Most of the times this will contain one subfolder with the main module of the project. `{{ package_name }}` in our case. Replace this with your own module-name. |
| `tests` | Directory containing all tests. This directory will be scanned by the test-infrastructure to find testcases. |
| File | Purpose |
|---------------------------------------|---------|
{%- if docker_repository %}
| `.dockerignore` | Files and directories to ignore when building a Docker image. |
{%- endif %}
| `.gitattributes` | Attributes for git are defined in this file, such as automatic line-ending conversion. |
| `.gitignore` | This file contains a list of path patterns that you want to ignore for git (they will never appear in commits). |
| `.pre-commit-config.yaml` | This file contains configuration for the pre-commit hook, which is run whenever you `git commit`, you can configure running code quality tools and tests here. |
{%- if docker_repository %}
| `Dockerfile` | Instructions for Docker on how to package this app into a Docker image. |
{%- endif %}
| `dependency-check-suppressions.xml` | Configuration file for the [OWASP Dependency Check](https://jeremylong.github.io/DependencyCheck/general/suppression.html). Can be used to specify vulnerabilities that should be suppressed by the check (because they aren't applicable to your project). |
| `estaTektonPipeline.yaml` | Configuration file for Continuous Integration on the ESTA Tekton Pipeline infrastructure. Refer to [ESTA Tekton Parameters - estaTektonPipeline.json](https://confluence.sbb.ch/display/CLEW/ESTA+Tekton+Parameters+-+estaTektonPipeline.json) for more information. |
| `poetry.toml` | Configuration for Poetry. |
| `pyproject.toml` | This file contains meta information for your project, as well as a high-level specification of the dependencies of your project, from which Poetry will do its dependency resolution and generate the `poetry.lock`. Also, it contains some customization for code-quality tools. Check [PEP 621](https://peps.python.org/pep-0621/) for details.|
| `README.md` | This file. Document how to develop and use your application in here. |
| `renovate.json` | Configuration file for [Renovate](https://confluence.sbb.ch/spaces/CLEW/pages/2212995011/Renovate). Allows you to set how and when renovate is run. See the [official documentation](https://docs.renovatebot.com/configuration-options/). |
| `sonar-project.properties` | Configuration for the Sonar check. |
## Environment Variables
The following environment variables may be used to configure `{{ package_name }}`:
| Environment Variable | Purpose | Default Value | Allowed Values |
|----------------------|-|-|-|
| LOG_LEVEL | Sets the default log level [here](src/{{ package_name }}/common/logging_configuration.py). | "INFO" | See [Python Standard Library API-Reference](https://docs.python.org/3/library/logging.html#logging-levels) |

View File

@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd">
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2022-3064 (Parsing malicious or large YAML documents can consume excessive amounts of CPU or memory.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml-clib@.*$</packageUrl>
<cve>CVE-2022-3064</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2021-4235 (Due to unbounded alias chasing, a maliciously crafted YAML file can cause the system to consume significant system resources. If parsing user input, this may be used as a denial of service vector.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml-clib@.*$</packageUrl>
<cve>CVE-2021-4235</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2022-3064 (Parsing malicious or large YAML documents can consume excessive amounts of CPU or memory.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml@.*$</packageUrl>
<cve>CVE-2022-3064</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2021-4235 (Due to unbounded alias chasing, a maliciously crafted YAML file can cause the system to consume significant system resources. If parsing user input, this may be used as a denial of service vector.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml@.*$</packageUrl>
<cve>CVE-2021-4235</cve>
</suppress>
</suppressions>

View File

@@ -0,0 +1,103 @@
# yaml-language-server: $schema=https://clew-resources.sbb-cloud.net/tekton-schema.json
productName: {{ name }}
python:
{{-"\n targetRepo: "~ pypi_repository if pypi_repository else ' {}'}}
builder:
python: "{{ python_version }}"
{%- if docker_repository %}
docker:
artifactoryDockerRepo: {{ docker_repository }}
caching: true
{%- endif %}
{%- if helm_repository %}
helm:
chartRepository: {{ helm_repository }}
linting: true
{%- endif %}
pipelines:
# This is the continuous build pipeline which runs on every commit on a feature branch or if triggered in the tekton-ui.
# It builds and tests artifacts but does not push them to artifactory.
- name: continuous
triggerType:
- GITEVENT
- USER
branchNamePrefixes:
- feature
- renovate
build:
{%- if docker_repository %}
buildDockerImage: true
deployDockerImage: false
{%- endif %}
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}
# This is the snapshot build pipeline which is triggered when pushing commits to master, if triggered in the tekton ui or through a cron job at 03:30 every morning.
# It builds a snapshot-version of the product and pushes it to artifactory.
- name: snapshot
triggerType:
- USER
- GITEVENT
- CRON
branchNamePrefixes:
- master
cron: 30 3 * * * # Nightly Build triggers at 03:30 every day
build:
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if docker_repository %}
buildDockerImage: true
deployDockerImage: true
deployArtifacts: false
{%- endif %}
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}
# This is the release build pipeline which is triggered by adding a valid version tag to a commit.
# This can either be done using git or the esta-tekton ui.
# This builds and deploys the artifacts using the git tag as version.
- name: release
triggerType:
- GITEVENT
versionTagEventPatterns:
- "^(\\d+\\.)(\\d+\\.)(\\d+)$"
build:
{%- if docker_repository %}
buildDockerImage: true
deployArtifacts: true
additionalDockerImageTags:
- latest
{%- endif %}
{%- if helm_repository %}
packageAndDeployHelmChart: true
{%- endif %}
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}

View File

@@ -0,0 +1,11 @@
# The following is important for the Tekton build: Only the working directory
# is transferred between Tekton steps, e.g. between the "prepare" and "build"
# steps in the Python build. Thus we instruct poetry to install the dependencies
# into a .venv directory in the working directory instead of into a global directory.
[virtualenvs]
create = true
in-project = true
[repositories.artifactory]
url = "https://bin.sbb.ch/artifactory/api/pypi/{{ pypi_repository }}"

View File

@@ -0,0 +1,101 @@
[project]
# we prefix the package name with `sbb-` to defend against dependency confusion, see https://confluence.sbb.ch/x/Fxa9lg
name = "sbb-{{ name }}"
description = "{{ description }}"
authors = {{ authors }}
readme = "README.md"
license = "Proprietary"
dynamic = ["version", "dependencies"]
requires-python = "~= {{ python_version }}.0" # = fix minor version to {{ python_version }}.*
[project.urls]
repository = "https://code.sbb.ch/projects/{{ bitbucket_organization }}/repos/{{ name }}"
documentation = "https://code.sbb.ch/projects/{{ bitbucket_organization }}/repos/{{ name }}/browse/README.md"
[project.scripts]
entrypoint = '{{ package_name }}.main:cli'
[tool.poetry]
version = "0.0.0" # Version is ignored and set by Tekton. Use pipeline to increase.
requires-poetry = ">=2.0"
packages = [{include = "{{ package_name }}", from = "src"}]
[tool.poetry.dependencies]
numpy = "~2.3.0" # NumPy is the fundamental package for array computing with Python.
[tool.poetry.group.dev.dependencies]
coverage = {extras = ["toml"], version = "~7.10.4"} # Code coverage measurement for Python
unittest-xml-reporting = "~3.2" # unittest-based test runner with Ant/JUnit like XML reporting.
pre-commit = "~4.3.0" # A framework for managing and maintaining multi-language pre-commit hooks.
pre-commit-hooks = "~5.0.0" # Some out-of-the-box hooks for pre-commit.
mypy = "~1.18.0" # Tool for static type-checking
ruff = "~0.13.0" # Extremely fast python linter and formatter
{%- if use_ggshield %}
ggshield = "^1.23.0" # Tool to check for leaked secrets
{% endif %}
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.mypy]
python_version = "{{ python_version }}"
plugins = "numpy.typing.mypy_plugin"
strict = true
[tool.coverage.run]
command_line = "-m xmlrunner discover --output-file junittest.xml"
[tool.ruff]
line-length = 140
indent-width = 4
exclude = [
".git",
".git-rewrite",
".ipynb_checkpoints",
".mypy_cache",
".pyenv",
".pytype",
".ruff_cache",
".venv",
".vscode",
"**/*.yaml",
"**/*.yml"
]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.lint]
select = [
"D", "E", "W", # Pydocstyle rules
"I", # Isort rules
"PL", # Pylint rules
"F", # Pyflake
"N", # Pep8-naming
"G", # Flake8-logging-format
"ARG", # Flake8-unused-arguments
"RUF100", # fail on unused noqa
]
ignore=[]
fixable = ["I"]
[tool.ruff.lint.per-file-ignores]
"tests/**/*.py" = ["D", "E", "W"] # Ignore Pydocstyle warnings & errors in "test/*" directory
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.ruff.lint.pylint]
max-args = 8

View File

@@ -0,0 +1,33 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"local>KD_ESTA/esta-renovate-preset"
],
"branchPrefix": "renovate/",
"dependencyDashboard": false,
"ignoreDeps": ["python"],
"packageRules": [
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": [
"*"
],
"matchUpdateTypes": [
"minor",
"patch"
]
}
],
{%- if docker_repository %}
"customManagers": [
{
"customType": "regex",
"fileMatch": ["^Dockerfile$"],
"matchStrings": ["POETRY_VERSION=\"(?<currentValue>.+)\""],
"depNameTemplate": "poetry",
"datasourceTemplate": "pypi"
}
]
{%- endif %}
}

View File

@@ -0,0 +1,4 @@
sonar.sources=src
sonar.python.ruff.reportPaths=sonar_report.json
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.xunit.reportPath=junittest.xml

View File

@@ -0,0 +1,8 @@
"""{{ name }} main package.
Add your package documentation here.
"""
# Configure the package-wide UTC console logger as soon as the package is
# imported.  NOTE(review): this is an import-time side effect — confirm it is
# desired for consumers that embed this package as a library.
from {{ package_name }}.common import logging_configuration
logging_configuration.configure_logger()

View File

@@ -0,0 +1 @@
"""The common subpackage bundles shared utilities."""

View File

@@ -0,0 +1,53 @@
"""Logging customization."""
import logging.config
import os
import time
LOGGER = logging.getLogger(__name__)
class UTCFormatter(logging.Formatter):
    """UTC formatter which converts timestamps to UTC."""

    # time.gmtime makes %(asctime)s render in UTC instead of local time;
    # the ignore silences mypy about overriding the base-class attribute.
    converter = time.gmtime  # type: ignore
# Dict-config consumed by configure_logger(): a single console (stderr) handler
# whose formatter renders UTC timestamps in ISO 8601 with timezone offset.
LOGGING_CONFIG = {
    "version": 1,
    # Keep loggers that were created before configure_logger() ran functional.
    "disable_existing_loggers": False,
    "formatters": {
        "utc": {
            "()": UTCFormatter,
            "format": "%(asctime)s - %(process)d - %(name)s - %(levelname)s - %(message)s",
            "datefmt": "%Y-%m-%dT%H:%M:%S%z",
        }
    },
    "handlers": {
        "console": {"class": "logging.StreamHandler", "formatter": "utc"},
    },
    "root": {"handlers": ["console"]},
}
def configure_logger() -> None:
    """Configure logging.

    Installs a console logger that emits UTC timestamps formatted
    according to ISO 8601 (see LOGGING_CONFIG).

    Note: the loglevel defaults to logging.INFO and may be overridden
    through the environment variable 'LOG_LEVEL'.
    """
    logging.config.dictConfig(LOGGING_CONFIG)
    requested_name = os.environ.get("LOG_LEVEL", default="INFO")
    resolved_level = logging.getLevelName(requested_name)
    # getLevelName() echoes a string back for unknown names -> fall back to INFO.
    if not isinstance(resolved_level, int):
        LOGGER.warning(  # pylint: disable=logging-too-many-args
            "Loglevel-Name '%s' not found in loglevels. Falling back to INFO.",
            requested_name,
        )
        resolved_level = logging.INFO
    # Set loglevel on root logger and propagate.
    logging.getLogger().setLevel(resolved_level)

View File

@@ -0,0 +1,13 @@
"""The great Greeter module."""
class Greeter:
    """Class for generating greetings."""

    def __init__(self, name: str) -> None:
        """Initialize a Greeter object."""
        # Name of the person (or thing) to greet.
        self.name: str = name

    def get_greeting(self) -> str:
        """Return a greeting."""
        return "Hello " + self.name + "!"

View File

@@ -0,0 +1,35 @@
"""Main Description."""
import argparse
import logging
import numpy as np
import numpy.typing as npt
from {{ package_name }} import greeter
LOGGER = logging.getLogger(__name__)
def get_np_array() -> npt.NDArray[np.int_]:
    """Return a numpy array holding the single integer 0."""
    # asarray on a fresh list is equivalent to np.array here.
    return np.asarray([0])
def main(word: str) -> None:
    """Main entry point for the application."""
    LOGGER.info("Executing entrypoint.")
    greeting = greeter.Greeter(word).get_greeting()
    print(greeting)
    print(f"Here's a test that poetry dependencies are installed: {get_np_array()}")
def cli() -> None:
    """Cli-Entrypoint."""
    arg_parser = argparse.ArgumentParser(description="Demo argument")
    arg_parser.add_argument("-w", "--word", required=True)
    # Forward the parsed word straight to the application entry point.
    main(arg_parser.parse_args().word)


if __name__ == "__main__":
    cli()

View File

View File

@@ -0,0 +1,8 @@
import unittest
from {{ package_name }} import greeter
class TestGreeter(unittest.TestCase):
    """Unit tests for the Greeter class."""

    def test_get_greeting(self) -> None:
        """A greeting for 'World' must read 'Hello World!'."""
        result = greeter.Greeter("World").get_greeting()
        self.assertEqual(result, "Hello World!")

View File

@@ -0,0 +1,24 @@
import io
import unittest
import unittest.mock
import numpy as np
from {{ package_name }} import main
class TestMain(unittest.TestCase):
    """Unit tests for the main module."""

    def test_get_np_array(self) -> None:
        """get_np_array must return the single-element array [0]."""
        np.testing.assert_array_equal(main.get_np_array(), np.array([0]))

    @unittest.mock.patch("sys.stdout", new_callable=io.StringIO)
    def test_main(self, mock_stdout: unittest.mock.Mock) -> None:
        """Execute main() and compare the captured terminal output to expected values."""
        main.main("world")
        # Rewind the captured stream so it can be read from the start.
        mock_stdout.seek(0)
        captured = mock_stdout.readlines()
        self.assertEqual(captured[0], "Hello world!\n")
        self.assertEqual(
            captured[1],
            "Here's a test that poetry dependencies are installed: [0]\n",
        )

View File

@@ -0,0 +1,5 @@
.pyre
.pytest_cache
.venv
__pycache__
poetry.toml

View File

@@ -0,0 +1,48 @@
FROM registry-redhat.docker.bin.sbb.ch/rhel9/python-{{ python_version.replace(".", "") }} AS base
ENV PYTHONFAULTHANDLER=1 \
PYTHONUNBUFFERED=1 \
PYTHONHASHSEED=random \
PIP_NO_CACHE_DIR=off \
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100
# need to be root in order to install packages
USER 0
# install necessary system dependencies here using `RUN dnf install -y <mypackage> && dnf clean all`
# for installable packages, see: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html-single/package_manifest/index#doc-wrapper
FROM base AS builder
ENV POETRY_PATH="/opt/poetry" \
POETRY_VERSION="2.2.1" \
POETRY_VIRTUALENVS_IN_PROJECT=true
ENV PATH="$POETRY_PATH/bin:$PATH"
RUN python -m venv $POETRY_PATH && \
$POETRY_PATH/bin/pip install poetry==$POETRY_VERSION
WORKDIR /app
# Initialize environment with packages
COPY README.md pyproject.toml poetry.lock ./
RUN poetry env use {{ python_version }} && \
poetry install --without dev --no-interaction --no-ansi --no-root
# Add project source code
COPY src/ ./src/
RUN poetry build -f wheel
RUN poetry run pip install dist/*.whl
FROM base AS final
# switch back to a non-root user for executing
USER 1001
ENV PATH="/app/.venv/bin:$PATH"
COPY --from=builder /app/.venv /app/.venv
# Default command. Can be overridden using docker run <image> <command>
CMD ["entrypoint"]

View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -0,0 +1,6 @@
icon: http://acme.org/replaceme.jpg
apiVersion: v2
name: "{{ name }}"
description: "{{ description }}"
type: application
version: 0.0.0 # will be overwritten at package-time

0
tests/__init__.py Normal file
View File

View File

View File

@@ -0,0 +1,486 @@
from __future__ import annotations
import pathlib
import shutil
import subprocess
import tempfile
import unittest
from types import TracebackType
from typing import Any, Optional, Type
import tomli
import yaml
from copier import run_copy
def _run_shell_command_in_dir(command: list[str], dir: str) -> tuple[bytes, bytes, int]:
with subprocess.Popen(command, cwd=dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as proc:
outs, errs = proc.communicate()
returncode = proc.returncode
return outs, errs, returncode
class CopierRenderer:
    """Context manager that renders this copier template into a temporary directory."""

    # Answers supplied to every render; individual tests may override them.
    DEFAULT_PARAMETERS = {
        "project_name": "My Little Project",
        "author_first_name": "Twilight",
        "author_last_name": "Sparkle",
        "bitbucket_organization": "MY_LITTLE_BITBUCKET_ORG",
    }

    def __init__(
        self,
        temp_source_dir: tempfile.TemporaryDirectory,
        parameters: dict[str, str] | None = None,
    ):
        """Remember the template source directory and optional answer overrides."""
        self._temp_source_dir = temp_source_dir
        self.parameters = {} if parameters is None else parameters

    def __enter__(self) -> CopierRenderer:
        """Run when the context is entered. Starts the transaction."""
        self._temp_render_dir = tempfile.TemporaryDirectory()
        self.render_path = pathlib.Path(self._temp_render_dir.name)
        # Overrides win over the defaults, exactly like DEFAULT | parameters.
        answers = {**CopierRenderer.DEFAULT_PARAMETERS, **self.parameters}
        run_copy(
            src_path=self._temp_source_dir.name,
            dst_path=self.render_path,
            vcs_ref="HEAD",
            defaults=True,
            data=answers,
            quiet=True,
        )
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        """Run when the context is exited. Removes the rendered output directory."""
        self._temp_render_dir.cleanup()

    def load_yaml(self, file_path: str) -> Any:
        """Parse the rendered file at *file_path* (relative to the render dir) as YAML."""
        content = (self.render_path / file_path).read_text()
        return yaml.safe_load(content)

    def load_toml(self, file_path: str) -> Any:
        """Parse the rendered file at *file_path* (relative to the render dir) as TOML."""
        content = (self.render_path / file_path).read_text()
        return tomli.loads(content)
class TestTemplate(unittest.TestCase):
def setUp(self):
source_dir = pathlib.Path(__file__).parent.parent.parent
self.temp_source_dir = tempfile.TemporaryDirectory()
shutil.copytree(str(source_dir), self.temp_source_dir.name, dirs_exist_ok=True)
return super().setUp()
    def tearDown(self):
        """Remove the scratch copy of the template created in setUp()."""
        self.temp_source_dir.cleanup()
        return super().tearDown()
def test_dot_python_version(self) -> None:
with CopierRenderer(self.temp_source_dir) as copier_renderer:
self.assertEqual(
"3.12\n",
(copier_renderer.render_path / ".python-version").read_text(),
)
    def test_pyproject_toml(self) -> None:
        """Render with defaults and verify the generated pyproject.toml metadata."""
        with CopierRenderer(self.temp_source_dir) as copier_renderer:
            pyproject = tomli.loads((copier_renderer.render_path / "pyproject.toml").read_text())
            # Package name is slugified and prefixed with `sbb-` (dependency-confusion defence).
            self.assertEqual(pyproject["project"]["name"], "sbb-my-little-project")
            self.assertEqual(
                pyproject["project"]["description"],
                "A project created with esta-python-template",
            )
            # Author name/email are derived from the first/last name answers.
            self.assertEqual(
                pyproject["project"]["authors"],
                [
                    {
                        "name": "Twilight Sparkle",
                        "email": "twilight.sparkle@sbb.ch",
                    }
                ],
            )
            self.assertEqual(pyproject["project"]["requires-python"], "~= 3.12.0")
            self.assertEqual(
                pyproject["project"]["urls"]["repository"],
                "https://code.sbb.ch/projects/MY_LITTLE_BITBUCKET_ORG/repos/my-little-project",
            )
            self.assertEqual(
                pyproject["project"]["urls"]["documentation"],
                "https://code.sbb.ch/projects/MY_LITTLE_BITBUCKET_ORG/repos/my-little-project/browse/README.md",
            )
            # Module directory name uses underscores, unlike the dashed project slug.
            self.assertEqual(
                pyproject["tool"]["poetry"]["packages"],
                [{"include": "my_little_project", "from": "src"}],
            )
            self.assertEqual(
                pyproject["project"]["scripts"],
                {"entrypoint": "my_little_project.main:cli"},
            )
            self.assertEqual(pyproject["tool"]["mypy"]["python_version"], "3.12")
    def test_with_docker(self) -> None:
        """With a docker repository set, Docker files and Tekton docker config must be rendered."""
        with CopierRenderer(
            self.temp_source_dir,
            {"docker_repository": "esta.docker", "python_version": "3.9"},
        ) as copier_renderer:
            dockerfile = copier_renderer.render_path / "Dockerfile"
            # Dockerfile exists
            self.assertTrue(dockerfile.is_file())
            # Dockerignore exists
            self.assertTrue((copier_renderer.render_path / ".dockerignore").is_file())
            # FROM-instruction: base image tag is derived from the chosen python version.
            with dockerfile.open() as fh:
                self.assertEqual(
                    fh.readline(),
                    "FROM registry-redhat.docker.bin.sbb.ch/rhel9/python-39 AS base\n",
                )
            # Tekton-Pipeline
            tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
            # Tekton-Docker section
            self.assertEqual(
                tekton_pipeline["docker"],
                {"artifactoryDockerRepo": "esta.docker", "caching": True},
            )
            # Pipelines are rendered in fixed order: continuous, snapshot, release.
            continuous_build = tekton_pipeline["pipelines"][0]["build"]
            snapshot_build = tekton_pipeline["pipelines"][1]["build"]
            release_build = tekton_pipeline["pipelines"][2]["build"]
            # Check build sections
            self.assertEqual(
                continuous_build,
                {
                    "sonarScan": {"enabled": True},
                    "owaspDependencyCheck": {
                        "enabled": True,
                        "additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
                    },
                    "failOnQualityGateFailure": True,
                    "buildDockerImage": True,
                    "deployDockerImage": False,
                },
            )
            self.assertEqual(
                snapshot_build,
                {
                    "sonarScan": {"enabled": True},
                    "owaspDependencyCheck": {
                        "enabled": True,
                        "additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
                    },
                    "failOnQualityGateFailure": True,
                    "buildDockerImage": True,
                    "deployDockerImage": True,
                    "deployArtifacts": False,
                },
            )
            self.assertEqual(
                release_build,
                {
                    "sonarScan": {"enabled": True},
                    "owaspDependencyCheck": {
                        "enabled": True,
                        "additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
                    },
                    "failOnQualityGateFailure": True,
                    "buildDockerImage": True,
                    "deployArtifacts": True,
                    "additionalDockerImageTags": ["latest"],
                },
            )
def test_with_docker_default_python_version(self) -> None:
with CopierRenderer(
self.temp_source_dir,
{"docker_repository": "esta.docker"}, # By not specifying a Python version, the default is taken
) as copier_renderer:
dockerfile = copier_renderer.render_path / "Dockerfile"
# Dockerfile exists
self.assertTrue(dockerfile.is_file())
# Dockerignore exists
self.assertTrue((copier_renderer.render_path / ".dockerignore").is_file())
# FROM-instruction
with dockerfile.open() as fh:
self.assertEqual(
fh.readline(),
"FROM registry-redhat.docker.bin.sbb.ch/rhel9/python-312 AS base\n",
)
    def test_without_docker(self) -> None:
        """With an empty docker repository, no Docker files or Tekton docker config are rendered."""
        with CopierRenderer(self.temp_source_dir, {"docker_repository": "", "python_version": "3.12"}) as copier_renderer:
            # Dockerfile does not exist
            self.assertFalse((copier_renderer.render_path / "Dockerfile").exists())
            # Dockerignore does not exist
            self.assertFalse((copier_renderer.render_path / ".dockerignore").exists())
            # Tekton-Pipeline
            tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
            # Tekton-Docker section
            self.assertNotIn("docker", tekton_pipeline)
            # Check build sections: without docker, all three pipelines share the same build config.
            for i, pipeline in enumerate(["continuous", "snapshot", "release"]):
                with self.subTest(msg=f"Checking pipeline: '{pipeline}'."):
                    self.assertEqual(
                        tekton_pipeline["pipelines"][i]["build"],
                        {
                            "sonarScan": {"enabled": True},
                            "owaspDependencyCheck": {
                                "enabled": True,
                                "additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
                            },
                            "failOnQualityGateFailure": True,
                        },
                    )
def test_with_pypi(self) -> None:
with CopierRenderer(self.temp_source_dir, {"pypi_repository": "esta.pypi"}) as copier_renderer:
# Check Tekton-Pipeline
tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
self.assertEqual(tekton_pipeline["python"], {"targetRepo": "esta.pypi"})
def test_without_pypi(self) -> None:
with CopierRenderer(self.temp_source_dir, {"pypi_repository": ""}) as copier_renderer:
# Check Tekton-Pipeline
tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
self.assertEqual(tekton_pipeline["python"], {})
    def test_pre_commit_hooks_in_template(self) -> None:
        """Render the template, then run `make` and all pre-commit hooks on the result.

        NOTE(review): this requires git, make, pyenv and poetry on the host — confirm
        these are available wherever this suite runs.
        """
        with CopierRenderer(self.temp_source_dir) as copier_renderer:
            # The hooks need a git repository with the rendered files staged.
            outs, errs, _ = _run_shell_command_in_dir(["git", "init"], dir=str(copier_renderer.render_path))
            outs, errs, _ = _run_shell_command_in_dir(["git", "add", "--all"], dir=str(copier_renderer.render_path))
            commands = [
                ["make"],
                ["poetry", "run", "pre-commit", "run", "--all-files"],
            ]
            for command in commands:
                with self.subTest(msg=f"Running {command=}."):
                    stdout, stderr, returncode = _run_shell_command_in_dir(command=command, dir=str(copier_renderer.render_path))
                    # Include both output streams in the failure message for debugging.
                    self.assertEqual(
                        returncode,
                        0,
                        msg=f"\nstdout:\n{stdout.decode()}\nstderr:\n{stderr.decode()}.",
                    )
def test_directory_names(self) -> None:
with CopierRenderer(self.temp_source_dir, {"project_name": "Funky Grogu"}) as copier_renderer:
self.assertTrue((copier_renderer.render_path / "src" / "funky_grogu").is_dir())
self.assertTrue((copier_renderer.render_path / "tests" / "funky_grogu").is_dir())
def test_poetry_toml(self) -> None:
with CopierRenderer(self.temp_source_dir, {"pypi_repository": "funky-grogu.pypi"}) as copier_renderer:
poetry = tomli.loads((copier_renderer.render_path / "poetry.toml").read_text())
self.assertEqual(
poetry["repositories"]["artifactory"]["url"],
"https://bin.sbb.ch/artifactory/api/pypi/funky-grogu.pypi",
)
def test_with_helm(self) -> None:
with CopierRenderer(
self.temp_source_dir,
{
"name": "my-project",
"description": "My project description",
"helm_repository": "esta.helm.local",
},
) as copier_renderer:
for dir_path in [
"charts",
"charts/my-project",
"charts/my-project/templates",
]:
self.assertTrue((copier_renderer.render_path / dir_path).is_dir())
for file_path in [
"charts/my-project/.helmignore",
"charts/my-project/Chart.yaml",
"charts/my-project/values.yaml",
]:
self.assertTrue((copier_renderer.render_path / file_path).is_file())
chart_yaml = copier_renderer.load_yaml("charts/my-project/Chart.yaml")
self.assertEqual(
chart_yaml,
{
"apiVersion": "v2",
"description": "My project description",
"icon": "http://acme.org/replaceme.jpg",
"name": "my-project",
"type": "application",
"version": "0.0.0",
},
)
# Tekton-Pipeline
tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
self.assertEqual(
tekton_pipeline["helm"],
{"chartRepository": "esta.helm.local", "linting": True},
)
release_build = tekton_pipeline["pipelines"][2]["build"]
self.assertEqual(
release_build,
{
"sonarScan": {"enabled": True},
"owaspDependencyCheck": {
"enabled": True,
"additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
},
"failOnQualityGateFailure": True,
"packageAndDeployHelmChart": True,
},
)
def test_without_helm(self) -> None:
with CopierRenderer(
self.temp_source_dir,
{
"name": "my-project",
"description": "My project description",
"helm_repository": "",
},
) as copier_renderer:
self.assertFalse((copier_renderer.render_path / "charts").exists())
# Tekton-Pipeline
tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
self.assertNotIn("helm", tekton_pipeline)
release_build = tekton_pipeline["pipelines"][2]["build"]
self.assertEqual(
release_build,
{
"sonarScan": {"enabled": True},
"owaspDependencyCheck": {
"enabled": True,
"additionalParams": "--suppression dependency-check-suppressions.xml --disablePyDist --disablePyPkg --failOnCVSS 9",
},
"failOnQualityGateFailure": True,
},
)
def test_without_ggshield(self) -> None:
with CopierRenderer(
self.temp_source_dir,
{
"name": "my-project",
"description": "My project description",
"use_ggshield": "False",
},
) as copier_renderer:
# .pre-commit-config.yaml
pre_commit_config = copier_renderer.load_yaml(".pre-commit-config.yaml")
hook_ids = [h["id"] for h in pre_commit_config["repos"][0]["hooks"]]
self.assertNotIn("ggshield", hook_ids)
# estaTektonPipeline.yaml
esta_tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
for pipeline in esta_tekton_pipeline["pipelines"]:
self.assertNotIn("gitguardian", pipeline["build"])
# .env.example
env_example = (copier_renderer.render_path / ".env.example").read_text()
self.assertNotIn("GITGUARDIAN_API_KEY", env_example)
# Makefile
makefile = (copier_renderer.render_path / "Makefile").read_text()
self.assertNotIn("ggshield_has_key", makefile)
# pyproject.toml
pyproject_toml = (copier_renderer.render_path / "pyproject.toml").read_text()
self.assertNotIn("ggshield", pyproject_toml)
# README.md
readme_md = (copier_renderer.render_path / "README.md").read_text()
self.assertNotIn("GitGuardian", readme_md)
    def test_with_ggshield(self) -> None:
        """Enabling ggshield must wire GitGuardian into every rendered artifact.

        Checks the pre-commit hook, the Tekton pipeline build steps, the
        .env.example template variables, the Makefile guard target, the
        poetry dev dependency, and the README setup section.
        """
        with CopierRenderer(
            self.temp_source_dir,
            {
                "name": "my-project",
                "description": "My project description",
                "use_ggshield": "True",
            },
        ) as copier_renderer:
            # .pre-commit-config.yaml: the full hook definition must be present.
            # The hook is skipped in CI via the `[ -n "$CI" ]` shell guard.
            pre_commit_config = copier_renderer.load_yaml(".pre-commit-config.yaml")
            self.assertIn(
                {
                    "id": "ggshield",
                    "name": "ggshield",
                    "entry": "bash",
                    "description": "Runs ggshield to detect hardcoded secrets, security vulnerabilities and policy breaks.",
                    "stages": ["pre-commit"],
                    "args": ["-c", '[ -n "$CI" ] || ggshield secret scan pre-commit'],
                    "language": "system",
                    "pass_filenames": True,
                },
                pre_commit_config["repos"][0]["hooks"],
            )
            # estaTektonPipeline.yaml: every pipeline's build step must enable gitguardian.
            esta_tekton_pipeline = copier_renderer.load_yaml("estaTektonPipeline.yaml")
            for pipeline in esta_tekton_pipeline["pipelines"]:
                self.assertEqual(
                    {"enabled": True, "reportmode": "FAILED"},
                    pipeline["build"]["gitguardian"],
                )
            # .env.example: GitGuardian template variables must be present line-for-line.
            env_example_lines = (copier_renderer.render_path / ".env.example").read_text().splitlines()
            for line in [
                "# Template for GGShield",
                'GITGUARDIAN_API_KEY=""',
                'GITGUARDIAN_INSTANCE="https://gitguardian.sbb.ch"',
            ]:
                self.assertIn(line, env_example_lines)
            # Makefile: the guard target that warns about a missing API key must be
            # rendered verbatim (whitespace-sensitive, hence the exact substring check).
            makefile = (copier_renderer.render_path / "Makefile").read_text()
            self.assertIn(
                """
ggshield_has_key:
ifeq ($(GITGUARDIAN_API_KEY),)
	$(warning No API-Key for GitGuardian was set!)
endif
""",
                makefile,
            )
            # pyproject.toml: ggshield must be a poetry dev-group dependency.
            pyproject_toml = copier_renderer.load_toml("pyproject.toml")
            self.assertIn(
                "ggshield",
                pyproject_toml["tool"]["poetry"]["group"]["dev"]["dependencies"],
            )
            # README.md: the setup instructions section must be included.
            readme_md = (copier_renderer.render_path / "README.md").read_text()
            self.assertIn("### Setup GGShield (GitGuardian)", readme_md)