initial commit

This commit is contained in:
root
2025-10-11 17:03:02 +02:00
commit 08dbb6210e
51 changed files with 3420 additions and 0 deletions

View File

@@ -0,0 +1,2 @@
# Changes here will be overwritten by Copier; NEVER EDIT MANUALLY
{{_copier_answers | to_nice_yaml -}}

View File

@@ -0,0 +1,7 @@
# Example environment variable that controls the logging level of the application
LOG_LEVEL="INFO"
{%- if use_ggshield %}
# Template for GGShield
GITGUARDIAN_API_KEY=""
GITGUARDIAN_INSTANCE="https://gitguardian.sbb.ch"
{%- endif %}

2
template/.gitattributes vendored Normal file
View File

@@ -0,0 +1,2 @@
# Convert line endings of all text files to \n (as opposed to \r\n as on Windows for example)
* text=auto

163
template/.gitignore vendored Normal file
View File

@@ -0,0 +1,163 @@
########################################################
### Python .gitignore from https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore
########################################################
# Byte-compiled / optimized / DLL files
**/__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
junittest.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
*.ipynb
# IPython
profile_default/
ipython_config.py
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# GGShield cache file
.cache_ggshield
########################################################
### IDEs & Jupyter
########################################################
.idea
target
.metadata
.settings
.classpath
.project
.idea
*.iml
.ipynb_checkpoints
*/.ipynb_checkpoints/*
########################################################
### Logs & System
########################################################
pylintout
sonar_report.json
mlruns/
.DS_Store
out.csv
*.tmp
dependency-check-report.*

View File

@@ -0,0 +1,93 @@
repos:
- repo: local
hooks:
- id: poetry-check
name: poetry-check
description: run poetry check to validate config
entry: poetry check
language: system
pass_filenames: false
files: (^|/)pyproject.toml$
- id: trailing-whitespace-fixer
name: trailing-whitespace-fixer
entry: trailing-whitespace-fixer
language: python
types: [text]
- id: end-of-file-fixer
name: end-of-file-fixer
entry: end-of-file-fixer
language: python
types: [text]
- id: ruff_formatter
name: ruff_formatter
description: "Run 'ruff format' for extremely fast Python formatting"
entry: ruff format
args: ["--force-exclude"]
types_or: [python]
language: python
- id: ruff_linter
name: ruff_linter
description: "Run 'ruff' for extremely fast Python linting"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
]
types_or: [python, pyi]
language: python
- id: ruff_sonar_report
name: ruff_sonar_report
description: "Generate a report of all ruff warnings for sonar"
entry: ruff check
args:
[
"--force-exclude",
"--fix",
"--output-file",
"sonar_report.json",
"--output-format",
"json",
]
types_or: [python, pyi]
language: python
- id: mypy
name: mypy
entry: mypy
language: python
"types_or": [python, pyi]
args: ["--scripts-are-modules"]
require_serial: true
additional_dependencies: []
minimum_pre_commit_version: "2.9.2"
- id: unittest-run
name: unittest-run
entry: coverage run
pass_filenames: false
language: system
- id: unittest-coverage-report
name: unittest-coverage-report
entry: coverage xml
pass_filenames: false
language: system
{%- if use_ggshield %}
- id: ggshield
name: ggshield
entry: bash
description: Runs ggshield to detect hardcoded secrets, security vulnerabilities and policy breaks.
stages: [pre-commit]
# Skip this check if running on tekton, because there is a separate build step for it.
args: ["-c", "[ -n \"$CI\" ] || ggshield secret scan pre-commit"]
language: system
pass_filenames: true
{%- endif %}

View File

@@ -0,0 +1 @@
{{ python_version }}

18
template/.vscode/extensions.json vendored Normal file
View File

@@ -0,0 +1,18 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations.
// Extension identifier format: ${publisher}.${name}. Example: vscode.csharp
// List of extensions which should be recommended for users of this workspace.
"recommendations": [
"ms-vscode-remote.remote-wsl", // WSL
"ms-python.python", // Python
"ms-azuretools.vscode-docker", // Docker
"tamasfe.even-better-toml", // Even Better TOML
"redhat.vscode-yaml", // Yaml plugin
"ms-vscode.makefile-tools", // Makefile plugin
"ms-python.mypy-type-checker", // mypy extension
"charliermarsh.ruff" // Python linter & formatter
],
// List of extensions recommended by VS Code that should not be recommended for users of this workspace.
"unwantedRecommendations": []
}

45
template/.vscode/launch.json.jinja vendored Normal file
View File

@@ -0,0 +1,45 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Current File",
"type": "debugpy",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"justMyCode": true
},
{
"name": "Python: Run Main with args",
"type": "debugpy",
"request": "launch",
"module": "{{ package_name }}.main",
"args": ["-w", "world"],
"console": "integratedTerminal",
"justMyCode": true
},
{
// This example launch-config prompts for arguments.
// You can tinker with it to specifically ask for arguments you have defined.
// To do so, create appropriate inputs and reference them here.
"name": "Python: Main, prompt for args",
"type": "debugpy",
"request": "launch",
"module": "{{ package_name }}.main",
"args": ["${input:args}"],
"console": "integratedTerminal",
"justMyCode": true
},
],
"inputs": [
{
"id": "args",
"type":"promptString",
"description": "Arguments for main",
"default": "-w word"
},
]
}

24
template/.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,24 @@
{
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true,
"python.linting.mypyEnabled": true,
"python.testing.unittestArgs": [
"-v",
"-s",
".",
"-p",
"test*.py"
],
"python.defaultInterpreterPath": "${workspaceFolder}/.venv",
"python.envFile": "${workspaceFolder}/.env",
"python.analysis.ignore": ["*"],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
"source.fixAll": "explicit",
}
},
"python.terminal.activateEnvironment": true,
"python.terminal.activateEnvInCurrentTerminal": true
}

28
template/Makefile.jinja Normal file
View File

@@ -0,0 +1,28 @@
all: setup_environment
clean:
poetry run pre-commit uninstall
rm -rf .venv
setup_environment: check
pyenv install {{ python_version }} --skip-existing \
&& pyenv local {{ python_version }} \
&& poetry env use {{ python_version }} \
&& poetry install \
&& poetry run pre-commit install
check: pyenv_exists poetry_exists is_git {{- " ggshield_has_key" if use_ggshield else ""}}
pyenv_exists: ; @which pyenv > /dev/null
poetry_exists: ; @which poetry > /dev/null
is_git: ; @git rev-parse --git-dir > /dev/null
{%- if use_ggshield %}
ggshield_has_key:
ifeq ($(GITGUARDIAN_API_KEY),)
$(warning No API-Key for GitGuardian was set!)
endif
{%- endif %}

132
template/README.md.jinja Normal file
View File

@@ -0,0 +1,132 @@
> ***Note:*** **This repository was generated by the [🖨️ ESTA Python Template](https://code.sbb.ch/projects/KD_ESTA_BLUEPRINTS/repos/esta-python-template/).**
# {{ project_name }}
This is a blueprint for Python based projects. It exemplifies a setup and project structure including neat things such as pre-commit-hooks, automated testing, type checking, CI/CD using Tekton, etc. It is intended to be tailored to project needs.
## Getting Started
- Create a new project on [SonarQube](https://codequality.sbb.ch/projects/create) with the "Project key" `{{ name }}` and "Display name" `{{ project_name }}`.
- Configure pylint profile for sonarproject by making [a pull-request at the sonarqube-config repo](https://code.sbb.ch/projects/KD_WZU/repos/sonarqube-config/browse/QualityProfiles/pylint) and adding the `sonar.projectKey` into the files.
## Setup
### Install Pyenv and Poetry
- If not already installed, install [pyenv](https://github.com/pyenv/pyenv).
- If not already installed, get Poetry according to <https://python-poetry.org/docs/#installation>. If you are new to Poetry, you may find <https://python-poetry.org/docs/basic-usage/> interesting.
### Create Environment
Execute the following in a terminal:
```shell
# Create virtualenv, install all dependencies and pre-commit hooks in one command:
make
```
### Set Environment Variables for Local Development
Environment Variables defined in the `.env`-File are automatically loaded. To get started:
- make a copy of `env.template` and rename it to `.env`
- fill out any missing values (e.g. for passwords)
- close and re-open the terminal
See the Environment Variables section below for a list of all variables used by this application.
{%- if use_ggshield %}
### Setup GGShield (GitGuardian)
The ggshield secret scanner requires an API-token to access the GitGuardian server. This token needs to be set both on your local machine and in your tekton build environment. The `make` command will warn you if the token is not set:
> Makefile:24: No API-Key for GitGuardian was set!
1. Go to the [GitGuardian](https://gitguardian.sbb.ch/workspace/1/settings/api/personal-access-tokens) and create a new *Personal Access Token* with the *Scan* permission.
2. Set API-token locally
Add the environment variable GITGUARDIAN_API_KEY to the `.env`-file and reopen your terminal.
```.env
GITGUARDIAN_API_KEY="YourTokenGoesHere"
```
3. Set API-token in tekton
1. On your tekton-instance navigate to "pipeline secrets"
2. Edit the secret "esta-tekton-pipeline-env"
3. Add a new entry with the key `GITGUARDIAN_API_KEY` and as value the previously generated access token
You can run `poetry run ggshield api-status` locally to check your setup. Consult the [guide on confluence](https://confluence.sbb.ch/x/0QSFlw) if you have any problems.
{% endif %}
{%- if pypi_repository or docker_repository %}
## Usage
{% endif %}
{%- if pypi_repository -%}
### As Shared Library
Available versions can be seen in artifactory: <https://bin.sbb.ch/ui/repos/tree/General/{{ pypi_repository }}/{{ name }}>
Execute the following command to install the latest package:
```shell
pip install {{ name }} --no-cache-dir --index-url https://bin.sbb.ch/artifactory/api/pypi/{{ pypi_repository }}/simple/ --trusted-host bin.sbb.ch
```
{% endif %}
{%- if docker_repository -%}
### As Docker Container
If you take the docker image as is it will enable you to use defined entrypoints directly. The command's name defined in the `[tool.poetry.scripts]` section of pyproject.toml can be passed to the docker run command like this:
```shell
# docker run <path_to_image> <poetry command>
docker run docker.bin.sbb.ch/{{ docker_repository|replace('.docker', '') }}/{{ name }}:<tag> entrypoint
```
Here, 'entrypoint' is the command name.
{% endif %}
## Contents and Concepts
At first glance one may be overwhelmed by the amount of files and folders present in this directory. This is mainly due to the fact, that each tool uses its own configuration file. The situation has improved with more and more tools adding support for pyproject.toml. The following two tables describe the main structure of the project:
| Folder | Purpose |
|---------------|-----|
| `.venv` | This is where the Poetry-managed venv lives. |
| `.vscode` | This is where settings for vscode live. Some useful defaults are added in case you use vscode in your project. If not, this can safely be deleted.|
{%- if helm_repository %}
| `charts` | This directory contains the Helm charts of the project. Helm is used to deploy this application to OpenShift. |
{%- endif %}
| `src` | Main directory for the python code. Most of the times this will contain one subfolder with the main module of the project. `{{ package_name }}` in our case. Replace this with your own module-name. |
| `tests` | Directory containing all tests. This directory will be scanned by the test-infrastructure to find testcases. |
| File | Purpose |
|---------------------------------------|---------|
{%- if docker_repository %}
| `.dockerignore` | Files and directories to ignore when building a Docker image. |
{%- endif %}
| `.gitattributes` | Attributes for git are defined in this file, such as automatic line-ending conversion. |
| `.gitignore` | This file contains a list of path patterns that you want to ignore for git (they will never appear in commits). |
| `.pre-commit-config.yaml` | This file contains configuration for the pre-commit hook, which is run whenever you `git commit`, you can configure running code quality tools and tests here. |
{%- if docker_repository %}
| `Dockerfile` | Instructions for Docker on how to package this app into a Docker image. |
{%- endif %}
| `dependency-check-suppressions.xml` | Configuration file for the [OWASP Dependency Check](https://jeremylong.github.io/DependencyCheck/general/suppression.html). Can be used to specify vulnerabilities that should be suppressed by the check (because they aren't applicable to your project). |
| `estaTektonPipeline.yaml` | Configuration file for Continuous Integration on the ESTA Tekton Pipeline infrastructure. Refer to [ESTA Tekton Parameters - estaTektonPipeline.json](https://confluence.sbb.ch/display/CLEW/ESTA+Tekton+Parameters+-+estaTektonPipeline.json) for more information. |
| `poetry.toml` | Configuration for Poetry. |
| `pyproject.toml` | This file contains meta information for your project, as well as a high-level specification of the dependencies of your project, from which Poetry will do its dependency resolution and generate the `poetry.lock`. Also, it contains some customization for code-quality tools. Check [PEP 621](https://peps.python.org/pep-0621/) for details.|
| `README.md` | This file. Document how to develop and use your application in here. |
| `renovate.json` | Configuration file for [Renovate](https://confluence.sbb.ch/spaces/CLEW/pages/2212995011/Renovate). Allows you to set how and when renovate is run. See the [official documentation](https://docs.renovatebot.com/configuration-options/). |
| `sonar-project.properties` | Configuration for the Sonar check. |
## Environment Variables
The following environment variables may be used to configure `{{ package_name }}`:
| Environment Variable | Purpose | Default Value | Allowed Values |
|----------------------|-|-|-|
| LOG_LEVEL | Sets the default log level [here](src/{{ package_name }}/common/logging_configuration.py). | "INFO" | See [Python Standard Library API-Reference](https://docs.python.org/3/library/logging.html#logging-levels) |

View File

@@ -0,0 +1,43 @@
<?xml version="1.0" encoding="UTF-8"?>
<suppressions xmlns="https://jeremylong.github.io/DependencyCheck/dependency-suppression.1.3.xsd">
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2022-3064 (Parsing malicious or large YAML documents can consume excessive amounts of CPU or memory.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml-clib@.*$</packageUrl>
<cve>CVE-2022-3064</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2021-4235 (Due to unbounded alias chasing, a maliciously crafted YAML file can cause the system to consume significant system resources. If parsing user input, this may be used as a denial of service vector.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml-clib@.*$</packageUrl>
<cve>CVE-2021-4235</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2022-3064 (Parsing malicious or large YAML documents can consume excessive amounts of CPU or memory.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml@.*$</packageUrl>
<cve>CVE-2022-3064</cve>
</suppress>
<suppress>
<notes><![CDATA[
This can be suppressed because it is a transient dependency of the `pre-commit-hooks` dependency, which only uses ruamel-yaml if the YAML-specific hooks are actually run.
Additionally, the vulnerability CVE-2021-4235 (Due to unbounded alias chasing, a maliciously crafted YAML file can cause the system to consume significant system resources. If parsing user input, this may be used as a denial of service vector.) only applies when parsing untrusted (user-supplied) YAML, which isn't the case here.
The gist: Never trust a user-submitted YAML file! This could DoS your app!
]]></notes>
<packageUrl regex="true">^pkg:pypi/ruamel-yaml@.*$</packageUrl>
<cve>CVE-2021-4235</cve>
</suppress>
</suppressions>

View File

@@ -0,0 +1,103 @@
# yaml-language-server: $schema=https://clew-resources.sbb-cloud.net/tekton-schema.json
productName: {{ name }}
python:
{{-"\n targetRepo: "~ pypi_repository if pypi_repository else ' {}'}}
builder:
python: "{{ python_version }}"
{%- if docker_repository %}
docker:
artifactoryDockerRepo: {{ docker_repository }}
caching: true
{%- endif %}
{%- if helm_repository %}
helm:
chartRepository: {{ helm_repository }}
linting: true
{%- endif %}
pipelines:
# This is the continuous build pipeline which runs on every commit on a feature branch or if triggered in the tekton-ui.
# It builds and tests artifacts but does not push them to artifactory.
- name: continuous
triggerType:
- GITEVENT
- USER
branchNamePrefixes:
- feature
- renovate
build:
{%- if docker_repository %}
buildDockerImage: true
deployDockerImage: false
{%- endif %}
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}
# This is the snapshot build pipeline which is triggered when pushing commits to master, if triggered in the tekton ui or through a cron job at 03:30 every morning.
# It builds a snapshot-version of the product and pushes it to artifactory.
- name: snapshot
triggerType:
- USER
- GITEVENT
- CRON
branchNamePrefixes:
- master
cron: 30 3 * * * # Nightly Build triggers at 03:30 every day
build:
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if docker_repository %}
buildDockerImage: true
deployDockerImage: true
deployArtifacts: false
{%- endif %}
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}
# This is the release build pipeline which is triggered by adding a valid version tag to a commit.
# This can either be done using git or the esta-tekton ui.
# This builds and deploys the artifacts using the git tag as version.
- name: release
triggerType:
- GITEVENT
versionTagEventPatterns:
- "^(\\d+\\.)(\\d+\\.)(\\d+)$"
build:
{%- if docker_repository %}
buildDockerImage: true
deployArtifacts: true
additionalDockerImageTags:
- latest
{%- endif %}
{%- if helm_repository %}
packageAndDeployHelmChart: true
{%- endif %}
sonarScan:
enabled: true
owaspDependencyCheck:
enabled: true
additionalParams: "--suppression dependency-check-suppressions.xml --disablePyDist
--disablePyPkg --failOnCVSS 9"
failOnQualityGateFailure: true
{%- if use_ggshield %}
gitguardian:
enabled: true
reportmode: "FAILED"
{%- endif %}

View File

@@ -0,0 +1,11 @@
# The following is important for the Tekton build: Only the working directory
# is transferred between Tekton steps, e.g. between the "prepare" and "build"
# steps in the Python build. Thus we instruct poetry to install the dependencies
# into a .venv directory in the working directory instead of into a global directory.
[virtualenvs]
create = true
in-project = true
[repositories.artifactory]
url = "https://bin.sbb.ch/artifactory/api/pypi/{{ pypi_repository }}"

View File

@@ -0,0 +1,101 @@
[project]
# we prefix the package name with `sbb-` to defend against dependency confusion, see https://confluence.sbb.ch/x/Fxa9lg
name = "sbb-{{ name }}"
description = "{{ description }}"
authors = {{ authors }}
readme = "README.md"
license = "Proprietary"
dynamic = ["version", "dependencies"]
requires-python = "~= {{ python_version }}.0" # = fix minor version to {{ python_version }}.*
[project.urls]
repository = "https://code.sbb.ch/projects/{{ bitbucket_organization }}/repos/{{ name }}"
documentation = "https://code.sbb.ch/projects/{{ bitbucket_organization }}/repos/{{ name }}/browse/README.md"
[project.scripts]
entrypoint = '{{ package_name }}.main:cli'
[tool.poetry]
version = "0.0.0" # Version is ignored and set by Tekton. Use pipeline to increase.
requires-poetry = ">=2.0"
packages = [{include = "{{ package_name }}", from = "src"}]
[tool.poetry.dependencies]
numpy = "~2.3.0" # NumPy is the fundamental package for array computing with Python.
[tool.poetry.group.dev.dependencies]
coverage = {extras = ["toml"], version = "~7.10.4"} # Code coverage measurement for Python
unittest-xml-reporting = "~3.2" # unittest-based test runner with Ant/JUnit like XML reporting.
pre-commit = "~4.3.0" # A framework for managing and maintaining multi-language pre-commit hooks.
pre-commit-hooks = "~5.0.0" # Some out-of-the-box hooks for pre-commit.
mypy = "~1.18.0" # Tool for static type-checking
ruff = "~0.13.0" # Extremely fast python linter and formatter
{%- if use_ggshield %}
ggshield = "^1.23.0" # Tool to check for leaked secrets
{% endif %}
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.mypy]
python_version = "{{ python_version }}"
plugins = "numpy.typing.mypy_plugin"
strict = true
[tool.coverage.run]
command_line = "-m xmlrunner discover --output-file junittest.xml"
[tool.ruff]
line-length = 140
indent-width = 4
exclude = [
".git",
".git-rewrite",
".ipynb_checkpoints",
".mypy_cache",
".pyenv",
".pytype",
".ruff_cache",
".venv",
".vscode",
"**/*.yaml",
"**/*.yml"
]
[tool.ruff.format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
[tool.ruff.lint]
select = [
"D", "E", "W", # Pydocstyle rules
"I", # Isort rules
"PL", # Pylint rules
"F", # Pyflake
"N", # Pep8-naming
"G", # Flake8-logging-format
"ARG", # Flake8-unused-arguments
"RUF100", # fail on unused noqa
]
ignore=[]
fixable = ["I"]
[tool.ruff.lint.per-file-ignores]
"tests/**/*.py" = ["D", "E", "W"] # Ignore Pydocstyle warnings & errors in "test/*" directory
[tool.ruff.lint.pydocstyle]
convention = "google"
[tool.ruff.lint.pylint]
max-args = 8

View File

@@ -0,0 +1,33 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"local>KD_ESTA/esta-renovate-preset"
],
"branchPrefix": "renovate/",
"dependencyDashboard": false,
"ignoreDeps": ["python"],
"packageRules": [
{
"groupName": "all non-major dependencies",
"groupSlug": "all-minor-patch",
"matchPackagePatterns": [
"*"
],
"matchUpdateTypes": [
"minor",
"patch"
]
}
],
{%- if docker_repository %}
"customManagers": [
{
"customType": "regex",
"fileMatch": ["^Dockerfile$"],
"matchStrings": ["POETRY_VERSION=\"(?<currentValue>.+)\""],
"depNameTemplate": "poetry",
"datasourceTemplate": "pypi"
}
]
{%- endif %}
}

View File

@@ -0,0 +1,4 @@
sonar.sources=src
sonar.python.ruff.reportPaths=sonar_report.json
sonar.python.coverage.reportPaths=coverage.xml
sonar.python.xunit.reportPath=junittest.xml

View File

@@ -0,0 +1,8 @@
"""{{ name }} main package.
Add your package documentation here.
"""
from {{ package_name }}.common import logging_configuration
logging_configuration.configure_logger()

View File

@@ -0,0 +1 @@
"""The common subpackage bundles shared utilities."""

View File

@@ -0,0 +1,53 @@
"""Logging customization."""
import logging.config
import os
import time
LOGGER = logging.getLogger(__name__)
class UTCFormatter(logging.Formatter):
"""UTC formatter which converts timestamps to UTC."""
converter = time.gmtime # type: ignore
LOGGING_CONFIG = {
"version": 1,
"disable_existing_loggers": False,
"formatters": {
"utc": {
"()": UTCFormatter,
"format": "%(asctime)s - %(process)d - %(name)s - %(levelname)s - %(message)s",
"datefmt": "%Y-%m-%dT%H:%M:%S%z",
}
},
"handlers": {
"console": {"class": "logging.StreamHandler", "formatter": "utc"},
},
"root": {"handlers": ["console"]},
}
def configure_logger() -> None:
"""Configure logging.
Configures a console logger which logs in UTC time with timestamps
formatted according to ISO 8601.
Note: loglevel defaults to logging.INFO and may be overridden by
configuring the environment variable 'LOG_LEVEL'.
"""
logging.config.dictConfig(LOGGING_CONFIG)
loglevel_name = os.environ.get("LOG_LEVEL", default="INFO")
loglevel = logging.getLevelName(loglevel_name)
if isinstance(loglevel, str):
LOGGER.warning( # pylint: disable=logging-too-many-args
"Loglevel-Name '%s' not found in loglevels. Falling back to INFO.",
loglevel_name,
)
loglevel = logging.INFO
# Set loglevel on root logger and propagate.
logging.getLogger().setLevel(loglevel)

View File

@@ -0,0 +1,13 @@
"""The great Greeter module."""
class Greeter:
    """Produces personalized greeting messages."""

    def __init__(self, name: str) -> None:
        """Store the name to greet.

        Args:
            name: The name inserted into generated greetings.
        """
        self.name: str = name

    def get_greeting(self) -> str:
        """Build and return the greeting string for the stored name."""
        return f"Hello {self.name}!"

View File

@@ -0,0 +1,35 @@
"""Main Description."""
import argparse
import logging
import numpy as np
import numpy.typing as npt
from {{ package_name }} import greeter
LOGGER = logging.getLogger(__name__)
def get_np_array() -> npt.NDArray[np.int_]:
"""Return a numpy array."""
return np.array([0])
def main(word: str) -> None:
"""Main entry point for the application."""
LOGGER.info("Executing entrypoint.")
print(greeter.Greeter(word).get_greeting())
print(f"Here's a test that poetry dependencies are installed: {get_np_array()}")
def cli() -> None:
"""Cli-Entrypoint."""
parser = argparse.ArgumentParser(description="Demo argument")
parser.add_argument("-w", "--word", required=True)
args = parser.parse_args()
main(args.word)
if __name__ == "__main__":
cli()

View File

View File

@@ -0,0 +1,8 @@
"""Unit tests for the greeter module."""
import unittest

from {{ package_name }} import greeter


class TestGreeter(unittest.TestCase):
    """Tests for the Greeter class."""

    def test_get_greeting(self) -> None:
        """The greeting embeds the given name in the expected template."""
        self.assertEqual(greeter.Greeter("World").get_greeting(), "Hello World!")

View File

@@ -0,0 +1,24 @@
"""Unit tests for the main module."""
import io
import unittest
import unittest.mock

import numpy as np

from {{ package_name }} import main


class TestMain(unittest.TestCase):
    """Tests for the CLI entry point module."""

    def test_get_np_array(self) -> None:
        """get_np_array returns a one-element array containing 0."""
        np.testing.assert_array_equal(main.get_np_array(), np.array([0]))

    # Capture stdout so the printed greeting and array line can be asserted.
    @unittest.mock.patch("sys.stdout", new_callable=io.StringIO)
    def test_main(self, mock_stdout: unittest.mock.Mock) -> None:
        """Test main method by executing it and comparing terminal output to expected values."""
        main.main("world")
        # Go to beginning of output
        mock_stdout.seek(0)
        self.assertEqual(mock_stdout.readline(), "Hello world!\n")
        self.assertEqual(
            mock_stdout.readline(),
            "Here's a test that poetry dependencies are installed: [0]\n",
        )

View File

@@ -0,0 +1,5 @@
.pyre
.pytest_cache
.venv
__pycache__
poetry.toml

View File

@@ -0,0 +1,48 @@
FROM registry-redhat.docker.bin.sbb.ch/rhel9/python-{{ python_version.replace(".", "") }} AS base
ENV PYTHONFAULTHANDLER=1 \
PYTHONUNBUFFERED=1 \
PYTHONHASHSEED=random \
PIP_NO_CACHE_DIR=off \
PIP_DISABLE_PIP_VERSION_CHECK=on \
PIP_DEFAULT_TIMEOUT=100
# need to be root in order to install packages
USER 0
# install necessary system dependencies here using `RUN dnf install -y <mypackage> && dnf clean all`
# for installable packages, see: https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/9/html-single/package_manifest/index#doc-wrapper
FROM base AS builder
ENV POETRY_PATH="/opt/poetry" \
POETRY_VERSION="2.2.1" \
POETRY_VIRTUALENVS_IN_PROJECT=true
ENV PATH="$POETRY_PATH/bin:$PATH"
RUN python -m venv $POETRY_PATH && \
$POETRY_PATH/bin/pip install poetry==$POETRY_VERSION
WORKDIR /app
# Initialize environment with packages
COPY README.md pyproject.toml poetry.lock ./
RUN poetry env use {{ python_version }} && \
poetry install --without dev --no-interaction --no-ansi --no-root
# Add project source code
COPY src/ ./src/
RUN poetry build -f wheel
RUN poetry run pip install dist/*.whl
FROM base AS final
# switch back to a non-root user for executing
USER 1001
ENV PATH="/app/.venv/bin:$PATH"
COPY --from=builder /app/.venv /app/.venv
# Default command. Can be overridden using docker run <image> <command>
CMD ["entrypoint"]

View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -0,0 +1,6 @@
icon: http://acme.org/replaceme.jpg
apiVersion: v2
name: "{{ name }}"
description: "{{ description }}"
type: application
version: 0.0.0 # will be overwritten at package-time