"Copy" super linter from internal to barman public repo
This commit:

* Adds the `linter.yml` workflow to the barman repository
* Adds the files that are referenced in the workflow:
    * .github/workflows/linter.yml
    * .gitleaks.toml
    * .hadolint.yaml
    * .isort.cfg
    * .markdownlint.yml
    * .python-black
    * .yamllint.yml

References: BAR-361

Signed-off-by: Andre <[email protected]>
andremagui committed Aug 31, 2024
1 parent 58b2653 commit 03d581d
Showing 7 changed files with 237 additions and 0 deletions.
207 changes: 207 additions & 0 deletions .github/workflows/linter.yml
@@ -0,0 +1,207 @@
---
# Copyright (C) 2024 EnterpriseDB

name: Linters

on:
  pull_request:
    branches:
      - master

  push:
    branches:
      - master

  schedule:
    # Lint the code base every Monday at 12:00 am. The idea here is to catch possible
    # issues that were not detected during the normal development workflow.
    - cron: '0 0 * * 1'

  workflow_dispatch:
    inputs:
      source-ref:
        description: Source code branch/ref name
        default: master
        required: true
        type: string

env:
  SOURCE_REF: ${{ inputs.source-ref || github.ref }}
  GITHUB_TOKEN: ${{ secrets.GH_SLONIK }}

jobs:
  run-super-linter:
    name: Run super linter
    runs-on: ubuntu-latest

    permissions:
      contents: read
      packages: read
      # To report GitHub Actions status checks
      statuses: write

    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: ${{ env.SOURCE_REF }}
          # Full git history is needed to get a proper list of changed files within `super-linter`
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v5

      - name: Super-linter
        uses: super-linter/super-linter/slim@v7
        env:
          # To report GitHub Actions status checks
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          # Linters configuration.
          LINTER_RULES_PATH: '.'
          # We are not interested in linting these files:
          # * Security workflows provided by the Foundation team, as we are not supposed
          #   to be changing them.
          # * Markdown files under `doc` or `sphinx` directories, which belong to the
          #   old docs and are going to be replaced soon.
          FILTER_REGEX_EXCLUDE: '\.github/workflows/[^\s]*-scan.ya?ml|(doc|sphinx)/.*\.md'
          DOCKERFILE_HADOLINT_FILE_NAME: .hadolint.yaml
          GITLEAKS_CONFIG_FILE: .gitleaks.toml
          MARKDOWN_CONFIG_FILE: .markdownlint.yml
          PYTHON_BLACK_CONFIG_FILE: .python-black
          PYTHON_FLAKE8_CONFIG_FILE: tox.ini
          PYTHON_ISORT_CONFIG_FILE: .isort.cfg
          YAML_CONFIG_FILE: .yamllint.yml
          YAML_ERROR_ON_WARNING: false
          # On runs triggered by PRs we only lint the added/modified files.
          VALIDATE_ALL_CODEBASE: ${{ github.event_name != 'pull_request' }}
          # Validate the file types used in the Barman repo.
          # Bash, because of bash scripts.
          VALIDATE_BASH: true
          VALIDATE_BASH_EXEC: true
          # Dockerfile, because we might add some of them soon.
          VALIDATE_DOCKERFILE_HADOLINT: true
          # Validate the repo's own GitHub workflows and actions.
          VALIDATE_GITHUB_ACTIONS: true
          # Search for leaks in the repository.
          VALIDATE_GITLEAKS: true
          # Validate all documentation files from the repo.
          VALIDATE_MARKDOWN: true
          # Validate Python code.
          VALIDATE_PYTHON_BLACK: true
          VALIDATE_PYTHON_FLAKE8: true
          VALIDATE_PYTHON_ISORT: true
          # Validate YAML files from workflows and release notes.
          VALIDATE_YAML: true

  check-conventions:
    if: github.event_name == 'pull_request'
    name: Check compliance with the contribution guidelines in PRs
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          ref: ${{ github.event.pull_request.head.ref }}

      - name: Set up Python
        uses: actions/setup-python@v5

      - name: Checkout repo-actions
        uses: actions/checkout@v4
        with:
          repository: EnterpriseDB/repo-actions
          token: ${{ env.GITHUB_TOKEN }}
          ref: v1
          path: ./.github/repo-actions

      - name: Check conventions for PRs
        id: validate-conventions
        shell: python
        run: |
          import os
          import re
          import json
          import subprocess
          # Get the event payload from the GitHub event
          with open(os.getenv('GITHUB_EVENT_PATH'), 'r') as f:
              event_payload = json.load(f)
          # Get the branch name, PR number and PR description from the event payload
          pr_number = event_payload['pull_request']['number']
          branch_name = event_payload['pull_request']['head']['ref']
          pr_description = event_payload['pull_request']['body']
          # Initialize a list to collect errors when conventions are not followed
          errors = []
          # Check the conventions for PRs, starting with the branch name.
          # By storing the ticket number in the branch name, we also indirectly check that
          # the commit messages and the PR description all reference the same ticket number
          match = re.match(r'^dev/bar-(\d+)(?=[^\s]*$)', branch_name, re.IGNORECASE)
          if match:
              # Get the ticket number from the branch name
              ticket_number = match.group(1)
          else:
              errors.append(f'Branch name {branch_name} does not follow the convention')
              ticket_number = None
          if ticket_number:
              # Check the PR description for the ticket reference
              if not re.search(rf"^References: BAR-{ticket_number}(?=\.|\s+|$)", pr_description, re.IGNORECASE | re.MULTILINE):
                  errors.append(f'PR description does not reference BAR-{ticket_number}')
              # Check PR commit messages for the ticket reference.
              # Get the target branch from the PR payload
              target_branch = event_payload['pull_request']['base']['ref']
              # Get a list of commits that exist only in the source branch but not in the target branch
              commit_hashes = subprocess.check_output(
                  ['git', 'rev-list', '--right-only', f'origin/{target_branch}...{branch_name}']
              ).decode().split()
              # Check each commit message for the ticket reference
              for commit_hash in commit_hashes:
                  commit_message = subprocess.check_output(
                      ['git', 'show', '-s', '--format=%b', commit_hash]
                  ).decode()
                  if not re.search(rf"^References: BAR-{ticket_number}(?=\.|\s+|$)", commit_message, re.IGNORECASE | re.MULTILINE):
                      errors.append(f'Commit {commit_hash} does not reference BAR-{ticket_number}')
          exit_code = 0
          comments = []
          if len(errors) == 0:
              comments.append(
                  "✅ This PR follows all conventions!"
              )
          else:
              # If the PR fails to follow any of the conventions, report it and mark as failed
              comments.append(
                  "❌ This PR does not follow all conventions!"
              )
              comments.append(
                  "\n".join(errors)
              )
              exit_code = 1
          with open(os.environ["GITHUB_OUTPUT"], "a") as fd:
              # We need to "encode" line-breaks as literal "\n" values when writing to
              # GITHUB_OUTPUT so we avoid "Invalid format" errors in Actions
              comments = '\n\n'.join(comments).replace("\n", r"\n")
              fd.write(f"comments={comments}\n")
              fd.write(f"exit_code={exit_code}\n")

      - name: Create or update comment about conventions validation
        uses: ./.github/repo-actions/create-or-update-comment
        with:
          token: ${{ env.GITHUB_TOKEN }}
          repo: ${{ github.repository }}
          issue-number: ${{ github.event.pull_request.number }}
          comment: '# Contributing Conventions Status\n\n${{ steps.validate-conventions.outputs.comments }}'
          matches: '^# Contributing Conventions Status\n\n'

      - name: Exit with exit code reported by conventions validation
        run: |
          # shellcheck disable=SC2242
          exit ${{ steps.validate-conventions.outputs.exit_code }}
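Given the `workflow_dispatch` trigger and its `source-ref` input above, the workflow can also be started by hand from the GitHub CLI; the command below is an illustration, not part of the commit:

    gh workflow run linter.yml --ref master -f source-ref=master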
4 changes: 4 additions & 0 deletions .gitleaks.toml
@@ -0,0 +1,4 @@
[extend]
# useDefault will extend the base configuration with the default gitleaks config:
# https://github.com/zricethezav/gitleaks/blob/master/config/gitleaks.toml
useDefault = true
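The same leak scan can be reproduced locally with the gitleaks CLI (illustration only, assuming gitleaks v8 is installed):

    gitleaks detect --source . --config .gitleaks.toml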
1 change: 1 addition & 0 deletions .hadolint.yaml
@@ -0,0 +1 @@
failure-threshold: error
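Once Dockerfiles land in the repo, the same threshold can be applied locally (illustration only, assuming the hadolint binary and a Dockerfile at the repo root):

    hadolint --config .hadolint.yaml Dockerfile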
3 changes: 3 additions & 0 deletions .isort.cfg
@@ -0,0 +1,3 @@
[settings]
profile = black
multi_line_output = 3
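An equivalent local import-order check might look like this (illustration only, assuming isort is installed):

    isort --settings-path .isort.cfg --check-only .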
8 changes: 8 additions & 0 deletions .markdownlint.yml
@@ -0,0 +1,8 @@
# MD013/line-length : Line length : https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md013.md
# We don't want the linter to fail just because line-length was exceeded.
MD013: false
# MD024/no-duplicate-heading: https://github.com/DavidAnson/markdownlint/blob/v0.34.0/doc/md024.md
# We don't want the linter to fail when duplicated header names are found. That is not
# relevant for us, and actually we rely on duplicated names when generating the RELNOTES.md
# contents.
MD024: false
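To run the same Markdown checks locally, something along these lines should work (illustration only, assuming markdownlint-cli is installed):

    markdownlint --config .markdownlint.yml '**/*.md'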
2 changes: 2 additions & 0 deletions .python-black
@@ -0,0 +1,2 @@
[tool.black]
line-length = 88
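A matching local formatting check would be roughly (illustration only, assuming black is installed):

    black --check --config .python-black .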
12 changes: 12 additions & 0 deletions .yamllint.yml
@@ -0,0 +1,12 @@
extends: default

rules:
  # comments should visibly make sense
  comments:
    level: error
  comments-indentation:
    level: error
  # 88 chars should be enough, but don't fail if a line is longer
  line-length:
    max: 88
    level: warning
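The same YAML rules can be exercised locally with (illustration only, assuming yamllint is installed):

    yamllint -c .yamllint.yml .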
