Compare commits: master ... development

42 commits (SHA1):

5feba1c27c
e03a201c30
ec9304fdc5
f10663a2d5
eb11625b66
5e69fb2807
4de933151a
a2b935c5cb
4b9133a626
dc57a65a1c
d60383e6f6
9727594d89
6d501e25d9
3629b2ff21
c50be1bb4c
c6777a9423
d29ae2b0f0
3a1bf95099
97c196e6d8
6a2a8f04a3
e61f22c958
b928a044f7
9f3b07c9f8
27fcf1fde4
9516803f78
55b7dc396f
598cd7db5c
b18043c2f0
f53cd10241
ea2f7e76cb
b8fb75e6cd
73123d7b4c
aa368e4bfb
a7b2145886
ab3560bad3
7d3a8d833f
8d6245779c
6dd49ee14b
5cd30f0bc0
8a2826ca34
2e0062062a
358bc0394e
23  .cz.yaml  Normal file
@@ -0,0 +1,23 @@
---
commitizen:
  customize:
    change_type_map:
      feature: Features
      fix: Fixes
      refactor: Refactoring
      test: Tests
    change_type_order:
      - BREAKING CHANGE
      - feat
      - fix
      - test
      - refactor
    # yamllint disable rule:line-length
    commit_parser: ^(?P<change_type>feat|fix|test|refactor|perf|BREAKING CHANGE)(?:\((?P<scope>[^()\r\n]*)\)|\()?(?P<breaking>!)?:\s(?P<message>.*)?
    # yamllint enable rule:line-length
  name: cz_customize
  prerelease_offset: 1
  tag_format: $version
  update_changelog_on_bump: false
  version: 0.0.1
  version_scheme: semver

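The `commit_parser` pattern above drives how commitizen classifies commit subjects. A couple of illustrative subjects (hypothetical examples, not from this repository) that the pattern would match:

``` bash
# Hypothetical commit subjects the commit_parser regex above would match:
git commit -m "feat(webhook): add workflow_job rulebook source"
git commit -m "fix: handle empty GIT_INTERNAL_API_URL"
git commit -m "test(parser)!: flag a breaking change with '!'"
```
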
7  .dockerignore  Normal file
@@ -0,0 +1,7 @@
.ansible
.git
CONTRIBUTING.md
docs/
galaxy.yml
*.tmp.*

0  .gitea/.gitkeep  Normal file

0  .gitea/workflows/.gitkeep  Normal file

42  .gitea/workflows/pull_request.yaml  Normal file
@@ -0,0 +1,42 @@
---

name: Lint (Pull Request)


on:
  pull_request: {}


jobs:

  lint:
    runs-on: ubuntu-latest
    steps:


      - name: Enable Matcher Service
        run: |
          echo "NFC_PROBLEM_MATCHER=${GITHUB_REF_NAME}";


      - uses: actions/checkout@v3


      - name: Install YAMLLint
        run: pip install yamllint


      - name: Run YAMLLint
        run: |
          echo "NFC_PROBLEM_MATCHER_TYPE=YAML-Lint"
          yamllint -f github . || true


      - name: Install Ansible-Lint
        run: pip install ansible-lint


      - name: Run Ansible-Lint
        run: |
          echo "NFC_PROBLEM_MATCHER_TYPE=pylint-json";
          ansible-lint -f json . || true

25  .github/workflows/ci.yaml  vendored  Normal file
@@ -0,0 +1,25 @@
---

name: 'CI'


on:
  push:
    branches:
      - '**'
    tags:
      - '*'

jobs:


  docker:
    name: 'Docker'
    uses: nofusscomputing/action_docker/.github/workflows/docker.yaml@development
    with:
      DOCKER_BUILD_IMAGE_NAME: "nofusscomputing/git-event-problem-matcher"
      DOCKER_PUBLISH_REGISTRY: "docker.io"
      DOCKER_PUBLISH_IMAGE_NAME: "nofusscomputing/git-event-problem-matcher"
    secrets:
      DOCKER_PUBLISH_USERNAME: ${{ secrets.NFC_DOCKERHUB_USERNAME }}
      DOCKER_PUBLISH_PASSWORD: ${{ secrets.NFC_DOCKERHUB_TOKEN }}

17  .github/workflows/pull_request.yaml  vendored  Normal file
@@ -0,0 +1,17 @@
---

name: Pull Requests


on:
  pull_request: {}

jobs:

  ci-test:
    runs-on: ubuntu-latest
    steps:

      - name: Test
        run: |
          echo "github";

5  .gitignore  vendored  Normal file
@@ -0,0 +1,5 @@
# Temp directories / files

.ansible/
artifacts/
*.tmp.*

77  .yamllint  Normal file
@@ -0,0 +1,77 @@
---

# extends: default


ignore:
  - '.github/'
  - '**/crd/**'
  - mkdocs.yml
  - '*PrometheusRule*'
  - '**/source/**'

rules:
  braces:
    level: error
    max-spaces-inside: 1
    min-spaces-inside: 1
    min-spaces-inside-empty: 0
    max-spaces-inside-empty: 0

  brackets:
    level: error
    max-spaces-inside: 1
    min-spaces-inside: 1
    min-spaces-inside-empty: 0
    max-spaces-inside-empty: 0

  colons:
    level: warning
    max-spaces-after: 1

  commas:
    level: warning

  comments:
    level: error
    require-starting-space: true
    ignore-shebangs: true
    min-spaces-from-content: 4

  comments-indentation:
    level: error

  document-end:
    level: error
    present: false

  document-start:
    level: error
    present: true

  empty-lines:
    level: error
    max: 3
    max-start: 0
    max-end: 0

  hyphens:
    level: error
    max-spaces-after: 1

  indentation:
    level: error
    spaces: 2
    indent-sequences: true
    check-multi-line-strings: true

  line-length:
    level: warning
    max: 100
    allow-non-breakable-inline-mappings: true

  new-lines:
    level: error
    type: unix

  truthy: disable

5  CONTRIBUTING.md  Normal file
@@ -0,0 +1,5 @@
# Contribution Guide

Contributions welcome.

This doc is still a WIP.

66  README.md  Normal file
@@ -0,0 +1,66 @@
# No Fuss Computing's Git[ea/hub] Event Processing

Documentation for the collection.


## TL;DR

| Name | required | Description |
|:---:|:---:|:---|
| GIT_API_TOKEN | :white_check_mark: | API token to access Git[ea/hub] to post the PR review. |
| GIT_API_URL | :white_check_mark: | API URL to access Git[ea/hub]. |
| GIT_INTERNAL_API_URL | :x: | An internal URL to use in place of the public API URL, i.e. a DMZ URL. |
| GIT_EVENT_RULEBOOK_TOKEN | :white_check_mark: | The token to set for the inbound connection to the container. To create a random one: `echo $(head -c 50 /dev/urandom | xxd -p | head -c 50)` |
| GIT_EVENT_RULEBOOK_PORT | :x: | The port to listen on for inbound webhooks. Defaults to `5000`. |
| ENABLE_DEBUG_LOGGING | :x: | Turn on playbook debug logging. Defaults to `false`. :warning: Doing this will output your auth tokens to the log. |


### Steps

1. Deploy somewhere that Git[ea/hub] has access to the container
1. Ensure the vars above are set within the container
1. **For Gitea**, go to `Site Administration -> Integrations -> Webhooks`
1. Add a system webhook
1. Set the HTTP URL to the container IP/DNS name, ensuring the port is specified with the suffix `:<port number>`
1. Select `Trigger On -> Workflow Jobs`
1. Set `Authorization Header` to `Bearer <actual value of GIT_EVENT_RULEBOOK_TOKEN>`
1. Click `Update Webhook` to save
1. You are now good to go; all jobs will get posted to the container for processing.


### Setup Parsing of matchers

1. Before any parsing can be done, the following must be output with the ID of the pull request to enable problem matcher parsing.

    ``` bash

    echo "NFC_PROBLEM_MATCHER=${GITHUB_REF_NAME}";

    ```

1. Ansible Lint

    1. Before the linter runs, ensure the following command is executed in your workflow.

        ``` bash

        echo "NFC_PROBLEM_MATCHER_TYPE=pylint-json";

        ```

    1. The output format for pylint is JSON, i.e. `ansible-lint -f json .`

1. Parsing normal GitHub Problem matchers

    1. Before the job runs, give the matcher a name: no spaces, only letters, optionally with `-` and `_`.

        ``` bash

        echo "NFC_PROBLEM_MATCHER_TYPE=My-Job-Name";

        ```

    1. Now run the job that outputs standard GitHub style problem matchers.

1. Now the user will have a PR review done with the contents of the problem matcher(s) as review comments.

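The deploy step in the README above is deliberately loose. A minimal sketch of one way to run the container, assuming the image name used by the CI workflow earlier in this diff and the variables from the TL;DR table (all values are placeholders):

``` bash
# Run the event processor somewhere Git[ea/hub] can reach; values are placeholders.
docker run -d \
  --name git-event-problem-matcher \
  -p 5000:5000 \
  -e GIT_API_TOKEN='<token able to post PR reviews>' \
  -e GIT_API_URL='https://gitea.example.org/api/v1' \
  -e GIT_EVENT_RULEBOOK_TOKEN="$(head -c 50 /dev/urandom | xxd -p | head -c 50)" \
  -e GIT_EVENT_RULEBOOK_PORT=5000 \
  nofusscomputing/git-event-problem-matcher:latest
```

The same `GIT_EVENT_RULEBOOK_TOKEN` value is what goes into the webhook's `Authorization: Bearer` header in the README steps.
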
100  dockerfile  Normal file
@@ -0,0 +1,100 @@
ARG GIT_EVENT_RULEBOOK_NAME='problem_matcher'

FROM python:3.11-alpine3.22 AS Build


RUN apk update; \
    apk add \
      build-base \
      gcc;


RUN pip install --upgrade \
    setuptools \
    wheel


RUN apk add openjdk21-jdk;


RUN apk add \
    alpine-sdk \
    libffi-dev \
    maven \
    build-base libc-dev;


ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk

# ChatGPT suggestion to fix alpine version >3.19
ENV CFLAGS "-Wno-incompatible-pointer-types"


COPY requirements.txt /tmp/requirements.txt


RUN mkdir -p /tmp/python_modules; \
    cd /tmp/python_modules; \
    pip download --dest . \
      pip \
      --check-build-dependencies \
      -r /tmp/requirements.txt


RUN cd /tmp/python_modules; \
    mkdir -p /tmp/python_builds; \
    echo "[DEBUG] PATH=$PATH"; \
    ls -l; \
    pip wheel --wheel-dir /tmp/python_builds --find-links . *.whl; \
    pip wheel --wheel-dir /tmp/python_builds --find-links . *.tar.gz;




FROM python:3.11-alpine3.22


ARG GIT_EVENT_RULEBOOK_NAME


RUN apk --no-cache update; \
    apk --no-cache add \
      openjdk21-jdk


ENV ANSIBLE_FORCE_COLOR true

ENV ANSIBLE_INVENTORY hosts.yaml

ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk

ENV GIT_EVENT_RULEBOOK_NAME ${GIT_EVENT_RULEBOOK_NAME}


COPY includes/ /

COPY . /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/

COPY --from=build /tmp/python_builds /tmp/python_builds


RUN pip install --no-cache-dir /tmp/python_builds/*; \
    rm -R /tmp/python_builds; \
    ansible-galaxy collection install ansible.eda; \
    addgroup eda; \
    adduser -D --ingroup eda eda; \
    cp -r /root/.ansible /home/eda/; \
    rm -rf /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/includes; \
    mv /usr/bin/annotations.py /usr/bin/annotations; \
    chmod +x /usr/bin/annotations; \
    chmod +x /entrypoint.sh; \
    chown eda:eda -R /home/eda;


WORKDIR /home/eda


USER eda


ENTRYPOINT [ "/entrypoint.sh" ]

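The dockerfile above is a two-stage build: the first stage wheels the Python dependencies, the second installs them alongside the collection and the `annotations` helper. A sketch of building it locally, assuming a hypothetical local tag:

``` bash
# Build from the repository root; GIT_EVENT_RULEBOOK_NAME defaults to 'problem_matcher'.
docker build \
  --build-arg GIT_EVENT_RULEBOOK_NAME=problem_matcher \
  --file dockerfile \
  --tag git-event-problem-matcher:local \
  .
```
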
43  extensions/eda/rulebooks/problem_matcher.yml  Normal file
@@ -0,0 +1,43 @@
---

- name: Inbound Webhook
  hosts: localhost
  match_multiple_rules: true

  sources:
    - name: Webhook
      ansible.eda.webhook:
        host: 0.0.0.0
        port: "{{ GIT_EVENT_RULEBOOK_PORT | default(5000) | int }}"
        token: "{{ GIT_EVENT_RULEBOOK_TOKEN | default('-not-set-') }}"

  rules:


    - name: Show workflow_job Action
      condition: >
        event.meta.headers['X-GitHub-Event'] == 'workflow_job'
      actions:

        - debug:
            msg: |-
              Received workflow_job event from {{ event.payload.sender.username + ' ' -}}
              for repository {{ event.payload.repository.full_name + ' ' -}}
              with action of {{ event.payload.action }}


    - name: Process Completed workflow_job
      # yamllint disable rule:indentation
      condition: >
        event.meta.headers['X-GitHub-Event'] == 'workflow_job'
        and
        event.payload.action == 'completed'
      # yamllint enable rule:indentation
      actions:

        - run_playbook:
            name: nofusscomputing.git_events.problem_matcher
            verbosity: 2
            extra_vars:
              payload: "{{ event.payload }}"
              ansible_connection: local

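To exercise the rulebook above without a real Git[ea/hub] instance, a fake `workflow_job` event can be posted to the `ansible.eda.webhook` source by hand. A minimal sketch, assuming the default port `5000` and a payload trimmed to just the fields the rules and playbook read (a real event carries far more):

``` bash
# Post a fabricated 'completed' workflow_job event; the Bearer token must match
# the GIT_EVENT_RULEBOOK_TOKEN passed to the container.
curl -X POST "http://localhost:5000/" \
  -H "Authorization: Bearer ${GIT_EVENT_RULEBOOK_TOKEN}" \
  -H "X-GitHub-Event: workflow_job" \
  -H "Content-Type: application/json" \
  -d '{
    "action": "completed",
    "sender": { "username": "example-user" },
    "repository": {
      "full_name": "example/repo",
      "url": "https://gitea.example.org/api/v1/repos/example/repo"
    },
    "workflow_job": { "id": 1, "head_sha": "0000000000000000000000000000000000000000" }
  }'
```
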
46  galaxy.yml  Normal file
@@ -0,0 +1,46 @@
---

namespace: nofusscomputing

name: git_events

version: 0.0.1

readme: README.md

authors:
  - No Fuss Computing

description: Git[ea/hub] Problem matcher parser with PR Code Review
license:
  - MIT

license_file: LICENCE

tags:
  - ci
  - event
  - rulebook
  - tools

dependencies: {}

repository: https://nofusscomputing.com/git/ansible-collections/git-events

documentation: https://nofusscomputing.com/git/ansible-collections/git-events

homepage: https://nofusscomputing.com/git/ansible-collections/git-events

issues: https://nofusscomputing.com/git/ansible-collections/git-events/issues

build_ignore: [
  '.ansible',
  'artifacts/',
  '.cz.yaml',
  '.dockerignore',
  'dockerfile',
  '.git',
  'galaxy.yml',
  '*.tmp.*'

]

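With the metadata above, the collection can be packaged and installed using the standard galaxy tooling; the built artifact name follows the `<namespace>-<name>-<version>` convention:

``` bash
# Build and install the collection locally.
ansible-galaxy collection build .
ansible-galaxy collection install nofusscomputing-git_events-0.0.1.tar.gz
```
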
18  includes/entrypoint.sh  Normal file
@@ -0,0 +1,18 @@
#!/bin/sh

set -e

if [ $# -eq 0 ]; then

  cd ${HOME};

  ansible-rulebook \
    -r nofusscomputing.git_events.${GIT_EVENT_RULEBOOK_NAME} \
    --env-vars GIT_EVENT_RULEBOOK_PORT,GIT_EVENT_RULEBOOK_TOKEN \
    -v;

else

  exec "$@"

fi

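The entrypoint above only starts `ansible-rulebook` when the container is given no arguments; anything else is exec'd as-is, which is handy for debugging. A sketch, reusing the hypothetical local tag from the build example earlier:

``` bash
# No arguments: runs the problem_matcher rulebook via ansible-rulebook.
docker run --rm git-event-problem-matcher:local

# With arguments: they are exec'd instead, e.g. an interactive shell.
docker run --rm -it git-event-problem-matcher:local sh
```
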
6  includes/home/eda/hosts.yaml  Normal file
@@ -0,0 +1,6 @@
---
all:
  hosts:
    localhost:
  vars:
    ansible_connection: local

309  includes/usr/bin/annotations.py  Executable file
@@ -0,0 +1,309 @@
#!/usr/bin/env python

import sys
import re
import json
# import requests
import os

# API Docs: https://docs.github.com/en/rest/pulls/reviews?apiVersion=2022-11-28#create-a-review-for-a-pull-request

def default_matcher( entry, tool_name = '' ) -> dict:

    if tool_name == 'default':
        tool_name = ''
    else:
        tool_name = tool_name + ' '

    filename = str(entry['file'])

    if filename.startswith('./'):

        filename = str(entry['file'])[2:]

    admonition_level = 'NOTE'
    if str(entry['type']).upper() in [ 'ERROR' ]:

        admonition_level = 'IMPORTANT'


    elif str(entry['type']).upper() in [ 'WARNING' ]:

        admonition_level = 'WARNING'


    body = str(
        f"> [!{admonition_level}]"
        "\n>"
        f"\n> **{tool_name}Severity:** _{str(entry['type']).lower()}_ "
        f"\n> **file**: _{filename}_ "
        f"**Line**: _{str(entry['line'])}_ **Column**: _{str(entry['column'])}_"
        "\n>"
        f"\n> {str(entry['text'])}"
        "\n>"
    )

    return {
        "body": body,
        "new_position": int(entry['line']),
        "old_position": 0,
        "path": filename
    }



def pylint_matcher( entry ) -> dict:

    if not entry.get('line', int(1)):

        comment_line = 1

    else:

        comment_line = int(entry.get('line', int(1)))

    severity = str(entry['severity']).lower()
    admonition_level = 'NOTE'

    if severity in [ 'major' ]:

        admonition_level = 'IMPORTANT'

    if severity in [ 'minor' ]:

        admonition_level = 'WARNING'

    body = str(
        f"> [!{admonition_level}] "
        f"\n> "
        f"\n>**PyLint Severity**: {severity} "
        f"\n>**file**: _{entry['path']}_ "
        f"**Line**: _{entry.get('line', 0)}_ "
        f"\n>"
        f"\n> [{entry['check_name']}]({entry['url']}): {entry['description']} "
        f"\n>"
    )


    if(
        entry.get('body', '') != 'None'
        and entry.get('body', '') != ''
        and entry.get('body', '') is not None
    ):

        body = body + str(
            f"\n>_{entry.get('body', '')}_ "
            f"\n>"
        )


    return {
        "body": body,
        "new_position": comment_line,
        "old_position": 0,
        "path": str(entry['path'])
    }




regex = {

    "default": os.getenv("PROBLEM_MATCHER_REGEX",
        r"::(?P<type>\S+)\s+"
        r"(?:file=)(?P<file>.+?),"
        r"(?:line=)(?P<line>\d+),"
        r"(?:col=)(?P<column>\d+).+?"
        # r"\s\[(?P<rule>\S+)]\s(?P<text>.+)"
        r"\s(?P<text>.+)"
    ),

    # \{\s*"type":\s*"(?P<type>[^"]+)",\s*"check_name":\s*"(?P<check_name>[^"]+)",\s*"categories":\s*\[(?P<categories>[^\]]*)\],\s*"url":\s*"(?P<url>[^"]+)",\s*"severity":\s*"(?P<severity>[^"]+)",\s*"description":\s*"(?P<description>[^"]+)",\s*"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)"(?:,\s*"lines":\s*\{\s*"begin":\s*(?P<line>\d+)\})?.*?\}},(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?
    "pylint-json": str(
        # r'\{\s*"type":\s*"(?P<type>[^"]+)",\s*'
        # r'"check_name":\s*"(?P<check_name>[^"]+)",\s*'
        # r'"categories":\s*\[(?P<categories>[^\]]*)\],\s*'
        # r'"url":\s*"(?P<url>[^"]+)",\s*'
        # r'"severity":\s*"(?P<severity>[^"]+)",\s*'
        # r'"description":\s*"(?P<description>[^"]+)",\s*'
        # r'"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*'
        # r'"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)'
        # # r'"(?:,\s*"lines":\s*\{\s*"begin":\s*(?P<line>\d+)\})?.*?\}},'
        # r'(?:(?:,\s*"lines":\s*\{\s*"begin":\s*)|(?:{"line":\s))(?P<line>\d+)?.*?\}},'
        # r'(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?'

        # \{\s*"type":\s*"(?P<type>[^"]+)",\s*"check_name":\s*"(?P<check_name>[^"]+)",\s*"categories":\s*\[(?P<categories>[^\]]*)\],\s*"url":\s*"(?P<url>[^"]+)",\s*"severity":\s*"(?P<severity>[^"]+)",\s*"description":\s*"(?P<description>[^"]+)",\s*"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)".+?"line[s]?":.+?(?P<line>\d+)?.*?\}},(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?

        r'\{\s*"type":\s*"(?P<type>[^"]+)",\s*'
        r'"check_name":\s*"(?P<check_name>[^"]+)",\s*'
        r'"categories":\s*\[(?P<categories>[^\]]*)\],\s*'
        r'"url":\s*"(?P<url>[^"]+)",\s*'
        r'"severity":\s*"(?P<severity>[^"]+)",\s*'
        r'"description":\s*"(?P<description>[^"]+)",\s*'
        r'"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*'
        r'"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)".+?'
        r'"line[s]?":.+?(?P<line>\d+).*?\}}'
        r'(?:,\s"content":\s\{"body":\s"(?P<body>.+?)")?'
    )
}



results = {}

NFC_PROBLEM_MATCHER = False

pull_request: int = None

matcher = re.compile(r'NFC_PROBLEM_MATCHER=(?P<pull_number>\d+)')
matcher_type = re.compile(r'NFC_PROBLEM_MATCHER_TYPE=(?P<type>[a-zA-Z_-]+)')

regex_type = 'default'
pattern = re.compile( regex[regex_type] )
matcher_name = 'Default Matcher'

for line in sys.stdin:

    match_matcher_type = matcher_type.search(line)

    if match_matcher_type:
        regex_type = match_matcher_type['type']
        matcher_name = match_matcher_type['type']

        if regex_type in regex:

            pattern = re.compile( regex[regex_type] )

        else:

            pattern = re.compile( regex['default'] )

    match = pattern.finditer(line)

    problem_matcher = matcher.search(line)

    if problem_matcher:

        NFC_PROBLEM_MATCHER = True

        pull_request = int(problem_matcher['pull_number'])


    if match:

        if matcher_name not in results:
            results[matcher_name] = []


        for obj in match:

            results[matcher_name].append(obj.groupdict())



if not NFC_PROBLEM_MATCHER:

    print(json.dumps({
        'pull_request': ''
    }, indent=4))

    sys.exit(0)


if not results:
    print("No matching lines found.")
    sys.exit(0)


api_body: dict = {
    "body": "boo",
    "comments": [],
    "commit_id": os.getenv("GITHUB_SHA"),
    "event": "REQUEST_CHANGES"
}


type_count = {}

for tool, tool_results in results.items():

    for entry in tool_results:

        if tool == 'pylint-json':

            api_body['comments'] += [ pylint_matcher( entry ) ]

        else:

            api_body['comments'] += [ default_matcher( entry, tool_name = tool ) ]

        if tool not in type_count:

            type_count[tool] = 1

        else:

            type_count[tool] += 1


review_body = {
    'header': str(
        '## :no_entry_sign: Annotations found \n' \
        f'@{os.getenv("GITHUB_ACTOR")}, \n\n'
        'I found some issues that need addressing. \n\n'
    )
}


for msg_type, cnt in type_count.items():

    if msg_type not in review_body:

        review_body[msg_type] = str('| Type | Count | \n|:---|:---:| \n')

    review_body[msg_type] += f'| {msg_type} | {cnt} | \n'


api_body['body'] = review_body['header']


for msg_type, value in review_body.items():

    if msg_type != 'header':

        api_body['body'] += str(
            f'### {msg_type} issues found '
            '\n'
            f'{value}\n'
            '\n'
        )


if len(api_body['comments']) == 0:

    api_body.update({
        'body': "G'day, I didn't find any problems to report on",
        'event': 'APPROVE'
    })


data = {
    "pull_request": pull_request,
    "api_body": api_body
}

print(json.dumps(data, indent=4))


# URL = os.getenv("GITHUB_API_URL") + '/repos/' + os.getenv("GITHUB_REPOSITORY") + '/pulls/' + os.getenv("GITHUB_REF_NAME") + '/reviews?token=' + str(os.getenv("AGITHUB_TOKEN"))
# try:
#     response = requests.post(URL, json=api_body)
#     response.raise_for_status()
#     print(f"\n✅ Successfully posted to {URL}")
#     print(f"🔁 Server responded with: {response.status_code} {response.reason}")
# except requests.exceptions.RequestException as e:
#     print(f"\n❌ Failed to post to {URL}")
#     print(f"Error: {e}")
#     sys.exit(1)

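The script above reads a raw job log on stdin, switches regexes whenever it sees an `NFC_PROBLEM_MATCHER_TYPE` marker, and prints the review payload as JSON on stdout (the image installs it as `/usr/bin/annotations`). A rough stand-alone check with fabricated log lines (illustrative values only):

``` bash
# GITHUB_ACTOR / GITHUB_SHA feed the review body and commit_id fields.
export GITHUB_ACTOR=example-user
export GITHUB_SHA=0000000000000000000000000000000000000000

printf '%s\n' \
  'NFC_PROBLEM_MATCHER=42' \
  'NFC_PROBLEM_MATCHER_TYPE=My-Job-Name' \
  '::error file=playbooks/problem_matcher.yaml,line=3,col=1 an illustrative finding' \
  | python3 includes/usr/bin/annotations.py
# Prints {"pull_request": 42, "api_body": {...}}, which is the structure the playbook POSTs as a review.
```
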
52  meta/runtime.yml  Normal file
@@ -0,0 +1,52 @@
---
# Collections must specify a minimum required ansible version to upload
# to galaxy
requires_ansible: '>=2.18.0'

# Content that Ansible needs to load from another location or that has
# been deprecated/removed
# plugin_routing:
#   action:
#     redirected_plugin_name:
#       redirect: ns.col.new_location
#     deprecated_plugin_name:
#       deprecation:
#         removal_version: "4.0.0"
#         warning_text: |
#           See the porting guide on how to update your playbook to
#           use ns.col.another_plugin instead.
#     removed_plugin_name:
#       tombstone:
#         removal_version: "2.0.0"
#         warning_text: |
#           See the porting guide on how to update your playbook to
#           use ns.col.another_plugin instead.
#   become:
#   cache:
#   callback:
#   cliconf:
#   connection:
#   doc_fragments:
#   filter:
#   httpapi:
#   inventory:
#   lookup:
#   module_utils:
#   modules:
#   netconf:
#   shell:
#   strategy:
#   terminal:
#   test:
#   vars:

# Python import statements that Ansible needs to load from another location
# import_redirection:
#   ansible_collections.ns.col.plugins.module_utils.old_location:
#     redirect: ansible_collections.ns.col.plugins.module_utils.new_location

# Groups of actions/modules that take a common set of options
# action_groups:
#   group_name:
#     - module1
#     - module2

133  playbooks/problem_matcher.yaml  Normal file
@@ -0,0 +1,133 @@
---
- name: Git Problem Matcher
  gather_facts: false
  hosts: localhost


  tasks:


    - name: Get facts from Environment
      ansible.builtin.set_fact:
        git_api_url: "{{ lookup('env', 'GIT_INTERNAL_API_URL') | default(payload.repository.url) }}"
        gitea_replace_url: "{{ lookup('env', 'GIT_API_URL') | default(payload.repository.url) }}"
        disable_logging: "{{ not lookup('env', 'ENABLE_DEBUG_LOGGING') | bool | default(false) }}"


    - name: Set var git_api_url if empty
      ansible.builtin.set_fact:
        git_api_url: "{{ payload.repository.url }}"
      when: >
        git_api_url == ""


    - name: Set var gitea_replace_url if empty
      ansible.builtin.set_fact:
        gitea_replace_url: "{{ payload.repository.url }}"
      when: >
        gitea_replace_url == ""


    - name: Set required Facts
      ansible.builtin.set_fact:
        git_url_api: >-
          {{ payload.repository.url |
            replace((gitea_replace_url | split('/api/'))[0], git_api_url) }}
        git_url_path_jobs: 'actions/jobs'
        head_sha: "{{ payload.workflow_job.head_sha }}"


    - name: Ensure API Token is defined
      ansible.builtin.assert:
        that:
          - lookup('env', 'GIT_API_TOKEN') is defined
        msg: Environmental variable `GIT_API_TOKEN` must be defined


    - name: Ensure required variables exist
      ansible.builtin.assert:
        that:
          - lookup('env', 'GIT_API_TOKEN') | length > 0
        msg: Environmental variable `GIT_API_TOKEN` must not be empty


    - name: Fetch job log
      ansible.builtin.uri:
        url: >-
          {{ git_url_api + '/' + git_url_path_jobs
            + '/' + payload.workflow_job.id | string + '/logs' }}
        dest: /tmp/job.log
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: GET
        return_content: true
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"


    - name: Trace
      ansible.builtin.shell:
        cmd: |
          set -o pipefail;

          export GITHUB_ACTOR={{ payload.sender.username }};

          export GITHUB_SHA={{ payload.workflow_job.head_sha }};

          cat /tmp/job.log | annotations > /tmp/annotations.json;

        executable: sh
      changed_when: false


    - name: Load annotations
      ansible.builtin.set_fact:
        annotations: "{{ lookup('file', '/tmp/annotations.json') | from_yaml }}"


    - name: Fetch Pull Request
      ansible.builtin.uri:
        url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string }}"
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: GET
        return_content: true
        status_code:
          - 200
          - 404
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"
      register: http_get_pull_request
      when: >
        annotations.pull_request | string


    - name: Trace - Display Pull Request State
      ansible.builtin.debug:
        msg: "{{ http_get_pull_request.json.state | default('No PR found') }}"
      when: >
        not http_get_pull_request.skipped | default(false) | bool


    - name: Post review
      ansible.builtin.uri:
        url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string + '/reviews' }}"
        body: "{{ annotations.api_body }}"
        body_format: json
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: POST
        return_content: true
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"
      # yamllint disable rule:indentation
      when: >
        http_get_pull_request.json.state | default('-') != 'closed'
        and
        http_get_pull_request.status | default(0) == 200
        and
        not http_get_pull_request.skipped | default(false) | bool
      # yamllint enable rule:indentation

5  requirements.txt  Normal file
@@ -0,0 +1,5 @@

ansible-core==2.18.6
ansible-rulebook==1.1.7

requests