Compare commits

42 Commits

Author SHA1 Message Date
5feba1c27c Merge pull request 'fix: No problems found' (#6) from fix-5-no-promlems-approve-pr into development
Reviewed-on: #6
2025-07-12 14:11:40 +00:00
Jon
e03a201c30 chore: linting fix
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 29s
ref: #6
2025-07-12 23:38:11 +09:30
Jon
ec9304fdc5 fix(problem_matcher): correct regex for when pylint content is not available
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 29s
ref: #6
2025-07-12 23:35:04 +09:30
Jon
f10663a2d5 ci: add ansible-lint
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 43s
ref: #6
2025-07-12 22:43:03 +09:30
Jon
eb11625b66 fix(problem_matcher): On approval body is required contrary to docs
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 9s
ref: #6 #5
2025-07-12 22:35:02 +09:30
Jon
5e69fb2807 chore: linting fixes
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 19s
ref: #6
2025-07-12 21:44:17 +09:30
Jon
4de933151a ci(lint): add yaml lint file
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in -1m8s
ref: #6
2025-07-12 21:31:59 +09:30
Jon
a2b935c5cb fix(problem_matcher): Ensure commit var set for annotation parsing
ref: #6
2025-07-12 21:31:41 +09:30
Jon
4b9133a626 ci: enable problem matcher parsing
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in -1m9s
ref: #6 #5
2025-07-12 21:25:30 +09:30
Jon
dc57a65a1c feat(problem_matcher): when not enabled, dont process further
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 9s
ref: #6
2025-07-12 21:14:28 +09:30
Jon
d60383e6f6 ci: Add PR Linting
All checks were successful
Lint (Pull Request) / lint (pull_request) Successful in 8s
ref: #6
2025-07-12 19:15:50 +09:30
Jon
9727594d89 feat(annotations): when no problem found, remove comment and set to approve PR
ref: #6 #5
2025-07-12 19:14:47 +09:30
Jon
6d501e25d9 docs: update readme with repo ci requirement
ref: #1
2025-07-08 00:28:09 +09:30
Jon
3629b2ff21 fix(problem-matcher): Ensure that if env vars are empy in playbook, they are set to defaults
ref: ansible-collections/git-events#1 clusters/.profile#12
2025-07-07 23:53:53 +09:30
c50be1bb4c Merge pull request 'ci: add ci for both Gitea and Github repos' (#4) from gitea-github-ci into development
Reviewed-on: #4
2025-07-01 17:28:14 +00:00
Jon
c6777a9423 ci(github): Add github build to GHCR
ref: #4 #1
2025-07-02 01:01:26 +09:30
Jon
d29ae2b0f0 chore: confirm gitea folder empty, does not run github jobs
ref: #4 #1
2025-07-02 00:50:23 +09:30
Jon
3a1bf95099 chore: add test to confirm only gitea workflows run on gitea if github dir also exists
All checks were successful
Pull Requests / ci-test (pull_request) Successful in 2s
ref: #4 #1
2025-07-02 00:49:17 +09:30
Jon
97c196e6d8 docs(readme): add short how to use
ref: #1
2025-07-02 00:42:08 +09:30
Jon
6a2a8f04a3 refactor: give env vars names that match the where
ref: #1
2025-07-02 00:20:07 +09:30
e61f22c958 Merge pull request 'feat: furtherer' work and refinements' (#3) from annotation-cleanup into development
Reviewed-on: #3
2025-07-01 14:11:05 +00:00
Jon
b928a044f7 refactor(docker): use arg to set rulebook name
ref: #3 #1
2025-07-01 23:28:32 +09:30
Jon
9f3b07c9f8 refactor(rulebook): rulebook should share playbook name
ref: #3 #1
2025-07-01 23:25:47 +09:30
Jon
27fcf1fde4 chore(rulebook): add user and repo to log out
ref: #3 #1
2025-07-01 20:54:30 +09:30
Jon
9516803f78 chore(parser): code cleanup
ref: #3 #1
2025-07-01 20:53:43 +09:30
Jon
55b7dc396f fix(parser): allow upper case leters in matchers name
ref: #3 #1
2025-07-01 20:53:04 +09:30
Jon
598cd7db5c refactor(parser): store matcher name seperatly
ref: #3 #1
2025-07-01 20:52:35 +09:30
Jon
b18043c2f0 feat(parser): enable setting mather name for defaul regex
ref: #3 #1
2025-07-01 20:00:55 +09:30
Jon
f53cd10241 refactor(parser): clean up pylint admonition vars
ref: #3 #1
2025-07-01 19:59:58 +09:30
Jon
ea2f7e76cb feat(parser): admonition layout set to be same as pylint for default matcherr
ref: #3 #1
2025-07-01 19:59:25 +09:30
Jon
b8fb75e6cd feat(parser): add table for each tool used for annotations
ref: #3 #1
2025-07-01 18:22:49 +09:30
73123d7b4c Merge pull request 'feat: create' (#2) from genesis into development
Reviewed-on: #2
2025-06-30 20:21:12 +00:00
Jon
aa368e4bfb feat(docker): add alpine build
ref: #2 #1
2025-07-01 03:56:59 +09:30
Jon
a7b2145886 docs: add skeleton contributing guide
ref: #2 #1
2025-07-01 02:11:34 +09:30
Jon
ab3560bad3 chore: update galaxy meta files
ref: #2 #1
2025-07-01 02:10:21 +09:30
Jon
7d3a8d833f feat(parser): make pylint comment prettier
ref: #2 #1
2025-07-01 01:57:34 +09:30
Jon
8d6245779c docs: add readme
ref: #2 #1
2025-06-30 08:26:31 +09:30
Jon
6dd49ee14b feat(docker): initial
ref: #2 #1
2025-06-30 08:25:23 +09:30
Jon
5cd30f0bc0 feat(rulebook): initial webhook
ref: #2 #1
2025-06-30 08:24:58 +09:30
Jon
8a2826ca34 feat(playbook): initial problem_matcher
ref: #2 #1
2025-06-30 08:24:39 +09:30
Jon
2e0062062a feat(script): initial parser script
ref: #2 #1
2025-06-30 08:24:17 +09:30
Jon
358bc0394e feat(inventory): Add inventory for container image
ref: #2 #1
2025-06-30 08:23:46 +09:30
20 changed files with 979 additions and 0 deletions

23
.cz.yaml Normal file
View File

@ -0,0 +1,23 @@
---
# Commitizen configuration (cz_customize) — conventional-commit parsing,
# change-log section naming/ordering and version handling.
commitizen:
  customize:
    change_type_map:
      feature: Features
      fix: Fixes
      refactor: Refactoring
      test: Tests
    change_type_order:
      - BREAKING CHANGE
      - feat
      - fix
      - test
      - refactor
    # yamllint disable rule:line-length
    commit_parser: ^(?P<change_type>feat|fix|test|refactor|perf|BREAKING CHANGE)(?:\((?P<scope>[^()\r\n]*)\)|\()?(?P<breaking>!)?:\s(?P<message>.*)?
    # yamllint enable rule:line-length
  name: cz_customize
  prerelease_offset: 1
  tag_format: $version
  update_changelog_on_bump: false
  version: 0.0.1
  version_scheme: semver

7
.dockerignore Normal file
View File

@ -0,0 +1,7 @@
# Paths excluded from the Docker build context.
.ansible
.git
CONTRIBUTING.md
docs/
galaxy.yml
*.tmp.*

0
.gitea/.gitkeep Normal file
View File

View File

View File

@ -0,0 +1,42 @@
---
name: Lint (Pull Request)


on:
  pull_request: {}


jobs:

  lint:

    runs-on: ubuntu-latest

    steps:

      # Emitting NFC_PROBLEM_MATCHER=<pr number> in the log enables the
      # downstream problem-matcher parser for this job.
      - name: Enable Matcher Service
        run: |
          echo "NFC_PROBLEM_MATCHER=${GITHUB_REF_NAME}";

      - uses: actions/checkout@v3

      - name: Install YAMLLint
        run: pip install yamllint

      # `|| true` — linting must not fail the job; findings are collected
      # from the log by the problem-matcher parser instead.
      - name: Run YAMLLint
        run: |
          echo "NFC_PROBLEM_MATCHER_TYPE=YAML-Lint"
          yamllint -f github . || true

      - name: Install Ansible-Lint
        run: pip install ansible-lint

      - name: Run Ansible-Lint
        run: |
          echo "NFC_PROBLEM_MATCHER_TYPE=pylint-json";
          ansible-lint -f json . || true

25
.github/workflows/ci.yaml vendored Normal file
View File

@ -0,0 +1,25 @@
---
name: 'CI'


on:
  push:
    branches:
      - '**'
    tags:
      - '*'


jobs:

  # Reusable workflow builds and publishes the container image.
  docker:
    name: 'Docker'
    uses: nofusscomputing/action_docker/.github/workflows/docker.yaml@development
    with:
      DOCKER_BUILD_IMAGE_NAME: "nofusscomputing/git-event-problem-matcher"
      DOCKER_PUBLISH_REGISTRY: "docker.io"
      DOCKER_PUBLISH_IMAGE_NAME: "nofusscomputing/git-event-problem-matcher"
    secrets:
      DOCKER_PUBLISH_USERNAME: ${{ secrets.NFC_DOCKERHUB_USERNAME }}
      DOCKER_PUBLISH_PASSWORD: ${{ secrets.NFC_DOCKERHUB_TOKEN }}

17
.github/workflows/pull_request.yaml vendored Normal file
View File

@ -0,0 +1,17 @@
---
name: Pull Requests


on:
  pull_request: {}


jobs:

  # Placeholder job confirming GitHub workflows run for this repo.
  ci-test:

    runs-on: ubuntu-latest

    steps:

      - name: Test
        run: |
          echo "github";

5
.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Temp directories / files
.ansible/
artifacts/

# Scratch files
*.tmp.*

77
.yamllint Normal file
View File

@ -0,0 +1,77 @@
---
# yamllint configuration for this repository.
# extends: default

ignore:
  - '.github/'
  - '**/crd/**'
  - mkdocs.yml
  - '*PrometheusRule*'
  - '**/source/**'

rules:

  braces:
    level: error
    max-spaces-inside: 1
    min-spaces-inside: 1
    min-spaces-inside-empty: 0
    max-spaces-inside-empty: 0

  brackets:
    level: error
    max-spaces-inside: 1
    min-spaces-inside: 1
    min-spaces-inside-empty: 0
    max-spaces-inside-empty: 0

  colons:
    level: warning
    max-spaces-after: 1

  commas:
    level: warning

  comments:
    level: error
    require-starting-space: true
    ignore-shebangs: true
    min-spaces-from-content: 4

  comments-indentation:
    level: error

  document-end:
    level: error
    present: false

  document-start:
    level: error
    present: true

  empty-lines:
    level: error
    max: 3
    max-start: 0
    max-end: 0

  hyphens:
    level: error
    max-spaces-after: 1

  indentation:
    level: error
    spaces: 2
    indent-sequences: true
    check-multi-line-strings: true

  line-length:
    level: warning
    max: 100
    allow-non-breakable-inline-mappings: true

  new-lines:
    level: error
    type: unix

  truthy: disable

5
CONTRIBUTING.md Normal file
View File

@ -0,0 +1,5 @@
# Contribution Guide
Contributions welcome.
This doc is still a WIP.

66
README.md Normal file
View File

@ -0,0 +1,66 @@
# No Fuss Computings Git[ea/hub] Event Processing
Documentation for the collection.
## TL;DR
| Name | required | Description |
|:---:|:---:|:---|
| GIT_API_TOKEN | :white_check_mark: | API token to access Git[ea/hub] to post PR Review. |
| GIT_API_URL | :white_check_mark: | API URL to access Git[ea/hub]. To create random one `echo $(head -c 50 /dev/urandom | xxd -p | head -c 50)` |
| GIT_INTERNAL_API_URL | :x: | An internal URL to use in place of the public API URL. i.e. DMZ url. |
| GIT_EVENT_RULEBOOK_TOKEN | :white_check_mark: | The token to set for the inbound connection to the container. |
| GIT_EVENT_RULEBOOK_PORT | :x: | The port to listen for inbound webhooks. Defaults to 5000 |
| ENABLE_DEBUG_LOGGING | :x: | Turn on playbook debug logging. Defaults to `false` :warning: Doing this will output your auth tokens to the log. |
### Steps
1. deploy somewhere that git[ea/hub] has access to the container
1. ensure vars above are set within the container
1. **For Gitea** Go to `Site Administration -> Integrations -> Webhooks`
1. Add a system webhook
1. set the http url to the container ip/dns name. ensure the port is specified, i.e. suffix `:<port number>`
1. select `Trigger On -> Workflow Jobs`
1. set `Authorization Header` to `Bearer <actual value of GIT_EVENT_RULEBOOK_TOKEN>`
1. click `Update Webhook` to save
1. you are now GTG and all jobs will get posted to the container for processing.
### Setup Parsing of matchers
1. Before any parsing can be done the following must be output with the id of the pull request to enable the problem matcher parsing.
``` bash
echo "NFC_PROBLEM_MATCHER=${GITHUB_REF_NAME}";
```
1. Ansible Lint
1. before pylint runs, ensure the following commands are executed in your workflow.
``` bash
echo "NFC_PROBLEM_MATCHER_TYPE=pylint-json";
```
1. the output format for pylint is json. i.e. `ansible-lint -f json .`
1. Parsing normal GitHub Problem matchers
1. Before the job runs, give the matcher a name, no spaces, only letters and can have `-` and `_`
``` bash
echo "NFC_PROBLEM_MATCHER_TYPE=My-Job-Name";
```
1. now run the job that outputs in standard GitHub style problem matchers.
1. Now the user will have a PR reviews done with the contents of the problem matcher(s) as review comments.

100
dockerfile Normal file
View File

@ -0,0 +1,100 @@
ARG GIT_EVENT_RULEBOOK_NAME='problem_matcher'


# Build stage: pre-build python wheels (some deps need gcc / maven / a JDK
# on alpine).  Stage name lowercased to match the `COPY --from=build` below.
FROM python:3.11-alpine3.22 AS build

RUN apk update; \
    apk add \
        build-base \
        gcc;

RUN pip install --upgrade \
    setuptools \
    wheel

RUN apk add openjdk21-jdk;

RUN apk add \
    alpine-sdk \
    libffi-dev \
    maven \
    build-base libc-dev;

# ENV uses key=value form; the space-separated `ENV key value` form is
# deprecated by Docker.
ENV JAVA_HOME=/usr/lib/jvm/java-21-openjdk

# ChatGPT suggestion to fix alpine version >3.19
ENV CFLAGS="-Wno-incompatible-pointer-types"

COPY requirements.txt /tmp/requirements.txt

RUN mkdir -p /tmp/python_modules; \
    cd /tmp/python_modules; \
    pip download --dest . \
        pip \
        --check-build-dependencies \
        -r /tmp/requirements.txt

RUN cd /tmp/python_modules; \
    mkdir -p /tmp/python_builds; \
    echo "[DEBUG] PATH=$PATH"; \
    ls -l; \
    pip wheel --wheel-dir /tmp/python_builds --find-links . *.whl; \
    pip wheel --wheel-dir /tmp/python_builds --find-links . *.tar.gz;


# Runtime stage: install the pre-built wheels and the collection, then run
# as the unprivileged `eda` user.
FROM python:3.11-alpine3.22

ARG GIT_EVENT_RULEBOOK_NAME

RUN apk --no-cache update; \
    apk --no-cache add \
        openjdk21-jdk

ENV ANSIBLE_FORCE_COLOR=true

ENV ANSIBLE_INVENTORY=hosts.yaml

ENV JAVA_HOME=/usr/lib/jvm/java-21-openjdk

ENV GIT_EVENT_RULEBOOK_NAME=${GIT_EVENT_RULEBOOK_NAME}

COPY includes/ /

COPY . /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/

COPY --from=build /tmp/python_builds /tmp/python_builds

RUN pip install --no-cache-dir /tmp/python_builds/*; \
    rm -R /tmp/python_builds; \
    ansible-galaxy collection install ansible.eda; \
    addgroup eda; \
    adduser -D --ingroup eda eda; \
    cp -r /root/.ansible /home/eda/; \
    rm -rf /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/includes; \
    mv /usr/bin/annotations.py /usr/bin/annotations; \
    chmod +x /usr/bin/annotations; \
    chmod +x /entrypoint.sh; \
    chown eda:eda -R /home/eda;

WORKDIR /home/eda

USER eda

ENTRYPOINT [ "/entrypoint.sh" ]

View File

@ -0,0 +1,43 @@
---
# EDA rulebook: listens for Git[ea/hub] webhooks and runs the
# problem_matcher playbook for completed workflow jobs.
- name: Inbound Webhook
  hosts: localhost
  match_multiple_rules: true

  sources:

    - name: Webhook
      ansible.eda.webhook:
        host: 0.0.0.0
        port: "{{ GIT_EVENT_RULEBOOK_PORT | default(5000) | int }}"
        token: "{{ GIT_EVENT_RULEBOOK_TOKEN | default('-not-set-') }}"

  rules:

    - name: Show workflow_job Action
      condition: >
        event.meta.headers['X-GitHub-Event'] == 'workflow_job'
      actions:
        - debug:
            msg: |-
              Received workflow_job event from {{ event.payload.sender.username + ' ' -}}
              for repository {{ event.payload.repository.full_name + ' ' -}}
              with action of {{ event.payload.action }}

    - name: Process Completed workflow_job
      # yamllint disable rule:indentation
      condition: >
        event.meta.headers['X-GitHub-Event'] == 'workflow_job'
        and
        event.payload.action == 'completed'
      # yamllint enable rule:indentation
      actions:
        - run_playbook:
            name: nofusscomputing.git_events.problem_matcher
            verbosity: 2
            extra_vars:
              payload: "{{ event.payload }}"
              ansible_connection: local

46
galaxy.yml Normal file
View File

@ -0,0 +1,46 @@
---
# Ansible Galaxy collection manifest.
namespace: nofusscomputing
name: git_events
version: 0.0.1
readme: README.md

authors:
  - No Fuss Computing

description: Git[ea/hub] Problem matcher parser with PR Code Review

license:
  - MIT

license_file: LICENCE

tags:
  - ci
  - event
  - rulebook
  - tools

dependencies: {}

repository: https://nofusscomputing.com/git/ansible-collections/git-events
documentation: https://nofusscomputing.com/git/ansible-collections/git-events
homepage: https://nofusscomputing.com/git/ansible-collections/git-events
issues: https://nofusscomputing.com/git/ansible-collections/git-events/issues

# Paths excluded from the built collection artifact.  Block sequence with
# every entry quoted (the flow list previously mixed quoted and unquoted
# entries).
build_ignore:
  - '.ansible'
  - 'artifacts/'
  - '.cz.yaml'
  - '.dockerignore'
  - 'dockerfile'
  - '.git'
  - 'galaxy.yml'
  - '*.tmp.*'

18
includes/entrypoint.sh Normal file
View File

@ -0,0 +1,18 @@
#!/bin/sh
set -e

# Container entrypoint.
#
# With no arguments: start ansible-rulebook serving the rulebook selected by
# GIT_EVENT_RULEBOOK_NAME.  With arguments: exec them instead (allows
# `docker run <image> sh` style debugging).
#
# Variable expansions are quoted so paths/names containing whitespace do not
# undergo word splitting.
if [ $# -eq 0 ]; then

    cd "${HOME}";

    ansible-rulebook \
        -r "nofusscomputing.git_events.${GIT_EVENT_RULEBOOK_NAME}" \
        --env-vars GIT_EVENT_RULEBOOK_PORT,GIT_EVENT_RULEBOOK_TOKEN \
        -v;

else

    exec "$@"

fi

View File

@ -0,0 +1,6 @@
---
# Inventory for the container image: everything runs locally.
all:
  hosts:
    localhost:
  vars:
    ansible_connection: local

309
includes/usr/bin/annotations.py Executable file
View File

@ -0,0 +1,309 @@
#!/usr/bin/env python
import sys
import re
import json
# import requests
import os
# API Docs: https://docs.github.com/en/rest/pulls/reviews?apiVersion=2022-11-28#create-a-review-for-a-pull-request
def default_matcher( entry, tool_name = '' ) -> dict:
    """Build a PR review comment from a default (GitHub-style) matcher result.

    Args:
        entry: regex group dict with keys `file`, `type`, `line`, `column`
            and `text` (all string values from the matcher).
        tool_name: matcher name used as a prefix in the comment body; the
            literal name 'default' is suppressed.

    Returns:
        dict shaped for the Git[ea/hub] review-comment API: `body`,
        `new_position`, `old_position` and `path`.
    """

    if tool_name == 'default':
        tool_name = ''
    else:
        tool_name = tool_name + ' '

    # Normalise `./relative/path` -> `relative/path` for the review API.
    filename = str(entry['file'])
    if filename.startswith('./'):
        filename = str(entry['file'])[2:]

    admonition_level = 'NOTE'

    if str(entry['type']).upper() in [ 'ERROR' ]:
        admonition_level = 'IMPORTANT'
    elif str(entry['type']).upper() in [ 'WARNING' ]:
        admonition_level = 'WARNING'

    # Fix: render the normalised filename in the body (was a hard-coded
    # "(unknown)" placeholder), matching what pylint_matcher does.
    body = str(
        f"> [!{admonition_level}]"
        "\n>"
        f"\n> **{tool_name}Severity:** _{str(entry['type']).lower()}_ "
        f"\n> **file**: _{filename}_ "
        f"**Line**: _{str(entry['line'])}_ **Column**: _{str(entry['column'])}_"
        "\n>"
        f"\n> {str(entry['text'])}"
        "\n>"
    )

    return {
        "body": body,
        "new_position": int(entry['line']),
        "old_position": 0,
        "path": filename
    }
def pylint_matcher( entry ) -> dict:
    """Build a PR review comment from a pylint/Code-Climate JSON entry.

    Args:
        entry: parsed match dict with `severity`, `path`, `check_name`,
            `url`, `description` and optionally `line` and `body`.

    Returns:
        dict shaped for the Git[ea/hub] review-comment API: `body`,
        `new_position`, `old_position` and `path`.
    """

    # A missing, empty or zero line number anchors the comment at line 1.
    raw_line = entry.get('line', int(1))
    comment_line = int(raw_line) if raw_line else 1

    severity = str(entry['severity']).lower()

    # Map severity onto a markdown admonition level; anything unrecognised
    # is a NOTE.
    admonition_level = {
        'major': 'IMPORTANT',
        'minor': 'WARNING',
    }.get(severity, 'NOTE')

    body = str(
        f"> [!{admonition_level}] "
        f"\n> "
        f"\n>**PyLint Severity**: {severity} "
        f"\n>**file**: _{entry['path']}_ "
        f"**Line**: _{entry.get('line', 0)}_ "
        f"\n>"
        f"\n> [{entry['check_name']}]({entry['url']}): {entry['description']} "
        f"\n>"
    )

    # Append the optional free-text body when it carries real content
    # (guard against missing key, empty string, None and the string 'None').
    extra = entry.get('body', '')
    if extra is not None and extra != '' and extra != 'None':
        body = body + str(
            f"\n>_{entry.get('body', '')}_ "
            f"\n>"
        )

    return {
        "body": body,
        "new_position": comment_line,
        "old_position": 0,
        "path": str(entry['path'])
    }
# Named regex patterns for the supported problem-matcher formats.
#
# "default" follows the GitHub workflow-command style
# (`::error file=...,line=...,col=... message`) and may be overridden at
# runtime via the PROBLEM_MATCHER_REGEX environment variable.
#
# "pylint-json" parses Code-Climate-style JSON objects (as emitted by
# `ansible-lint -f json` / pylint's JSON output), one object per match;
# the trailing non-capturing group optionally picks up a `content.body`.
regex = {
    "default": os.getenv(
        "PROBLEM_MATCHER_REGEX",
        r"::(?P<type>\S+)\s+"
        r"(?:file=)(?P<file>.+?),"
        r"(?:line=)(?P<line>\d+),"
        r"(?:col=)(?P<column>\d+).+?"
        r"\s(?P<text>.+)"
    ),
    "pylint-json": str(
        r'\{\s*"type":\s*"(?P<type>[^"]+)",\s*'
        r'"check_name":\s*"(?P<check_name>[^"]+)",\s*'
        r'"categories":\s*\[(?P<categories>[^\]]*)\],\s*'
        r'"url":\s*"(?P<url>[^"]+)",\s*'
        r'"severity":\s*"(?P<severity>[^"]+)",\s*'
        r'"description":\s*"(?P<description>[^"]+)",\s*'
        r'"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*'
        r'"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)".+?'
        r'"line[s]?":.+?(?P<line>\d+).*?\}}'
        r'(?:,\s"content":\s\{"body":\s"(?P<body>.+?)")?'
    )
}
# ---------------------------------------------------------------------------
# Main: scan a CI job log on stdin for problem-matcher output and print the
# Git[ea/hub] PR review payload as JSON (posting is done by the playbook).
# ---------------------------------------------------------------------------

results = {}
NFC_PROBLEM_MATCHER = False
pull_request: int = None

# `NFC_PROBLEM_MATCHER=<pr number>` in the log enables processing;
# `NFC_PROBLEM_MATCHER_TYPE=<name>` selects the active pattern/matcher name.
matcher = re.compile(r'NFC_PROBLEM_MATCHER=(?P<pull_number>\d+)')
matcher_type = re.compile(r'NFC_PROBLEM_MATCHER_TYPE=(?P<type>[a-zA-Z_-]+)')

regex_type = 'default'
pattern = re.compile( regex[regex_type] )
matcher_name = 'Default Matcher'

for line in sys.stdin:

    match_matcher_type = matcher_type.search(line)

    if match_matcher_type:

        # Switch active matcher; unknown names fall back to the default
        # pattern but keep the supplied name for grouping/reporting.
        regex_type = match_matcher_type['type']
        matcher_name = match_matcher_type['type']

        if regex_type in regex:
            pattern = re.compile( regex[regex_type] )
        else:
            pattern = re.compile( regex['default'] )

    match = pattern.finditer(line)

    problem_matcher = matcher.search(line)

    if problem_matcher:
        NFC_PROBLEM_MATCHER = True
        pull_request = int(problem_matcher['pull_number'])

    # NOTE(review): finditer() returns an iterator, which is always truthy,
    # so this branch runs for every line; a matcher that never matches still
    # leaves an empty list under its name.  That empty list is what later
    # drives the "no problems found -> APPROVE" path — confirm before
    # tightening this check.
    if match:

        if matcher_name not in results:
            results[matcher_name] = []

        for obj in match:
            results[matcher_name].append(obj.groupdict())

if not NFC_PROBLEM_MATCHER:
    # Processing was never enabled for this job; emit an empty payload.
    print(json.dumps({
        'pull_request': ''
    }, indent=4))
    sys.exit(0)

if not results:
    print("No matching lines found.")
    sys.exit(0)

api_body: dict = {
    "body": "boo",
    "comments": [],
    "commit_id": os.getenv("GITHUB_SHA"),
    "event": "REQUEST_CHANGES"
}

# Per-tool finding counts, used to build the summary tables below.
type_count = {}

for tool, tool_results in results.items():

    for entry in tool_results:

        if tool == 'pylint-json':
            api_body['comments'] += [ pylint_matcher( entry ) ]
        else:
            api_body['comments'] += [ default_matcher( entry, tool_name = tool ) ]

        type_count[tool] = type_count.get(tool, 0) + 1

review_body = {
    'header': str(
        '## :no_entry_sign: Annotations found \n' \
        f'@{os.getenv("GITHUB_ACTOR")}, \n\n'
        'I found some issues that need addressing. \n\n'
    )
}

# One markdown table per tool.
for msg_type, cnt in type_count.items():

    if msg_type not in review_body:
        review_body[msg_type] = str('| Type | Count | \n|:---|:---:| \n')

    review_body[msg_type] += f'| {msg_type} | {cnt} | \n'

api_body['body'] = review_body['header']

for msg_type, value in review_body.items():

    if msg_type != 'header':
        api_body['body'] += str(
            f'### {msg_type} issues found '
            '\n'
            f'{value}\n'
            '\n'
        )

if len(api_body['comments']) == 0:
    # Nothing to report: replace the review with an approval.
    api_body.update({
        'body': "G'day, I didn't find any problems to report on",
        'event': 'APPROVE'
    })

data = {
    "pull_request": pull_request,
    "api_body": api_body
}

print(json.dumps(data, indent=4))

# Direct posting kept for reference; the ansible playbook does the POST.
# URL = os.getenv("GITHUB_API_URL") + '/repos/' + os.getenv("GITHUB_REPOSITORY") + '/pulls/' + os.getenv("GITHUB_REF_NAME") + '/reviews?token=' + str(os.getenv("AGITHUB_TOKEN"))
# try:
#     response = requests.post(URL, json=api_body)
#     response.raise_for_status()
#     print(f"\n✅ Successfully posted to {URL}")
#     print(f"🔁 Server responded with: {response.status_code} {response.reason}")
# except requests.exceptions.RequestException as e:
#     print(f"\n❌ Failed to post to {URL}")
#     print(f"Error: {e}")
#     sys.exit(1)

52
meta/runtime.yml Normal file
View File

@ -0,0 +1,52 @@
---
# Collections must specify a minimum required ansible version to upload
# to galaxy
requires_ansible: '>=2.18.0'

# Content that Ansible needs to load from another location or that has
# been deprecated/removed
# plugin_routing:
#   action:
#     redirected_plugin_name:
#       redirect: ns.col.new_location
#     deprecated_plugin_name:
#       deprecation:
#         removal_version: "4.0.0"
#         warning_text: |
#           See the porting guide on how to update your playbook to
#           use ns.col.another_plugin instead.
#     removed_plugin_name:
#       tombstone:
#         removal_version: "2.0.0"
#         warning_text: |
#           See the porting guide on how to update your playbook to
#           use ns.col.another_plugin instead.
#   become:
#   cache:
#   callback:
#   cliconf:
#   connection:
#   doc_fragments:
#   filter:
#   httpapi:
#   inventory:
#   lookup:
#   module_utils:
#   modules:
#   netconf:
#   shell:
#   strategy:
#   terminal:
#   test:
#   vars:

# Python import statements that Ansible needs to load from another location
# import_redirection:
#   ansible_collections.ns.col.plugins.module_utils.old_location:
#     redirect: ansible_collections.ns.col.plugins.module_utils.new_location

# Groups of actions/modules that take a common set of options
# action_groups:
#   group_name:
#     - module1
#     - module2

View File

@ -0,0 +1,133 @@
---
# Fetch the completed job's log, run it through the `annotations` parser and
# post the result as a PR review.  Expects `payload` (webhook event payload)
# as an extra var from the rulebook.
- name: Git Problem Matcher
  gather_facts: false
  hosts: localhost

  tasks:

    # NOTE: lookup('env', ...) returns '' (not undefined) when unset, so the
    # | default(...) filters never fire; the two "if empty" tasks below do
    # the real fallback.
    - name: Get facts from Environment
      ansible.builtin.set_fact:
        git_api_url: "{{ lookup('env', 'GIT_INTERNAL_API_URL') | default(payload.repository.url) }}"
        gitea_replace_url: "{{ lookup('env', 'GIT_API_URL') | default(payload.repository.url) }}"
        disable_logging: "{{ not lookup('env', 'ENABLE_DEBUG_LOGGING') | bool | default(false) }}"

    # Fixed task name: this task sets git_api_url, but previously reused the
    # name of the gitea_replace_url task below.
    - name: Set var git_api_url if empty
      ansible.builtin.set_fact:
        git_api_url: "{{ payload.repository.url }}"
      when: >
        git_api_url == ""

    - name: Set var gitea_replace_url if empty
      ansible.builtin.set_fact:
        gitea_replace_url: "{{ payload.repository.url }}"
      when: >
        gitea_replace_url == ""

    - name: Set required Facts
      ansible.builtin.set_fact:
        git_url_api: >-
          {{ payload.repository.url |
            replace((gitea_replace_url | split('/api/'))[0], git_api_url) }}
        git_url_path_jobs: 'actions/jobs'
        head_sha: "{{ payload.workflow_job.head_sha }}"

    - name: Ensure API Token is defined
      ansible.builtin.assert:
        that:
          - lookup('env', 'GIT_API_TOKEN') is defined
        msg: Environmental variable `GIT_API_TOKEN` must be defined

    - name: Ensure required variables exist
      ansible.builtin.assert:
        that:
          - lookup('env', 'GIT_API_TOKEN') | length > 0
        msg: Environmental variable `GIT_API_TOKEN` must not be empty

    - name: Fetch job log
      ansible.builtin.uri:
        url: >-
          {{ git_url_api + '/' + git_url_path_jobs
            + '/' + payload.workflow_job.id | string + '/logs' }}
        dest: /tmp/job.log
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: GET
        return_content: true
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"

    - name: Trace
      ansible.builtin.shell:
        cmd: |
          set -o pipefail;
          export GITHUB_ACTOR={{ payload.sender.username }};
          export GITHUB_SHA={{ payload.workflow_job.head_sha }};
          cat /tmp/job.log | annotations > /tmp/annotations.json;
        executable: sh
      changed_when: false

    - name: Load annotations
      ansible.builtin.set_fact:
        annotations: "{{ lookup('file', '/tmp/annotations.json') | from_yaml }}"

    # 404 accepted: the PR may not exist (e.g. push to a branch without one).
    - name: Fetch Pull Request
      ansible.builtin.uri:
        url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string }}"
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: GET
        return_content: true
        status_code:
          - 200
          - 404
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"
      register: http_get_pull_request
      when: >
        annotations.pull_request | string

    - name: Trace - Display Pull Request State
      ansible.builtin.debug:
        msg: "{{ http_get_pull_request.json.state | default('No PR found') }}"
      when: >
        not http_get_pull_request.skipped | default(false) | bool

    # Only review PRs that exist and are still open.
    - name: Post review
      ansible.builtin.uri:
        url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string + '/reviews' }}"
        body: "{{ annotations.api_body }}"
        body_format: json
        headers:
          Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }}
        method: POST
        return_content: true
        timeout: 10
        validate_certs: false
      no_log: "{{ disable_logging }}"
      # yamllint disable rule:indentation
      when: >
        http_get_pull_request.json.state | default('-') != 'closed'
        and
        http_get_pull_request.status | default(0) == 200
        and
        not http_get_pull_request.skipped | default(false) | bool
      # yamllint enable rule:indentation

5
requirements.txt Normal file
View File

@ -0,0 +1,5 @@
ansible-core==2.18.6
ansible-rulebook==1.1.7
requests