diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..184c903 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,7 @@ +.ansible +.git +CONTRIBUTING.md +docs/ +galaxy.yml +*.tmp.* + diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..433afeb --- /dev/null +++ b/.gitignore @@ -0,0 +1,5 @@ +# Temp directories / files + +.ansible/ +artifacts/ +*.tmp.* diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..ec478e3 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,5 @@ +# Contribution Guide + +Contributions welcome. + +This doc is still a WIP. diff --git a/README.md b/README.md new file mode 100644 index 0000000..9e24d6a --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +# No Fuss Computings Git[ea/hub] Event Processing diff --git a/dockerfile b/dockerfile new file mode 100644 index 0000000..7f96b03 --- /dev/null +++ b/dockerfile @@ -0,0 +1,97 @@ + +FROM python:3.11-alpine3.22 AS Build + + +RUN apk update; \ + apk add \ + build-base \ + gcc; + + +RUN pip install --upgrade \ + setuptools \ + wheel + + +RUN apk add openjdk21-jdk; + + +RUN apk add \ + alpine-sdk \ + libffi-dev \ + maven \ + build-base libc-dev; + + +ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk + +# ChatGPT suggestion to fix alpine version >3.19 +ENV CFLAGS "-Wno-incompatible-pointer-types" + + +COPY requirements.txt /tmp/requirements.txt + + +RUN mkdir -p /tmp/python_modules; \ + cd /tmp/python_modules; \ + pip download --dest . \ + pip \ + --check-build-dependencies \ + -r /tmp/requirements.txt + + +RUN cd /tmp/python_modules; \ + mkdir -p /tmp/python_builds; \ + echo "[DEBUG] PATH=$PATH"; \ + ls -l; \ + pip wheel --wheel-dir /tmp/python_builds --find-links . *.whl; \ + pip wheel --wheel-dir /tmp/python_builds --find-links . 
*.tar.gz; + + + +FROM python:3.11-alpine3.22 + + +RUN apk --no-cache update; \ + apk --no-cache add \ + openjdk21-jdk + + +ENV ANSIBLE_FORCE_COLOR true + +ENV ANSIBLE_INVENTORY hosts.yaml + +ENV JAVA_HOME /usr/lib/jvm/java-21-openjdk + + +COPY includes/ / + +COPY . /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/ + +COPY --from=build /tmp/python_builds /tmp/python_builds + + +RUN pip install --no-cache-dir /tmp/python_builds/*; \ + rm -R /tmp/python_builds; \ + ansible-galaxy collection install ansible.eda; \ + addgroup eda; \ + adduser -D --ingroup eda eda; \ + cp -r /root/.ansible /home/eda/; \ + rm -rf /home/eda/.ansible/collections/ansible_collections/nofusscomputing/git_events/includes; \ + mv /usr/bin/annotations.py /usr/bin/annotations; \ + chmod +x /usr/bin/annotations; \ + chown eda:eda -R /home/eda; + + +WORKDIR /home/eda + + +USER eda + + +CMD [ \ + "ansible-rulebook", \ + "-r", "nofusscomputing.git_events.webhook", \ + "--env-vars", "PROBLEM_MATCHER_PORT,PROBLEM_MATCHER_TOKEN", \ + "-v" \ +] diff --git a/extensions/eda/rulebooks/webhook.yml b/extensions/eda/rulebooks/webhook.yml new file mode 100644 index 0000000..1a6a4ce --- /dev/null +++ b/extensions/eda/rulebooks/webhook.yml @@ -0,0 +1,38 @@ +--- + +- name: Inbound Webhook + hosts: localhost + match_multiple_rules: true + + sources: + - name: Webhook + ansible.eda.webhook: + host: 0.0.0.0 + port: "{{ PROBLEM_MATCHER_PORT | default(5000) | int }}" + token: "{{ PROBLEM_MATCHER_TOKEN | default('-not-set-') }}" + + rules: + + + - name: Show workflow_job Action + condition: > + event.meta.headers['X-GitHub-Event'] == 'workflow_job' + actions: + + - debug: + msg: "Received workflow_job event with action of {{ event.payload.action }}" + + + - name: Process Completed workflow_job + condition: > + event.meta.headers['X-GitHub-Event'] == 'workflow_job' + and + event.payload.action == 'completed' + actions: + + - run_playbook: + name: nofusscomputing.git_events.problem_matcher + 
verbosity: 2 + extra_vars: + payload: "{{ event.payload }}" + ansible_connection: local diff --git a/galaxy.yml b/galaxy.yml new file mode 100644 index 0000000..4755466 --- /dev/null +++ b/galaxy.yml @@ -0,0 +1,79 @@ +--- + +### REQUIRED +# The namespace of the collection. This can be a company/brand/organization or product namespace under which all +# content lives. May only contain alphanumeric lowercase characters and underscores. Namespaces cannot start with +# underscores or numbers and cannot contain consecutive underscores +namespace: nofusscomputing + +# The name of the collection. Has the same character restrictions as 'namespace' +name: git_events + +# The version of the collection. Must be compatible with semantic versioning +version: 0.0.1 + +# The path to the Markdown (.md) readme file. This path is relative to the root of the collection +readme: README.md + +# A list of the collection's content authors. Can be just the name or in the format 'Full Name (url) +# @nicks:irc/im.site#channel' +authors: + - No Fuss Computing + +### OPTIONAL but strongly recommended +# A short summary description of the collection +description: Git[ea/hub] Problem matcher parser with PR Code Review + +# Either a single license or a list of licenses for content inside of a collection. Ansible Galaxy currently only +# accepts L(SPDX,https://spdx.org/licenses/) licenses. This key is mutually exclusive with 'license_file' +license: + - MIT + +# The path to the license file for the collection. This path is relative to the root of the collection. This key is +# mutually exclusive with 'license' +license_file: LICENCE + +# A list of tags you want to associate with the collection for indexing/searching. A tag name has the same character +# requirements as 'namespace' and 'name' +tags: + - ci + - event + - rulebook + - tools + +# Collections that this collection requires to be installed for it to be usable. The key of the dict is the +# collection label 'namespace.name'. 
The value is a version range +# L(specifiers,https://python-semanticversion.readthedocs.io/en/latest/#requirement-specification). Multiple version +# range specifiers can be set and are separated by ',' +dependencies: {} + +# The URL of the originating SCM repository +repository: https://nofusscomputing.com/git/ansible-collections/git-events + +# The URL to any online docs +documentation: https://nofusscomputing.com/git/ansible-collections/git-events + +# The URL to the homepage of the collection/project +homepage: https://nofusscomputing.com/git/ansible-collections/git-events + +# The URL to the collection issue tracker +issues: https://nofusscomputing.com/git/ansible-collections/git-events/issues + +# A list of file glob-like patterns used to filter any files or directories that should not be included in the build +# artifact. A pattern is matched from the relative path of the file or directory of the collection directory. This +# uses 'fnmatch' to match the files or directories. Some directories and files like 'galaxy.yml', '*.pyc', '*.retry', +# and '.git' are always filtered. Mutually exclusive with 'manifest' +build_ignore: [ + '.ansible', + artifacts/, + '.git', + 'galaxy.yml', + '*.tmp.*' + +] +# A dict controlling use of manifest directives used in building the collection artifact. The key 'directives' is a +# list of MANIFEST.in style +# L(directives,https://packaging.python.org/en/latest/guides/using-manifest-in/#manifest-in-commands). The key +# 'omit_default_directives' is a boolean that controls whether the default directives are used. 
Mutually exclusive
+# with 'build_ignore'
+# manifest: null
diff --git a/includes/home/eda/hosts.yaml b/includes/home/eda/hosts.yaml
new file mode 100644
index 0000000..074cf18
--- /dev/null
+++ b/includes/home/eda/hosts.yaml
@@ -0,0 +1,6 @@
+---
+all:
+  hosts:
+    localhost:
+  vars:
+    ansible_connection: local
diff --git a/includes/usr/bin/annotations.py b/includes/usr/bin/annotations.py
new file mode 100755
index 0000000..bafc31a
--- /dev/null
+++ b/includes/usr/bin/annotations.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python
+
+import sys
+import re
+import json
+# import requests
+import os
+
+
+def default_matcher( entry ) -> dict:
+
+
+    filename = str(entry['file'])
+
+    msg_type = str(entry['type']).upper()
+
+    if msg_type in type_count:
+
+        type_count[msg_type] += 1
+
+    else:
+
+        type_count[msg_type] = 1
+
+    if filename.startswith('./'):
+
+        filename = str(entry['file'])[2:]
+
+    body = f"> [!NOTE]\n>\n> **{msg_type} in file: {filename}** " \
+        f"_Line: {str(entry['line'])} Column: {str(entry['column'])}_" \
+        f"\n>\n> _{str(entry['text'])}_\n>"
+
+    if msg_type in [ 'ERROR' ]:
+
+
+
+        body = f"> [!IMPORTANT]\n>\n> **{msg_type} in file: {filename}** " \
+            f"_Line: {str(entry['line'])} Column: {str(entry['column'])}_" \
+            f"\n>\n> _{str(entry['text'])}_\n>"
+
+    elif msg_type in [ 'WARNING' ]:
+
+        body = f"> [!WARNING]\n>\n> **{msg_type} in file: {filename}** " \
+            f"_Line: {str(entry['line'])} Column: {str(entry['column'])}_" \
+            f"\n>\n> _{str(entry['text'])}_\n>"
+
+    return {
+        "body": body,
+        "new_position": int(entry['line']),
+        "old_position": 0,
+        "path": filename
+    }
+
+
+
+def pylint_matcher( entry ) -> dict:
+
+    if not entry.get('line', int(1)):
+
+        comment_line = 1
+
+    else:
+
+        comment_line = int(entry.get('line', int(1)))
+
+    severity = str(entry['severity']).lower()
+    default_admonition_level = 'NOTE'
+
+    if severity in [ 'major' ]:
+
+        default_admonition_level = 'IMPORTANT'
+
+    if severity in [ 'minor' ]:
+
+        default_admonition_level = 'WARNING'
+
+    
body = str(
+        f"> [!{default_admonition_level}] "
+        f"\n> "
+        f"\n>**Severity**: {severity} "
+        f"\n>**file**: _{entry['path']}_ "
+        f"**Line**: _{entry.get('line', 0)}_ "
+        f"\n>"
+        f"\n> [{entry['check_name']}]({entry['url']}): {entry['description']} "
+        f"\n>"
+    )
+
+
+    if(
+        entry.get('body', '') != 'None'
+        and entry.get('body', '') != ''
+        and entry.get('body', '') is not None
+    ):
+
+        body = body + str(
+            f"\n>_{entry.get('body', '')}_ "
+            f"\n>"
+        )
+
+
+    return {
+        "body": body,
+        "new_position": comment_line,
+        "old_position": 0,
+        "path": str(entry['path'])
+    }
+
+
+
+
+
+
+regex = {
+
+    "default": os.getenv("PROBLEM_MATCHER_REGEX",
+        r"::(?P<type>\S+)\s+"
+        r"(?:file=)(?P<file>.+?),"
+        r"(?:line=)(?P<line>\d+),"
+        r"(?:col=)(?P<column>\d+).+?"
+        # r"\s\[(?P<code>\S+)]\s(?P<text>.+)"
+        r"\s(?P<text>.+)"
+    ),
+
+# \{\s*"type":\s*"(?P<type>[^"]+)",\s*"check_name":\s*"(?P<check_name>[^"]+)",\s*"categories":\s*\[(?P<categories>[^\]]*)\],\s*"url":\s*"(?P<url>[^"]+)",\s*"severity":\s*"(?P<severity>[^"]+)",\s*"description":\s*"(?P<description>[^"]+)",\s*"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)"(?:,\s*"lines":\s*\{\s*"begin":\s*(?P<line>\d+)\})?.*?\}},(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?
+    "pylint-json": str(
+        # r'\{\s*"type":\s*"(?P<type>[^"]+)",\s*'
+        # r'"check_name":\s*"(?P<check_name>[^"]+)",\s*'
+        # r'"categories":\s*\[(?P<categories>[^\]]*)\],\s*'
+        # r'"url":\s*"(?P<url>[^"]+)",\s*'
+        # r'"severity":\s*"(?P<severity>[^"]+)",\s*'
+        # r'"description":\s*"(?P<description>[^"]+)",\s*'
+        # r'"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*'
+        # r'"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)'
+        # # r'"(?:,\s*"lines":\s*\{\s*"begin":\s*(?P<line>\d+)\})?.*?\}},'
+        # r'(?:(?:,\s*"lines":\s*\{\s*"begin":\s*)|(?:{"line":\s))(?P<line>\d+)?.*?\}},'
+        # r'(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?'
+
+        # \{\s*"type":\s*"(?P<type>[^"]+)",\s*"check_name":\s*"(?P<check_name>[^"]+)",\s*"categories":\s*\[(?P<categories>[^\]]*)\],\s*"url":\s*"(?P<url>[^"]+)",\s*"severity":\s*"(?P<severity>[^"]+)",\s*"description":\s*"(?P<description>[^"]+)",\s*"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)".+?"line[s]?":.+?(?P<line>\d+)?.*?\}},(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?
+
+        r'\{\s*"type":\s*"(?P<type>[^"]+)",\s*'
+        r'"check_name":\s*"(?P<check_name>[^"]+)",\s*'
+        r'"categories":\s*\[(?P<categories>[^\]]*)\],\s*'
+        r'"url":\s*"(?P<url>[^"]+)",\s*'
+        r'"severity":\s*"(?P<severity>[^"]+)",\s*'
+        r'"description":\s*"(?P<description>[^"]+)",\s*'
+        r'"fingerprint":\s*"(?P<fingerprint>[^"]+)",\s*'
+        r'"location":\s*\{\s*"path":\s*"(?P<path>[^"]+)".+?'
+        r'"line[s]?":.+?(?P<line>\d+).*?\}},'
+        r'(?:\s"content":\s\{"body":\s"(?P<body>.+?)")?'
+    )
+}
+
+
+results = {}
+
+NFC_PROBLEM_MATCHER = False
+
+pull_request: int = None
+
+matcher = re.compile(r'NFC_PROBLEM_MATCHER=(?P<pull_number>\d+)')
+matcher_type = re.compile(r'NFC_PROBLEM_MATCHER_TYPE=(?P<type>[a-z_-]+)')
+
+regex_type = 'default'
+pattern = re.compile( regex[regex_type] )
+
+
+for line in sys.stdin:
+
+    match_matcher_type = matcher_type.search(line)
+
+    if match_matcher_type:
+        regex_type = match_matcher_type['type']
+        pattern = re.compile( regex[regex_type] )
+
+
+    match = pattern.finditer(line)
+
+    problem_matcher = matcher.search(line,)
+
+    if problem_matcher:
+
+        NFC_PROBLEM_MATCHER = True
+
+        pull_request = int(problem_matcher['pull_number'])
+
+
+    if match:
+
+        if regex_type not in results:
+            results[regex_type] = []
+
+
+        for obj in match:
+
+            results[regex_type].append(obj.groupdict())
+
+
+
+if not NFC_PROBLEM_MATCHER:
+
+    sys.exit(2)
+
+
+if not results:
+    print("No matching lines found.")
+    sys.exit(0)
+
+
+api_body: dict = {
+    "body": "boo",
+    "comments": [],
+    "commit_id": os.getenv("GITHUB_SHA"),
+    "event": "REQUEST_CHANGES"
+}
+
+
+type_count = {}
+
+for tool, tool_results in results.items():
+
+    for entry in tool_results:
+
+        if tool == 'default':
+
+            api_body['comments'] += [ default_matcher( entry ) ]
+
+        elif tool == 'pylint-json':
+
+            api_body['comments'] += [ pylint_matcher( entry ) ]
+
+
+review_body = '## :no_entry_sign: Annotations found\n\n' \
+    f'@{os.getenv("GITHUB_ACTOR")}, found some issues.\n\n' \
+    '| Type | Count | \n|:---|:---:| \n'
+
+for msg_type, cnt in type_count.items():
+
+    review_body += f'| {msg_type} | {cnt} | \n'
+
+
+api_body['body'] = 
review_body + '\n'
+
+data = {
+    "pull_request": pull_request,
+    "api_body": api_body
+}
+
+print(json.dumps(data))
+
+
+# URL = os.getenv("GITHUB_API_URL") + '/repos/' + os.getenv("GITHUB_REPOSITORY") + '/pulls/' + os.getenv("GITHUB_REF_NAME") + '/reviews?token=' + str(os.getenv("AGITHUB_TOKEN"))
+# try:
+#     response = requests.post(URL, json=api_body)
+#     response.raise_for_status()
+#     print(f"\nāœ… Successfully posted to {URL}")
+#     print(f"šŸ” Server responded with: {response.status_code} {response.reason}")
+# except requests.exceptions.RequestException as e:
+#     print(f"\nāŒ Failed to post to {URL}")
+#     print(f"Error: {e}")
+#     sys.exit(1)
diff --git a/meta/runtime.yml b/meta/runtime.yml
new file mode 100644
index 0000000..ddab9ac
--- /dev/null
+++ b/meta/runtime.yml
@@ -0,0 +1,52 @@
+---
+# Collections must specify a minimum required ansible version to upload
+# to galaxy
+requires_ansible: '>=2.18.0'
+
+# Content that Ansible needs to load from another location or that has
+# been deprecated/removed
+# plugin_routing:
+#   action:
+#     redirected_plugin_name:
+#       redirect: ns.col.new_location
+#     deprecated_plugin_name:
+#       deprecation:
+#         removal_version: "4.0.0"
+#         warning_text: |
+#           See the porting guide on how to update your playbook to
+#           use ns.col.another_plugin instead.
+#     removed_plugin_name:
+#       tombstone:
+#         removal_version: "2.0.0"
+#         warning_text: |
+#           See the porting guide on how to update your playbook to
+#           use ns.col.another_plugin instead.
+# become: +# cache: +# callback: +# cliconf: +# connection: +# doc_fragments: +# filter: +# httpapi: +# inventory: +# lookup: +# module_utils: +# modules: +# netconf: +# shell: +# strategy: +# terminal: +# test: +# vars: + +# Python import statements that Ansible needs to load from another location +# import_redirection: +# ansible_collections.ns.col.plugins.module_utils.old_location: +# redirect: ansible_collections.ns.col.plugins.module_utils.new_location + +# Groups of actions/modules that take a common set of options +# action_groups: +# group_name: +# - module1 +# - module2 diff --git a/playbooks/problem_matcher.yaml b/playbooks/problem_matcher.yaml new file mode 100644 index 0000000..9c86de9 --- /dev/null +++ b/playbooks/problem_matcher.yaml @@ -0,0 +1,105 @@ +--- +- name: Git Problem Matcher + gather_facts: false + hosts: localhost + + + tasks: + + + - name: Get facts from Environment + ansible.builtin.set_fact: + gitea_url: "{{ lookup('env', 'GITEA_INTERNAL_URL') | default(payload.repository.url) }}" + gitea_replace_url: "{{ lookup('env', 'GITEA_URL') | default(payload.repository.url) }}" + disable_logging: "{{ not lookup('env', 'ENABLE_DEBUG_LOGGING') | bool | default(false) }}" + + + - name: Set required Facts + ansible.builtin.set_fact: + git_url_api: "{{ payload.repository.url | replace(gitea_replace_url, gitea_url) }}" + git_url_path_jobs: 'actions/jobs' + head_sha: "{{ payload.workflow_job.head_sha }}" + + + - name: Ensure API Token is defined + ansible.builtin.assert: + that: + - lookup('env', 'GIT_API_TOKEN') is defined + msg: Environmental variable `GIT_API_TOKEN` must be defined + + + - name: Ensure required variables exist + ansible.builtin.assert: + that: + - lookup('env', 'GIT_API_TOKEN') | length > 0 + msg: Environmental variable `GIT_API_TOKEN` must not be empty + + + - name: Fetch job log + ansible.builtin.uri: + url: "{{ git_url_api + '/' + git_url_path_jobs + '/' + payload.workflow_job.id | string + '/logs' }}" + dest: /tmp/job.log + 
headers: + Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }} + method: GET + return_content: true + timeout: 10 + validate_certs: false + no_log: "{{ disable_logging }}" + + + - name: Trace + ansible.builtin.shell: + cmd: | + set -o pipefail; + + export GITHUB_ACTOR={{ payload.sender.username }} + + cat /tmp/job.log | annotations > /tmp/annotations.json; + + executable: sh + changed_when: false + + + - name: Load annotations + ansible.builtin.set_fact: + annotations: "{{ lookup('file', '/tmp/annotations.json') | from_yaml }}" + + + - name: Fetch Pull Request + ansible.builtin.uri: + url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string }}" + headers: + Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }} + method: GET + return_content: true + status_code: + - 200 + - 404 + timeout: 10 + validate_certs: false + no_log: "{{ disable_logging }}" + register: http_get_pull_request + + + - name: Trace - Display Pull Request State + ansible.builtin.debug: + msg: "{{ http_get_pull_request.json.state | default('No PR found') }}" + + + - name: Post review + ansible.builtin.uri: + url: "{{ git_url_api + '/pulls/' + annotations.pull_request | string + '/reviews' }}" + body: "{{ annotations.api_body }}" + body_format: json + headers: + Authorization: token {{ lookup('env', 'GIT_API_TOKEN') }} + method: POST + return_content: true + timeout: 10 + validate_certs: false + no_log: "{{ disable_logging }}" + when: > + http_get_pull_request.json.state | default('-') != 'closed' + and + http_get_pull_request.status == 200 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..2263dca --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ + +ansible-core==2.18.6 +ansible-rulebook==1.1.7 + +requests