16 Commits

| Author | SHA1 | Message | Date | CI |
| --- | --- | --- | --- | --- |
| rattatwinko | 283e82f783 | asdf | 2025-05-28 15:22:35 +02:00 | Build Pommer Executable / build (push): successful in 25s |
| rattatwinko | 71b9594b8c | fixed | 2025-05-28 15:19:47 +02:00 | Build Pommer Executable / build (push): failing after 53s |
| rattatwinko | bb28221733 | cock | 2025-05-28 15:19:07 +02:00 | |
| | f236b81d29 | assfucked new test suite ; seperated version which is very fucking experimental | 2025-05-23 07:34:03 +02:00 | |
| | 85ba1c6d33 | tests fixed | 2025-05-23 07:19:42 +02:00 | |
| | 4c02d6c1bd | shit | 2025-05-23 07:08:34 +02:00 | |
| | 4f440cd37d | moved tests and added some more testing | 2025-05-23 06:49:13 +02:00 | |
| | 828fda59c2 | tests | 2025-05-22 16:31:42 +02:00 | |
| | 858a5a6069 | comment | 2025-05-22 15:00:31 +02:00 | |
| | 7e684d845b | changed info uptop of File | 2025-05-22 12:22:08 +02:00 | |
| rattatwinko | 3cd448d39b | bullshit smallassfuckass change | 2025-05-21 19:26:02 +02:00 | |
| rattatwinko | 21ae48eb23 | some improvement on the readme.md ; and LOGO!!! | 2025-05-21 19:15:15 +02:00 | |
| | e00c62a7f8 | crappy logo in the fucked-up readme | 2025-05-21 07:34:20 +02:00 | |
| | ddbeaa0a79 | fixed typo naxyn | 2025-05-21 07:27:13 +02:00 | |
| | cc2fa8d1d6 | readme.md updated to fit | 2025-05-20 14:40:19 +02:00 | |
| | aa5349f704 | gradle support | 2025-05-20 10:53:22 +02:00 | |
10 changed files with 2745 additions and 89 deletions

View File

@@ -0,0 +1,31 @@
name: Build Pommer Executable
on:
push:
branches:
- master
pull_request:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
pip install pyinstaller
- name: Build executable with PyInstaller
run: |
pyinstaller --onefile pommer/pommer.py --name pommer
- name: Upload executable artifact
uses: actions/upload-artifact@v3
with:
name: pommer-linux
path: dist/pommer
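
The PyInstaller step in this job can be reproduced locally without a runner. A minimal sketch using PyInstaller's Python entry point, assuming `pyinstaller` is installed and the entry script sits at `pommer/pommer.py` as in the workflow above:

```python
# Local re-run of the CI build step (sketch only, not part of the repository).
# Assumes: pip install pyinstaller, and the entry script path used in the workflow.
import PyInstaller.__main__

PyInstaller.__main__.run([
    "--onefile",           # single self-contained executable, as in the CI job
    "pommer/pommer.py",    # entry script path taken from the workflow above
    "--name", "pommer",    # output lands at dist/pommer
])
```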

logo.xcf: new binary file (not shown)

View File

@@ -0,0 +1,183 @@
import os
import re
import xml.etree.ElementTree as ET
from pathlib import Path
def parse_pom_xml(pom_path):
"""Parse a Maven POM file with better error handling and namespace support"""
try:
if not os.path.exists(pom_path):
return None
# Register namespace
ET.register_namespace('', "http://maven.apache.org/POM/4.0.0")
ns = {'mvn': "http://maven.apache.org/POM/4.0.0"}
tree = ET.parse(pom_path)
root = tree.getroot()
# Helper function to safely get text
def get_text(element, path, default="unknown"):
elem = element.find(path, ns)
return elem.text if elem is not None else default
artifact_id = get_text(root, './mvn:artifactId')
group_id = get_text(root, './mvn:groupId')
version = get_text(root, './mvn:version')
name = get_text(root, './mvn:name', artifact_id)
# Get Java version with better fallback logic
java_version = "17" # Default
java_elem = root.find('./mvn:properties/mvn:java.version', ns)
if java_elem is not None and java_elem.text:
java_version = java_elem.text.strip()
# Packaging type
packaging = get_text(root, './mvn:packaging', 'jar')
# Kotlin detection
kotlin_version = None
kotlin_elem = root.find('./mvn:properties/mvn:kotlin.version', ns)
if kotlin_elem is not None:
kotlin_version = kotlin_elem.text
# Check for Kotlin plugin or dependency
is_kotlin = False
if kotlin_version:
is_kotlin = True
else:
# Check plugins
for plugin in root.findall('.//mvn:plugin', ns):
group = plugin.find('./mvn:groupId', ns)
if group is not None and group.text == 'org.jetbrains.kotlin':
is_kotlin = True
break
# Check dependencies
if not is_kotlin:
for dep in root.findall('.//mvn:dependency', ns):
group = dep.find('./mvn:groupId', ns)
artifact = dep.find('./mvn:artifactId', ns)
if (group is not None and group.text == 'org.jetbrains.kotlin' and
artifact is not None and 'kotlin' in artifact.text.lower()):
is_kotlin = True
break
return {
"artifact_id": artifact_id,
"group_id": group_id,
"version": version,
"name": name,
"java_version": java_version,
"packaging": packaging,
"is_kotlin": is_kotlin,
"kotlin_version": kotlin_version,
"pom_path": str(Path(pom_path).resolve()),
"build_system": "maven"
}
except Exception as e:
print(f"Error parsing POM file {pom_path}: {str(e)}")
return None
def parse_gradle_file(gradle_path):
"""Parse Gradle build files with improved regex and error handling"""
try:
if not os.path.exists(gradle_path):
return None
with open(gradle_path, 'r', encoding='utf-8') as f:
content = f.read()
is_kotlin_dsl = gradle_path.endswith('.kts')
# Extract basic info with more robust regex
group_match = re.search(r'group\s*[=:]\s*[\'"]([^\'"]+)[\'"]', content)
group_id = group_match.group(1) if group_match else "unknown"
version_match = re.search(r'version\s*[=:]\s*[\'"]([^\'"]+)[\'"]', content)
version = version_match.group(1) if version_match else "unknown"
# Project name detection
artifact_id = "unknown"
project_dir = os.path.dirname(gradle_path)
# Check settings.gradle[.kts]
for settings_file in ['settings.gradle', 'settings.gradle.kts']:
settings_path = os.path.join(project_dir, settings_file)
if os.path.exists(settings_path):
try:
with open(settings_path, 'r', encoding='utf-8') as f:
settings_content = f.read()
name_match = re.search(r'rootProject\.name\s*[=:]\s*[\'"]([^\'"]+)[\'"]', settings_content)
if name_match:
artifact_id = name_match.group(1)
break
except Exception:
continue
if artifact_id == "unknown":
artifact_id = os.path.basename(project_dir) if project_dir else "unknown"
# Java version detection with multiple patterns
java_version = "17" # Default
# Pattern 1: JavaVersion.VERSION_XX
version_match = re.search(r'(?:source|target)Compatibility\s*[=:]\s*JavaVersion\.VERSION_(\d+)', content)
if version_match:
java_version = version_match.group(1)
else:
# Pattern 2: sourceCompatibility = 'XX'
version_match = re.search(r'(?:source|target)Compatibility\s*[=:]\s*[\'"](\d+)[\'"]', content)
if version_match:
java_version = version_match.group(1)
else:
# Pattern 3: Toolchain
toolchain_match = re.search(
r'toolchain\s*{[^}]*languageVersion\s*[=:]\s*JavaLanguageVersion\.of\(\s*(\d+)\s*\)',
content, re.DOTALL
)
if toolchain_match:
java_version = toolchain_match.group(1)
# Kotlin detection
is_kotlin = is_kotlin_dsl
kotlin_version = None
if not is_kotlin:
# Check for Kotlin plugin
kotlin_plugin_match = re.search(
r'(?:id|kotlin)\s*[(]?\s*[\'"](?:org\.jetbrains\.kotlin\.(?:jvm|android)|kotlin)[\'"]\s*[)]?',
content
)
if kotlin_plugin_match:
is_kotlin = True
# Try to get version
version_match = re.search(
r'(?:id|kotlin)\s*[(]?\s*[\'"]org\.jetbrains\.kotlin\.\w+[\'"]\s*[)]?\s*version\s*[\'"]([^\'"]+)[\'"]',
content
)
if version_match:
kotlin_version = version_match.group(1)
# Application plugin detection
is_application = bool(re.search(r'id\s*[(]?\s*[\'"]application[\'"]\s*[)]?', content))
return {
"artifact_id": artifact_id,
"group_id": group_id,
"version": version,
"name": artifact_id, # Use artifact_id as name if not specified
"java_version": java_version,
"packaging": "application" if is_application else "jar",
"is_kotlin": is_kotlin,
"kotlin_version": kotlin_version,
"gradle_path": str(Path(gradle_path).resolve()),
"is_kotlin_dsl": is_kotlin_dsl,
"build_system": "gradle"
}
except Exception as e:
print(f"Error parsing Gradle file {gradle_path}: {str(e)}")
return None
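
A minimal usage sketch for the two parsers above. The `pommer.parser` import path is an assumption (the diff only shows relative imports), and the sample `pom.xml` is invented for illustration:

```python
# Sketch: feed a tiny, made-up pom.xml to parse_pom_xml and inspect the result.
# "pommer.parser" is an assumed import path; adjust to the real package name.
import os
import tempfile

from pommer.parser import parse_pom_xml

SAMPLE_POM = """<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0">
  <groupId>com.example</groupId>
  <artifactId>demo-plugin</artifactId>
  <version>1.0.0</version>
  <properties><java.version>21</java.version></properties>
</project>
"""

with tempfile.TemporaryDirectory() as tmp:
    pom_path = os.path.join(tmp, "pom.xml")
    with open(pom_path, "w", encoding="utf-8") as f:
        f.write(SAMPLE_POM)
    info = parse_pom_xml(pom_path)
    print(info["artifact_id"], info["java_version"], info["build_system"])
    # expected output: demo-plugin 21 maven
```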

View File

@@ -0,0 +1,24 @@
import os
import glob
from pathlib import Path
def find_pom_files(base_dir="."):
"""Find all pom.xml files recursively"""
base_path = Path(base_dir).resolve()
return [
str(p) for p in
base_path.glob("**/pom.xml")
if not any(part.startswith('.') for part in p.parts)
]
def find_gradle_files(base_dir="."):
"""Find all build.gradle[.kts] files recursively"""
base_path = Path(base_dir).resolve()
gradle_files = list(base_path.glob("**/build.gradle"))
kts_files = list(base_path.glob("**/build.gradle.kts"))
# Filter out hidden directories
return [
str(p) for p in gradle_files + kts_files
if not any(part.startswith('.') for part in p.parts)
]
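
A short sketch of how these discovery helpers feed the parsers; the `pommer.*` import paths are assumptions, since the diff only shows relative imports:

```python
# Sketch: discover build files under the current directory and parse each one.
from pommer.discovery import find_pom_files, find_gradle_files
from pommer.parser import parse_pom_xml, parse_gradle_file

projects = []
for path in find_pom_files("."):
    if (info := parse_pom_xml(path)) is not None:
        projects.append(info)
for path in find_gradle_files("."):
    if (info := parse_gradle_file(path)) is not None:
        projects.append(info)

for p in projects:
    print(f"{p['name']}: {p['build_system']}, Java {p['java_version']}")
```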

View File

@@ -0,0 +1,137 @@
import os
from pathlib import Path
def generate_maven_workflow(pom_infos):
"""Generate GitHub workflow for Maven projects"""
if not pom_infos:
return None
# Get highest Java version
java_versions = [int(info["java_version"]) for info in pom_infos if info["java_version"].isdigit()]
java_version = str(max(java_versions)) if java_versions else "17"
workflow = f"""name: Maven Build
on:
push:
branches: [ main, master ]
pull_request:
branches: [ main, master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up JDK {java_version}
uses: actions/setup-java@v3
with:
java-version: '{java_version}'
distribution: 'temurin'
cache: 'maven'
"""
for info in pom_infos:
workflow += f"""
- name: Build {info['name']}
run: mvn -B package -f "{info['pom_path']}"
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: {info['artifact_id']}
path: {os.path.dirname(info['pom_path'])}/target/*.jar
"""
return workflow
def generate_gradle_workflow(gradle_infos):
"""Generate GitHub workflow for Gradle projects"""
if not gradle_infos:
return None
# Get highest Java version
java_versions = [int(info["java_version"]) for info in gradle_infos if info["java_version"].isdigit()]
java_version = str(max(java_versions)) if java_versions else "17"
workflow = f"""name: Gradle Build
on:
push:
branches: [ main, master ]
pull_request:
branches: [ main, master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up JDK {java_version}
uses: actions/setup-java@v3
with:
java-version: '{java_version}'
distribution: 'temurin'
cache: 'gradle'
- name: Make gradlew executable
run: chmod +x gradlew
"""
for info in gradle_infos:
workflow += f"""
- name: Build {info['name']}
run: ./gradlew build
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: {info['artifact_id']}
path: build/libs/*.jar
"""
return workflow
def generate_build_script(project_infos):
"""Generate a universal build script"""
script = """#!/bin/bash
set -e
echo "Universal Build Script"
echo "======================"
"""
maven_projects = [p for p in project_infos if p["build_system"] == "maven"]
gradle_projects = [p for p in project_infos if p["build_system"] == "gradle"]
if maven_projects:
script += """
# Maven projects
echo "Building Maven projects..."
"""
for project in maven_projects:
script += f"""
echo "Building {project['name']}"
mvn -f "{project['pom_path']}" clean package
"""
if gradle_projects:
script += """
# Gradle projects
echo "Building Gradle projects..."
"""
for project in gradle_projects:
script += f"""
echo "Building {project['name']}"
cd "{os.path.dirname(project['gradle_path'])}"
chmod +x gradlew || true
./gradlew build || gradle build
cd -
"""
script += """
echo "Build completed successfully!"
"""
return script
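
To inspect what the generators emit without a real repository, they can be fed a hand-written dict shaped like the parser output. A sketch with invented values and an assumed `pommer.generator` import path:

```python
# Sketch: generate a Maven workflow and a build script from a fake project record.
# The dict mirrors the keys produced by parse_pom_xml; all values are made up.
from pathlib import Path

from pommer.generator import generate_maven_workflow, generate_build_script

fake_project = {
    "artifact_id": "demo-plugin",
    "group_id": "com.example",
    "version": "1.0.0",
    "name": "Demo Plugin",
    "java_version": "21",
    "packaging": "jar",
    "is_kotlin": False,
    "kotlin_version": None,
    "pom_path": "/work/demo/pom.xml",
    "build_system": "maven",
}

workflow = generate_maven_workflow([fake_project])
Path(".github/workflows").mkdir(parents=True, exist_ok=True)
Path(".github/workflows/maven.yml").write_text(workflow)

Path("build.sh").write_text(generate_build_script([fake_project]))
```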

View File

@@ -0,0 +1,66 @@
import argparse
import json
import os
from pathlib import Path
from .parser import parse_pom_xml, parse_gradle_file
from .generator import (
generate_maven_workflow,
generate_gradle_workflow,
generate_build_script
)
from .discovery import find_pom_files, find_gradle_files
def main():
parser = argparse.ArgumentParser(description='Generate CI workflows for Java projects')
parser.add_argument('--dir', default='.', help='Base directory to scan')
parser.add_argument('--output-json', action='store_true', help='Output project info as JSON')
args = parser.parse_args()
# Find and parse projects
projects = []
for pom_path in find_pom_files(args.dir):
if project := parse_pom_xml(pom_path):
projects.append(project)
for gradle_path in find_gradle_files(args.dir):
if project := parse_gradle_file(gradle_path):
projects.append(project)
if not projects:
print("No projects found")
return
# Generate workflows
maven_projects = [p for p in projects if p["build_system"] == "maven"]
gradle_projects = [p for p in projects if p["build_system"] == "gradle"]
# Create output directory
workflows_dir = Path(".github/workflows")
workflows_dir.mkdir(parents=True, exist_ok=True)
if maven_projects:
workflow = generate_maven_workflow(maven_projects)
if workflow:
(workflows_dir / "maven.yml").write_text(workflow)
print("Generated Maven workflow")
if gradle_projects:
workflow = generate_gradle_workflow(gradle_projects)
if workflow:
(workflows_dir / "gradle.yml").write_text(workflow)
print("Generated Gradle workflow")
# Generate build script
build_script = generate_build_script(projects)
Path("build.sh").write_text(build_script)
os.chmod("build.sh", 0o755)
print("Generated build.sh")
if args.output_json:
Path("projects.json").write_text(json.dumps(projects, indent=2))
print("Generated projects.json")
print(f"Found {len(projects)} projects:")
for p in projects:
print(f"- {p['name']} ({p['build_system']})")

View File

@@ -1,27 +1,29 @@
#!/usr/bin/env python3
"""
POMMER.PY
``` POMMER.PY ```
THIS IS PROPRIETARY SOFTWARE DO NOT DISTRIBUTE TO OUTSIDERS!
!:: THIS IS PROPRIETARY SOFTWARE DO NOT DISTRIBUTE TO OUTSIDERS ::!
This Python File is distributed with every Kotlin Plugin Repository!
This Python File is distributed with every Kotlin / Java Plugin Repository!
If you find this to be confusing to use look at the Documentation in "rattatwinko/pommer"
Run this Script with Python 3.11 ; 3.9
This YET only works with Maven!
This works with Gradle and Maven ; the other branch is for reliability ; this one is for "Support" with larger *commercial* Builds
!:: Maven building with the master branch ( this version ) is unstable! The build test lives in "rattatwinko/mavenprobe" !! ::!
"""
import os
import xml.etree.ElementTree as ET
import re
from pathlib import Path
import argparse
import glob
import json
def parse_pom_xml(pom_path):
@@ -107,19 +109,116 @@ def parse_pom_xml(pom_path):
"kotlin_version": kotlin_version,
"source_dir": source_dir,
"default_goal": default_goal,
"pom_path": pom_path
"pom_path": pom_path,
"build_system": "maven"
}
except Exception as e:
print(f"Error parsing {pom_path}: {e}")
return None
def generate_gitea_workflow(pom_infos):
def parse_gradle_file(gradle_path):
"""
Generate a Gitea workflow YAML file based on multiple POM information
Parse a build.gradle or build.gradle.kts file and extract relevant information
"""
try:
print(f"Parsing Gradle file: {gradle_path}")
# Read the file content
with open(gradle_path, 'r') as f:
content = f.read()
# Determine if it's a Kotlin DSL file
is_kotlin_dsl = gradle_path.endswith('.kts')
# Extract group, version and project name
group_match = re.search(r'group\s*=\s*[\'"]([^\'"]+)[\'"]', content)
group_id = group_match.group(1) if group_match else "unknown"
version_match = re.search(r'version\s*=\s*[\'"]([^\'"]+)[\'"]', content)
version = version_match.group(1) if version_match else "unknown"
# Check settings.gradle for project name
settings_path = os.path.join(os.path.dirname(gradle_path), "settings.gradle")
settings_path_kts = os.path.join(os.path.dirname(gradle_path), "settings.gradle.kts")
artifact_id = "unknown"
name = "unknown"
# Check settings.gradle / settings.gradle.kts for the root project name (used as the artifact ID)
if os.path.exists(settings_path):
with open(settings_path, 'r') as f:
settings_content = f.read()
root_project_match = re.search(r'rootProject\.name\s*=\s*[\'"]([^\'"]+)[\'"]', settings_content)
if root_project_match:
artifact_id = root_project_match.group(1)
name = artifact_id
elif os.path.exists(settings_path_kts):
with open(settings_path_kts, 'r') as f:
settings_content = f.read()
root_project_match = re.search(r'rootProject\.name\s*=\s*[\'"]([^\'"]+)[\'"]', settings_content)
if root_project_match:
artifact_id = root_project_match.group(1)
name = artifact_id
# If no name found in settings.gradle, use directory name
if artifact_id == "unknown":
artifact_id = os.path.basename(os.path.dirname(gradle_path))
name = artifact_id
# Check for Java version
java_version = "17" # Default to Java 17
java_version_match = re.search(r'sourceCompatibility\s*=\s*[\'"]*JavaVersion\.VERSION_(\d+)[\'"]*', content)
if java_version_match:
java_version = java_version_match.group(1)
else:
# Alternative pattern: sourceCompatibility = '11'
alt_java_match = re.search(r'sourceCompatibility\s*=\s*[\'"](\d+)[\'"]', content)
if alt_java_match:
java_version = alt_java_match.group(1)
else:
# Look for toolchain configuration
toolchain_match = re.search(r'toolchain\s*{[^}]*languageVersion\s*=\s*JavaLanguageVersion\s*\.\s*of\s*\(\s*(\d+)\s*\)', content, re.DOTALL)
if toolchain_match:
java_version = toolchain_match.group(1)
# Check if Kotlin is used
kotlin_plugin_match = re.search(r'(id\s*\(\s*[\'"]kotlin[\'"])|(id\s*[\'"]org\.jetbrains\.kotlin)', content)
kotlin_version_match = re.search(r'kotlin\s*version\s*[\'"]([^\'"]+)[\'"]', content)
is_kotlin = bool(kotlin_plugin_match) or bool(kotlin_version_match) or is_kotlin_dsl
kotlin_version = kotlin_version_match.group(1) if kotlin_version_match else None
# Check for application plugin
is_application = bool(re.search(r'id\s*\(\s*[\'"]application[\'"]', content))
return {
"artifact_id": artifact_id,
"group_id": group_id,
"version": version,
"name": name,
"java_version": java_version,
"packaging": "jar" if not is_application else "application",
"is_kotlin": is_kotlin,
"kotlin_version": kotlin_version,
"source_dir": None, # Gradle uses conventional source dirs
"default_goal": None, # Gradle doesn't have default goals like Maven
"gradle_path": gradle_path,
"is_kotlin_dsl": is_kotlin_dsl,
"build_system": "gradle"
}
except Exception as e:
print(f"Error parsing {gradle_path}: {e}")
return None
def generate_maven_workflow(pom_infos):
"""
Generate a Gitea workflow YAML file for Maven projects
"""
if not pom_infos:
print("No valid POM files found")
print("No valid Maven POM files found")
return None
# Get the highest Java version required
@@ -186,13 +285,15 @@ jobs:
if info["default_goal"]:
maven_command = info["default_goal"]
relative_pom_path = info["pom_path"]
workflow_content += f"""
- name: Build {info["name"]} ({info["artifact_id"]})
run: |
echo "Building {info["artifact_id"]}"
echo "Current directory: $(pwd)"
# Run Maven build directly using the POM file path
mvn -B {maven_command} -f "$GITHUB_WORKSPACE/pom.xml" -Dmaven.compiler.failOnError=true
mvn -B {maven_command} -f "{relative_pom_path}" -Dmaven.compiler.failOnError=true
"""
# Add artifact upload step
@@ -201,8 +302,108 @@ jobs:
uses: actions/upload-artifact@v3
with:
name: {info["artifact_id"]}
path: target/{info['artifact_id']}-*.jar
if-no-files-found: error
path: |
{os.path.dirname(relative_pom_path)}/target/{info['artifact_id']}-*.jar
{os.path.dirname(relative_pom_path)}/target/*.jar
if-no-files-found: warn
"""
return workflow_content
def generate_gradle_workflow(gradle_infos):
"""
Generate a Gitea workflow YAML file for Gradle projects
"""
if not gradle_infos:
print("No valid Gradle files found")
return None
# Get the highest Java version required
java_version = max([info["java_version"] for info in gradle_infos])
# Check if any project uses Kotlin
uses_kotlin = any(info["is_kotlin"] for info in gradle_infos)
# Kotlin version (if any)
kotlin_version = None
for info in gradle_infos:
if info["kotlin_version"]:
kotlin_version = info["kotlin_version"]
break
# Construct the workflow content
workflow_content = f"""name: Gradle Build
on:
push:
branches: [ main, master, dev ]
pull_request:
branches: [ main, master ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up JDK {java_version}
uses: actions/setup-java@v3
with:
distribution: 'temurin'
java-version: '{java_version}'
cache: 'gradle'
- name: Grant execute permission for gradlew
run: |
find . -name gradlew -type f -exec chmod +x {{}} \;
- name: Debug Info
run: |
echo "Current workspace directory: $GITHUB_WORKSPACE"
echo "Current directory: $(pwd)"
echo "Project structure:"
find . -type f -name "*.kt" | sort
find . -type f -name "build.gradle*"
if [ -f ./gradlew ]; then
./gradlew --version
else
echo "No wrapper found, will use system gradle"
gradle --version
fi
"""
# Add individual build steps for each Gradle project
for i, info in enumerate(gradle_infos):
project_dir = os.path.dirname(info["gradle_path"])
has_wrapper = os.path.exists(os.path.join(project_dir, "gradlew"))
gradle_command = f"{os.path.join(project_dir, 'gradlew')}" if has_wrapper else "gradle"
workflow_content += f"""
- name: Build {info["name"]} ({info["artifact_id"]})
working-directory: {project_dir}
run: |
echo "Building {info["artifact_id"]}"
echo "Current directory: $(pwd)"
# Run Gradle build
{"./gradlew" if has_wrapper else "gradle"} build
"""
# Add artifact upload step
workflow_content += f"""
- name: Upload {info["artifact_id"]} artifact
uses: actions/upload-artifact@v3
with:
name: {info["artifact_id"]}
path: |
{project_dir}/build/libs/*.jar
{project_dir}/app/build/libs/*.jar
{project_dir}/build/distributions/*.zip
{project_dir}/app/build/distributions/*.zip
if-no-files-found: warn
"""
return workflow_content
@@ -213,79 +414,178 @@ def find_pom_files(base_dir="."):
return glob.glob(f"{base_dir}/**/pom.xml", recursive=True)
def find_gradle_files(base_dir="."):
"""Find all build.gradle and build.gradle.kts files in the given directory and subdirectories"""
gradle_files = glob.glob(f"{base_dir}/**/build.gradle", recursive=True)
kotlin_gradle_files = glob.glob(f"{base_dir}/**/build.gradle.kts", recursive=True)
return gradle_files + kotlin_gradle_files
def generate_build_script(project_infos):
"""Generate a universal build script for either Maven or Gradle projects"""
maven_projects = [p for p in project_infos if p["build_system"] == "maven"]
gradle_projects = [p for p in project_infos if p["build_system"] == "gradle"]
script = """#!/bin/bash
# Universal build script for Maven and Gradle projects
echo "Current directory: $(pwd)"
"""
if maven_projects:
script += """
# Check if Maven is installed
if ! command -v mvn &> /dev/null; then
echo "Maven not found, installing..."
sudo apt-get update
sudo apt-get install -y maven
fi
echo "Maven version: $(mvn --version)"
"""
for project in maven_projects:
script += f"""
echo "Building Maven project: {project['name']}"
mvn clean package -f "{project['pom_path']}"
"""
if gradle_projects:
script += """
# Check for Gradle or Gradle Wrapper
"""
for project in gradle_projects:
project_dir = os.path.dirname(project["gradle_path"])
has_wrapper = os.path.exists(os.path.join(project_dir, "gradlew"))
if has_wrapper:
script += f"""
echo "Building Gradle project using wrapper: {project['name']}"
cd "{project_dir}"
chmod +x ./gradlew
./gradlew build
cd - > /dev/null
"""
else:
script += f"""
echo "Building Gradle project using system Gradle: {project['name']}"
# Install Gradle if not available
if ! command -v gradle &> /dev/null; then
echo "Gradle not found, installing..."
sudo apt-get update
sudo apt-get install -y gradle
fi
cd "{project_dir}"
gradle build
cd - > /dev/null
"""
script += """
echo "Build complete. Build artifacts should be in their respective target/ or build/libs/ directories."
"""
return script
def main():
parser = argparse.ArgumentParser(description='Generate Gitea workflow for Maven/Kotlin projects')
parser.add_argument('--dir', '-d', default='.', help='Base directory to search for pom.xml files')
parser = argparse.ArgumentParser(description='Generate Gitea workflow for Maven/Gradle/Kotlin projects')
parser.add_argument('--dir', '-d', default='.', help='Base directory to search for project files')
parser.add_argument('--specific-pom', '-p', help='Path to a specific pom.xml file to process')
parser.add_argument('--specific-gradle', '-g', help='Path to a specific build.gradle file to process')
parser.add_argument('--output-json', '-j', action='store_true', help='Output project info as JSON as well')
args = parser.parse_args()
# Find project files
pom_files = []
gradle_files = []
if args.specific_pom:
pom_files = [args.specific_pom]
else:
pom_files = find_pom_files(args.dir)
if args.specific_gradle:
gradle_files = [args.specific_gradle]
else:
gradle_files = find_gradle_files(args.dir)
if not pom_files:
print(f"No pom.xml files found in {args.dir}")
if not pom_files and not gradle_files:
print(f"No pom.xml or build.gradle files found in {args.dir}")
return
print(f"Found {len(pom_files)} pom.xml files")
print(f"Found {len(pom_files)} pom.xml files and {len(gradle_files)} Gradle files")
# Parse all POM files
pom_infos = []
# Parse all project files
project_infos = []
# Parse Maven files
for pom_file in pom_files:
info = parse_pom_xml(pom_file)
if info:
pom_infos.append(info)
project_infos.append(info)
# Parse Gradle files
for gradle_file in gradle_files:
info = parse_gradle_file(gradle_file)
if info:
project_infos.append(info)
if not pom_infos:
print("No valid POM files could be parsed")
return
# Generate the workflow content
workflow_content = generate_gitea_workflow(pom_infos)
if not workflow_content:
if not project_infos:
print("No valid project files could be parsed")
return
# Count by build system
maven_count = sum(1 for p in project_infos if p["build_system"] == "maven")
gradle_count = sum(1 for p in project_infos if p["build_system"] == "gradle")
# Create the .gitea/workflows directory if it doesn't exist
workflow_dir = Path(".gitea/workflows")
workflow_dir.mkdir(parents=True, exist_ok=True)
# Write the workflow file
workflow_file = workflow_dir / "maven_build.yaml"
with open(workflow_file, "w") as f:
f.write(workflow_content)
# Generate and write workflow files for each build system
if maven_count > 0:
maven_infos = [p for p in project_infos if p["build_system"] == "maven"]
maven_workflow = generate_maven_workflow(maven_infos)
if maven_workflow:
maven_workflow_file = workflow_dir / "maven_build.yaml"
with open(maven_workflow_file, "w") as f:
f.write(maven_workflow)
print(f"Gitea Maven workflow generated at: {maven_workflow_file}")
if gradle_count > 0:
gradle_infos = [p for p in project_infos if p["build_system"] == "gradle"]
gradle_workflow = generate_gradle_workflow(gradle_infos)
if gradle_workflow:
gradle_workflow_file = workflow_dir / "gradle_build.yaml"
with open(gradle_workflow_file, "w") as f:
f.write(gradle_workflow)
print(f"Gitea Gradle workflow generated at: {gradle_workflow_file}")
print(f"Gitea workflow generated at: {workflow_file}")
print(f"This workflow will build {len(pom_infos)} Maven projects")
# Generate a universal build script
build_script = generate_build_script(project_infos)
with open("build.sh", "w") as f:
f.write(build_script)
os.chmod("build.sh", 0o755)
print(f"Universal build script generated at: build.sh")
# Print summary of detected projects
print("\nDetected projects:")
for info in pom_infos:
# Print a per-project summary: build system, Kotlin usage, Java version, build command
for info in project_infos:
kotlin_info = "with Kotlin" if info["is_kotlin"] else "Java only"
build_command = info["default_goal"] if info["default_goal"] else "clean package"
print(
f"- {info['name']} ({info['artifact_id']}): {kotlin_info}, Java {info['java_version']}, build: {build_command}")
# Create a simple direct build script as fallback
with open("build.sh", "w") as f:
f.write("""#!/bin/bash
# Direct build script for Maven project
echo "Current directory: $(pwd)"
echo "Building project with Maven..."
# Run Maven build using the exact pom.xml location
mvn clean package -f "$(pwd)/pom.xml"
echo "Build complete. JAR file should be in target/ directory."
""")
# Make it executable
os.chmod("build.sh", 0o755)
print(f"Simple build script generated at: build.sh")
if info["build_system"] == "maven":
build_command = info["default_goal"] if info["default_goal"] else "clean package"
print(f"- {info['name']} ({info['artifact_id']}): Maven, {kotlin_info}, Java {info['java_version']}, build: {build_command}")
else:
dsl_info = "Kotlin DSL" if info.get("is_kotlin_dsl") else "Groovy DSL"
print(f"- {info['name']} ({info['artifact_id']}): Gradle ({dsl_info}), {kotlin_info}, Java {info['java_version']}")
# Export as JSON if requested
if args.output_json:
with open("project_info.json", "w") as f:
json.dump(project_infos, f, indent=2)
print(f"Project information exported to project_info.json")
# Run script!
if __name__ == "__main__":
main()
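
The Gradle handling in this script is regex-driven, so the three Java-version patterns it tries can be checked in isolation against invented `build.gradle` fragments:

```python
# Sketch: exercise pommer.py's three Java-version patterns on made-up Gradle snippets.
import re

samples = {
    "enum style":      "sourceCompatibility = JavaVersion.VERSION_17",
    "quoted style":    "sourceCompatibility = '11'",
    "toolchain style": "java { toolchain { languageVersion = JavaLanguageVersion.of(21) } }",
}

patterns = [
    r'sourceCompatibility\s*=\s*[\'"]*JavaVersion\.VERSION_(\d+)[\'"]*',
    r'sourceCompatibility\s*=\s*[\'"](\d+)[\'"]',
    r'toolchain\s*{[^}]*languageVersion\s*=\s*JavaLanguageVersion\s*\.\s*of\s*\(\s*(\d+)\s*\)',
]

for label, snippet in samples.items():
    for pattern in patterns:
        if match := re.search(pattern, snippet, re.DOTALL):
            print(f"{label}: Java {match.group(1)}")  # prints 17, 11, 21 respectively
            break
```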

View File

@@ -4,26 +4,26 @@
---
**Pommer** is an internal Python tool designed to streamline **CI workflow generation for Java/Kotlin projects** using **Maven**. It scans your project for `pom.xml` files, analyzes project metadata, and generates a Gitea CI workflow along with a local build script for convenience.
**Pommer** is an internal Python tool designed to streamline CI workflow generation for **Java/Kotlin projects** using **Gradle**. It scans your project for build files, analyzes project metadata, and generates a Gitea CI workflow along with a local build script for convenience.
> **This is the stable, Maven-only branch.**
> For Gradle support, switch to the `master` branch.
> 🛠 **Now supports Gradle!**
> The **Maven-compatible version** is maintained on a separate Git branch.
---
## ⚠️ Internal Use Only
> **This tool is for internal use within the local Gitea Instance only. Do not distribute or share externally.**
> **This tool is for internal use within Gitea only. Do not distribute or share externally.**
---
## Features
* 🔍 **Automatic discovery** of Maven projects (`pom.xml`)
* 📦 **Parses project metadata** (artifact ID, group ID, version, Java/Kotlin usage, etc.)
* 🏗️ **Generates a Gitea CI workflow**, compatible with multi-module Maven setups
* 🧪 **Creates a `build.sh` script** for local Maven testing
* 🎯 **Configurable**: Target a specific directory or file
* 🔍 **Automatic discovery** of Gradle (`build.gradle` / `build.gradle.kts`) and Maven (`pom.xml`) projects.
* 📦 **Parses project metadata** (artifact ID, group ID, version, Java/Kotlin usage, etc.).
* 🏗️ **Generates a Gitea CI workflow**, compatible with multi-module projects.
* 🧪 **Creates a `build.sh` script** for local testing and development.
* 🎯 **Configurable**: Target a specific directory or file to process.
---
@@ -37,10 +37,11 @@ python3 pommer.py
### Options
| Option | Description |
| ---------------------- | ------------------------------------- |
| `-d`, `--dir` | Base directory to scan (default: `.`) |
| `-p`, `--specific-pom` | Path to a specific `pom.xml` file |
| Option | Description |
| ----------------------------------------------------------------------------------- | -------------------------------------------------- |
| `-d`, `--dir` | Base directory to scan (default: `.`) |
| `-p`, `--specific-pom` | (For Maven only) Path to a specific `pom.xml` file |
| *Gradle projects are detected automatically by the presence of `build.gradle` / `build.gradle.kts`.* | |
#### Examples
@@ -49,8 +50,7 @@ python3 pommer.py
```bash
python3 pommer.py --dir path/to/project
```
* Use a specific POM file:
* Use a specific Maven POM file (legacy mode):
```bash
python3 pommer.py --specific-pom path/to/pom.xml
@@ -62,11 +62,11 @@ python3 pommer.py
* 📄 **Gitea Workflow:**
`.gitea/workflows/build.yaml`
Automates build and artifact upload for all detected Maven projects.
Automates build and artifact upload for all detected Gradle (and Maven, if applicable) projects.
* 🔧 **Build Script:**
`build.sh`
Local script to build all Maven projects easily.
Simple, local script to build all projects using Gradle or Maven, as detected.
---
@@ -74,11 +74,11 @@ python3 pommer.py
> ⚠️ **Slow compile or stuck builds?**
>
> * **Installing JDK** and setting up the runner can take time.
> * If its stuck on **"Downloading JDK 21"**, be patient and **check network usage**.
> * **No activity?** → Restart the runner.
> * Final build steps are **slow but usually fine**.
> * **Local builds slow?** → Expect slower CI/CD times.
> * **Installing JDK** and setting up the runner can take a while.
> * If it seems stuck on **"Downloading JDK 21"**, be patient and **monitor server network usage**.
> * **No network activity for several minutes?** → Try **restarting the runner**.
> * The **final steps ("finishing up") are slow**, but usually not broken.
> * **Local builds slow?** → It'll be even slower on CI/CD.
---
@@ -91,21 +91,19 @@ python3 pommer.py
## Notes
* This branch **only supports Maven**.
* For Gradle support, use the `gradle-support` branch.
* Java/Kotlin usage is detected automatically.
* The workflow uses the **highest JDK version** found.
* Gradle is now the primary build system supported.
* Maven support is preserved in the legacy branch.
* Java/Kotlin support is detected automatically across modules.
* The workflow always uses the **highest JDK version** found in the project tree.
---
## Support
Contact **rattatwinko** for help or feature requests.
Contact rattatwinko for help or feature requests.
---
## License
**Internal Use Only - Not for Distribution**
More details are in pommer.py

View File

@@ -1,3 +1 @@
pathlib
argparse
pycopy-xml.etree.elementtree
pyinstaller

tests.py: new file, 1919 additions (diff suppressed because it is too large)