CI: Replace pytest with pre-commit hook

This commit is contained in:
Thaddeus Crews 2025-03-18 12:03:27 -05:00
parent ba3482926d
commit adc63c6149
No known key found for this signature in database
GPG Key ID: 8C6E5FEB5FC03CCC
16 changed files with 76 additions and 245 deletions

View File

@ -13,15 +13,10 @@ jobs:
with:
fetch-depth: 2
- name: Install APT dependencies
# This needs to happen before Python and npm execution; it must happen before any extra files are written.
- name: .gitignore checks (gitignore_check.sh)
run: |
sudo apt update
sudo apt install -y libxml2-utils
- name: Install Python dependencies and general setup
run: |
pip3 install pytest==7.1.2
git config diff.wsErrorHighlight all
bash ./misc/scripts/gitignore_check.sh
- name: Get changed files
env:
@ -32,27 +27,17 @@ jobs:
elif [ "${{ github.event_name }}" == "push" -a "${{ github.event.forced }}" == "false" -a "${{ github.event.created }}" == "false" ]; then
files=$(git diff-tree --no-commit-id --name-only -r ${{ github.event.before }}..${{ github.event.after }} 2> /dev/null || true)
fi
echo "$files" >> changed.txt
cat changed.txt
files=$(echo "$files" | grep -v 'thirdparty' | xargs -I {} sh -c 'echo "\"./{}\""' | tr '\n' ' ')
files=$(echo "$files" | xargs -I {} sh -c 'echo "\"./{}\""' | tr '\n' ' ')
echo "CHANGED_FILES=$files" >> $GITHUB_ENV
# This needs to happen before Python and npm execution; it must happen before any extra files are written.
- name: .gitignore checks (gitignore_check.sh)
run: |
bash ./misc/scripts/gitignore_check.sh
- name: Style checks via pre-commit
uses: pre-commit/action@v3.0.1
with:
extra_args: --files ${{ env.CHANGED_FILES }}
- name: Python builders checks via pytest
run: |
pytest ./tests/python_build
- name: Class reference schema checks
run: |
sudo apt install -y libxml2-utils
xmllint --quiet --noout --schema doc/class.xsd doc/classes/*.xml modules/*/doc_classes/*.xml platform/*/doc_classes/*.xml
- name: Run C compiler on `gdextension_interface.h`

View File

@ -16,12 +16,10 @@ repos:
- id: clang-format
files: \.(c|h|cpp|hpp|cc|hh|cxx|hxx|m|mm|inc|java)$
types_or: [text]
exclude: ^tests/python_build/.*
- id: clang-format
name: clang-format-glsl
files: \.glsl$
types_or: [text]
exclude: ^tests/python_build/.*
args: [-style=file:misc/utility/clang_format_glsl.yml]
- repo: https://github.com/pocc/pre-commit-hooks
@ -31,7 +29,6 @@ repos:
files: \.(c|h|cpp|hpp|cc|hh|cxx|hxx|m|mm|inc|java|glsl)$
args: [--fix, --quiet, --use-color]
types_or: [text]
exclude: ^tests/python_build/.*
additional_dependencies: [clang-tidy==19.1.0]
require_serial: true
stages: [manual] # Not automatically triggered, invoked via `pre-commit run --hook-stage manual clang-tidy`
@ -48,7 +45,7 @@ repos:
types_or: [text]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.14.1 # Latest version that supports Python 3.8
rev: v1.14.1 # Latest version that supports Python 3.8
hooks:
- id: mypy
files: \.py$
@ -88,6 +85,13 @@ repos:
pass_filenames: false
files: ^(doc/classes|.*/doc_classes)/.*\.xml$
- id: validate-builders
name: validate-builders
language: python
entry: python tests/python_build/validate_builders.py
pass_filenames: false
files: ^(gles3|glsl)_builders\.py$
- id: eslint
name: eslint
language: node

View File

@ -1,26 +0,0 @@
import os
import sys
from pathlib import Path

import pytest

# Directory containing this conftest; test fixtures live under ./fixtures.
CWD = Path(__file__).parent
# Repository root, two levels up from tests/python_build.
ROOT = CWD.parent.parent
# append directory with build files to sys.path to import them
sys.path.append(str(ROOT))


@pytest.fixture
def shader_files(request):
    """Resolve fixture paths for one shader test case and clean up afterwards.

    Used with indirect parametrization: ``request.param`` is the fixture's
    relative name (e.g. ``"gles3/vertex_fragment"``). Yields a dict with:
    - ``path_input``: the source ``.glsl`` shader,
    - ``path_output``: the ``.glsl.gen.h`` file the builder writes,
    - ``path_expected_full``: golden copy of the full generated output,
    - ``path_expected_parts``: JSON of the expected parsed header fields.

    After the test, the generated output file is deleted unless the
    PYTEST_KEEP_GENERATED_FILES environment variable is set.
    """
    shader_path = request.param
    res = {
        "path_input": str(CWD / "fixtures" / f"{shader_path}.glsl"),
        "path_output": str(CWD / "fixtures" / f"{shader_path}.glsl.gen.h"),
        "path_expected_full": str(CWD / "fixtures" / f"{shader_path}_expected_full.glsl"),
        "path_expected_parts": str(CWD / "fixtures" / f"{shader_path}_expected_parts.json"),
    }
    yield res
    # Teardown: remove the generated header so reruns start clean.
    if not os.getenv("PYTEST_KEEP_GENERATED_FILES"):
        os.remove(res["path_output"])

View File

@ -1,53 +0,0 @@
{
"vertex_lines": [
"",
"precision highp float;",
"precision highp int;",
"",
"layout(location = 0) in highp vec3 vertex;",
"",
"out highp vec4 position_interp;",
"",
"void main() {",
"\tposition_interp = vec4(vertex.x,1,0,1);",
"}",
""
],
"fragment_lines": [
"",
"precision highp float;",
"precision highp int;",
"",
"in highp vec4 position_interp;",
"",
"void main() {",
"\thighp float depth = ((position_interp.z / position_interp.w) + 1.0);",
"\tfrag_color = vec4(depth);",
"}"
],
"uniforms": [],
"fbos": [],
"texunits": [],
"texunit_names": [],
"ubos": [],
"ubo_names": [],
"feedbacks": [],
"vertex_included_files": [],
"fragment_included_files": [],
"reading": "fragment",
"line_offset": 33,
"vertex_offset": 10,
"fragment_offset": 23,
"variant_defines": [
"#define USE_NINEPATCH"
],
"variant_names": [
"MODE_NINEPATCH"
],
"specialization_names": [
"DISABLE_LIGHTING"
],
"specialization_values": [
" false\n"
]
}

View File

@ -1,3 +0,0 @@
{
"code": "#[compute]\n\n#version 450\n\n#VERSION_DEFINES\n\n\n#define M_PI 3.14159265359\n\nvoid main() {\n\tvec3 static_light = vec3(0, 1, 0);\n}\n"
}

View File

@ -1,3 +0,0 @@
{
"code": "#[versions]\n\nlines = \"#define MODE_LINES\";\n\n#[vertex]\n\n#version 450\n\n#VERSION_DEFINES\n\nlayout(location = 0) out vec3 uv_interp;\n\nvoid main() {\n\n#ifdef MODE_LINES\n\tuv_interp = vec3(0,0,1);\n#endif\n}\n\n#[fragment]\n\n#version 450\n\n#VERSION_DEFINES\n\n#define M_PI 3.14159265359\n\nlayout(location = 0) out vec4 dst_color;\n\nvoid main() {\n\tdst_color = vec4(1,1,0,0);\n}\n"
}

View File

@ -1,28 +0,0 @@
{
"vertex_lines": [],
"fragment_lines": [],
"compute_lines": [
"",
"#version 450",
"",
"#VERSION_DEFINES",
"",
"#define BLOCK_SIZE 8",
"",
"#define M_PI 3.14159265359",
"",
"void main() {",
"\tuint t = BLOCK_SIZE + 1;",
"}"
],
"vertex_included_files": [],
"fragment_included_files": [],
"compute_included_files": [
"tests/python_build/fixtures/rd_glsl/_included.glsl"
],
"reading": "compute",
"line_offset": 13,
"vertex_offset": 0,
"fragment_offset": 0,
"compute_offset": 1
}

View File

@ -1,40 +0,0 @@
{
"vertex_lines": [
"",
"#version 450",
"",
"#VERSION_DEFINES",
"",
"#define M_PI 3.14159265359",
"",
"layout(location = 0) out vec2 uv_interp;",
"",
"void main() {",
"\tuv_interp = vec2(0, 1);",
"}",
""
],
"fragment_lines": [
"",
"#version 450",
"",
"#VERSION_DEFINES",
"",
"layout(location = 0) in vec2 uv_interp;",
"",
"void main() {",
"\tuv_interp = vec2(1, 0);",
"}"
],
"compute_lines": [],
"vertex_included_files": [
"tests/python_build/fixtures/rd_glsl/_included.glsl"
],
"fragment_included_files": [],
"compute_included_files": [],
"reading": "fragment",
"line_offset": 25,
"vertex_offset": 1,
"fragment_offset": 15,
"compute_offset": 0
}

View File

@ -1,31 +0,0 @@
import json

import pytest

from gles3_builders import GLES3HeaderStruct, build_gles3_header


@pytest.mark.parametrize(
    ["shader_files", "builder", "header_struct"],
    [
        ("gles3/vertex_fragment", build_gles3_header, GLES3HeaderStruct),
    ],
    indirect=["shader_files"],
)
def test_gles3_builder(shader_files, builder, header_struct):
    """Run the GLES3 header builder on a fixture shader and check its output.

    Verifies both the parsed in-memory header structure (against a JSON
    fixture) and the generated file on disk (against a golden copy).
    """
    parsed = header_struct()
    builder(shader_files["path_input"], "drivers/gles3/shader_gles3.h", "GLES3", header_data=parsed)

    # The parsed header fields must match the recorded expectation.
    with open(shader_files["path_expected_parts"], "r", encoding="utf-8") as parts_file:
        assert json.load(parts_file) == parsed.__dict__

    # The generated file must be non-empty and identical to the golden copy.
    with open(shader_files["path_output"], "r", encoding="utf-8") as out_file:
        generated = out_file.read()
    assert generated

    with open(shader_files["path_expected_full"], "r", encoding="utf-8") as golden_file:
        assert generated == golden_file.read()

View File

@ -1,37 +0,0 @@
import json

import pytest

from glsl_builders import RAWHeaderStruct, RDHeaderStruct, build_raw_header, build_rd_header


@pytest.mark.parametrize(
    [
        "shader_files",
        "builder",
        "header_struct",
    ],
    [
        ("glsl/vertex_fragment", build_raw_header, RAWHeaderStruct),
        ("glsl/compute", build_raw_header, RAWHeaderStruct),
        ("rd_glsl/vertex_fragment", build_rd_header, RDHeaderStruct),
        ("rd_glsl/compute", build_rd_header, RDHeaderStruct),
    ],
    indirect=["shader_files"],
)
def test_glsl_builder(shader_files, builder, header_struct):
    """Run a raw/RD GLSL header builder on a fixture shader and check output.

    Covers both builder families (raw and RenderingDevice) over compute and
    vertex/fragment shaders; compares the parsed structure with a JSON
    fixture and the generated file with a golden copy.
    """
    parsed = header_struct()
    builder(shader_files["path_input"], header_data=parsed)

    # The parsed header fields must match the recorded expectation.
    with open(shader_files["path_expected_parts"], "r", encoding="utf-8") as parts_file:
        assert json.load(parts_file) == parsed.__dict__

    # The generated file must be non-empty and identical to the golden copy.
    with open(shader_files["path_output"], "r", encoding="utf-8") as out_file:
        generated = out_file.read()
    assert generated

    with open(shader_files["path_expected_full"], "r", encoding="utf-8") as golden_file:
        assert generated == golden_file.read()

View File

@ -0,0 +1,63 @@
#!/usr/bin/env python3
from __future__ import annotations
if __name__ != "__main__":
raise ImportError(f"{__name__} should not be used as a module.")
import os
import sys
from typing import Any, Callable
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../"))
from gles3_builders import build_gles3_header
from glsl_builders import build_raw_header, build_rd_header
FUNC_PATH_KWARGS: list[tuple[Callable[..., None], str, dict[str, Any]]] = [
(
build_gles3_header,
"tests/python_build/fixtures/gles3/vertex_fragment.out",
{"shader": "tests/python_build/fixtures/gles3/vertex_fragment.glsl"},
),
(
build_raw_header,
"tests/python_build/fixtures/glsl/compute.out",
{"shader": "tests/python_build/fixtures/glsl/compute.glsl"},
),
(
build_raw_header,
"tests/python_build/fixtures/glsl/vertex_fragment.out",
{"shader": "tests/python_build/fixtures/glsl/vertex_fragment.glsl"},
),
(
build_rd_header,
"tests/python_build/fixtures/rd_glsl/compute.out",
{"shader": "tests/python_build/fixtures/rd_glsl/compute.glsl"},
),
(
build_rd_header,
"tests/python_build/fixtures/rd_glsl/vertex_fragment.out",
{"shader": "tests/python_build/fixtures/rd_glsl/vertex_fragment.glsl"},
),
]
def main() -> int:
ret = 0
for func, path, kwargs in FUNC_PATH_KWARGS:
if os.path.exists(out_path := os.path.abspath(path)):
with open(out_path, "rb") as file:
raw = file.read()
func(path, **kwargs)
with open(out_path, "rb") as file:
if raw != file.read():
ret += 1
else:
func(path, **kwargs)
ret += 1
return ret
sys.exit(main())