mirror of
https://github.com/GeWuYou/GFramework.git
synced 2026-05-08 17:44:29 +08:00
Compare commits
179 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7ca21af92d | ||
|
|
769d036434 | ||
|
|
9bd8c34693 | ||
|
|
39ac61c095 | ||
|
|
24462b0035 | ||
|
|
c82e981b7e | ||
|
|
d9547dae4b | ||
|
|
120a1487f5 | ||
|
|
4d6dbba6a0 | ||
|
|
32eeb41f29 | ||
|
|
5da4a5893b | ||
|
|
18018966f9 | ||
|
|
5dc2dd25b9 | ||
|
|
e44c56fb46 | ||
|
|
aebf1e974d | ||
|
|
02a60df718 | ||
|
|
77820da820 | ||
|
|
55639c559c | ||
|
|
042b74473f | ||
|
|
55c2a1ae69 | ||
|
|
debc9f27ac | ||
|
|
8f6e6e121e | ||
|
|
d010026448 | ||
|
|
54b79d99d3 | ||
|
|
ffb0a8aff5 | ||
|
|
44d1a89a0b | ||
|
|
cca413042f | ||
|
|
dc3bd3744e | ||
|
|
6056159866 | ||
|
|
d7293aa475 | ||
|
|
017e689abd | ||
|
|
2c58d8b69e | ||
|
|
14cd1fc9a0 | ||
|
|
577c89fdf3 | ||
|
|
a692190a77 | ||
|
|
c3df2b2c96 | ||
|
|
ee8b6a4deb | ||
|
|
ff04a4fbad | ||
|
|
e3fa0db992 | ||
|
|
c2d22285ed | ||
|
|
e3d6aa5111 | ||
|
|
30ddb841a9 | ||
|
|
c65c131d6a | ||
|
|
f0a2978882 | ||
|
|
3233151207 | ||
|
|
0ec8aa076b | ||
|
|
588800bb7b | ||
|
|
ee41206965 | ||
|
|
db89918333 | ||
|
|
f25ccccad2 | ||
|
|
ab9829044f | ||
|
|
109bce6e9e | ||
|
|
6d619b9a1f | ||
|
|
2cb6216d05 | ||
|
|
f71791ae98 | ||
|
|
2ac02c1a6f | ||
|
|
449eeb9606 | ||
|
|
c01abac06e | ||
|
|
6e1eaf8f5c | ||
|
|
e0bbf13d88 | ||
|
|
f776d09f68 | ||
|
|
a8f98e467d | ||
|
|
e6f98cb4af | ||
|
|
96729ddcf1 | ||
|
|
cb6dd8a510 | ||
|
|
a8c6c11e9e | ||
|
|
d9ceb83c2c | ||
|
|
7288114e33 | ||
|
|
c69942d66e | ||
|
|
212d5b1cce | ||
|
|
b1f406ad99 | ||
|
|
61cc1be1e5 | ||
|
|
915d93d06d | ||
|
|
e17fa15a01 | ||
|
|
857ce08edb | ||
|
|
0ac53a4cee | ||
|
|
ac95202f9c | ||
|
|
478072acc3 | ||
|
|
53870c1f92 | ||
|
|
64c5ecb3ca | ||
|
|
2ccacb8102 | ||
|
|
ee998503b3 | ||
|
|
69ea92c149 | ||
|
|
c5ca161cb5 | ||
|
|
53f8baf2ef | ||
|
|
fe1a875785 | ||
|
|
4153ea59b8 | ||
|
|
ff553977e3 | ||
|
|
a0591afa18 | ||
|
|
d5d34a626c | ||
|
|
230cd0e5d1 | ||
|
|
6fa1c20d75 | ||
|
|
64e5d8d11d | ||
|
|
3ced56be8b | ||
|
|
1009fee4a4 | ||
|
|
40cce565e6 | ||
|
|
918a61f3b2 | ||
|
|
c967b4df3d | ||
|
|
b4b3538b21 | ||
|
|
a52f3c6fec | ||
|
|
748bb714fb | ||
|
|
36e1ae5f32 | ||
|
|
6aa741114f | ||
|
|
5306c98470 | ||
|
|
35a62e6bfb | ||
|
|
43094fba83 | ||
|
|
a1b3576b09 | ||
|
|
e391833615 | ||
|
|
a870ea28a8 | ||
|
|
3cb0177936 | ||
|
|
6983b7ee84 | ||
|
|
00ecf6fb10 | ||
|
|
52b96ed36f | ||
|
|
85a8b35154 | ||
|
|
9581682231 | ||
|
|
896e3efaa9 | ||
|
|
4fdb1e7398 | ||
|
|
241c9ffeb3 | ||
|
|
7e77fee0a5 | ||
|
|
103b961e6f | ||
|
|
1c21df1414 | ||
|
|
26314dba5e | ||
|
|
9296def108 | ||
|
|
83528742bb | ||
|
|
36db7d0929 | ||
|
|
e671646a74 | ||
|
|
8b36626266 | ||
|
|
040bcb99e4 | ||
|
|
1091594224 | ||
|
|
502f65239c | ||
|
|
dc21188c79 | ||
|
|
6b5c5d9e2d | ||
|
|
5a77e2fb33 | ||
|
|
eb30388267 | ||
|
|
172c08176c | ||
|
|
ea0b937705 | ||
|
|
85f7c1707e | ||
|
|
01f1e5fd72 | ||
|
|
e8203bc76e | ||
|
|
7e62313b24 | ||
|
|
74f853bffe | ||
|
|
56a96b50fd | ||
|
|
0721cafd03 | ||
|
|
fdcb11c92c | ||
|
|
e8cceac7ae | ||
|
|
7f98cafbfa | ||
|
|
3f335f19d6 | ||
|
|
13b77eb3fe | ||
|
|
eddce21383 | ||
|
|
fad391e8cf | ||
|
|
d6a154726c | ||
|
|
f17f9f3da6 | ||
|
|
98477068d6 | ||
|
|
8d6fc74b3d | ||
|
|
0f1e91a499 | ||
|
|
0c65cd8e38 | ||
|
|
7209fdc32d | ||
|
|
c1dfee3c71 | ||
|
|
b015a91e57 | ||
|
|
f44629deb3 | ||
|
|
e1af8ac833 | ||
|
|
22f608eb4d | ||
|
|
a3fe2974f7 | ||
|
|
5eea12b5ba | ||
|
|
72ce0f1199 | ||
|
|
98021f59e7 | ||
|
|
255a6a152e | ||
|
|
bc365197e8 | ||
|
|
a445807b83 | ||
|
|
52b9ddd4a7 | ||
|
|
57d848546f | ||
|
|
226c0b3b49 | ||
|
|
36596210ff | ||
|
|
16cd96b94b | ||
|
|
5365f9aec2 | ||
|
|
e51b64f8d5 | ||
|
|
7b5efde3bd | ||
|
|
e81a43680d | ||
|
|
3b4eb3e40a |
@ -12,6 +12,10 @@ batches until a clear stop condition is met.
|
|||||||
|
|
||||||
Treat `AGENTS.md` as the source of truth. This skill extends `gframework-boot`; it does not replace it.
|
Treat `AGENTS.md` as the source of truth. This skill extends `gframework-boot`; it does not replace it.
|
||||||
|
|
||||||
|
Context budget is a first-class stop signal. Do not keep batching merely because a file-count threshold still has
|
||||||
|
headroom if the active conversation, loaded repo artifacts, validation output, and pending recovery updates suggest the
|
||||||
|
agent is approaching its safe working-context limit.
|
||||||
|
|
||||||
## Startup Workflow
|
## Startup Workflow
|
||||||
|
|
||||||
1. Execute the normal `gframework-boot` startup sequence first:
|
1. Execute the normal `gframework-boot` startup sequence first:
|
||||||
@ -28,6 +32,11 @@ Treat `AGENTS.md` as the source of truth. This skill extends `gframework-boot`;
|
|||||||
- repeated test refactor pattern
|
- repeated test refactor pattern
|
||||||
- module-by-module documentation refresh
|
- module-by-module documentation refresh
|
||||||
- other repetitive multi-file cleanup
|
- other repetitive multi-file cleanup
|
||||||
|
4. Before the first implementation batch, estimate whether the current task is likely to stay below roughly 80% of the
|
||||||
|
agent's safe working-context budget through one more full batch cycle:
|
||||||
|
- include already loaded `AGENTS.md`, skills, `ai-plan` files, recent command output, active diffs, and expected validation output
|
||||||
|
- if another batch would probably push the conversation near the limit, plan to stop after the current batch even if
|
||||||
|
branch-size thresholds still have room
|
||||||
|
|
||||||
## Baseline Selection
|
## Baseline Selection
|
||||||
|
|
||||||
@ -67,8 +76,15 @@ For shorthand numeric thresholds, use a fixed default baseline:
|
|||||||
|
|
||||||
Choose one primary stop condition before the first batch and restate it to the user.
|
Choose one primary stop condition before the first batch and restate it to the user.
|
||||||
|
|
||||||
|
When the user does not explicitly override the priority order, use:
|
||||||
|
|
||||||
|
1. context-budget safety
|
||||||
|
2. semantic batch boundary / reviewability
|
||||||
|
3. the user-requested local metric such as files, lines, warnings, or time
|
||||||
|
|
||||||
Common stop conditions:
|
Common stop conditions:
|
||||||
|
|
||||||
|
- the next batch would likely push the agent above roughly 80% of its safe working-context budget
|
||||||
- branch diff vs baseline approaches a file-count threshold
|
- branch diff vs baseline approaches a file-count threshold
|
||||||
- warnings-only build reaches a target count
|
- warnings-only build reaches a target count
|
||||||
- a specific hotspot list is exhausted
|
- a specific hotspot list is exhausted
|
||||||
@ -76,6 +92,9 @@ Common stop conditions:
|
|||||||
|
|
||||||
If multiple stop conditions exist, rank them and treat one as primary.
|
If multiple stop conditions exist, rank them and treat one as primary.
|
||||||
|
|
||||||
|
Treat file-count or line-count thresholds as coarse repository-scope signals, not as a proxy for AI context health.
|
||||||
|
When they disagree with context-budget safety, context-budget safety wins.
|
||||||
|
|
||||||
## Shorthand Stop-Condition Syntax
|
## Shorthand Stop-Condition Syntax
|
||||||
|
|
||||||
`gframework-batch-boot` may be invoked with shorthand numeric thresholds when the user clearly wants a branch-size stop
|
`gframework-batch-boot` may be invoked with shorthand numeric thresholds when the user clearly wants a branch-size stop
|
||||||
@ -108,6 +127,7 @@ When shorthand is used:
|
|||||||
- current branch and active topic
|
- current branch and active topic
|
||||||
- selected baseline
|
- selected baseline
|
||||||
- current stop-condition metric
|
- current stop-condition metric
|
||||||
|
- current context-budget posture and whether one more batch is safe
|
||||||
- next candidate slices
|
- next candidate slices
|
||||||
2. Keep the critical path local.
|
2. Keep the critical path local.
|
||||||
3. Delegate only bounded slices with explicit ownership:
|
3. Delegate only bounded slices with explicit ownership:
|
||||||
@ -128,6 +148,7 @@ When shorthand is used:
|
|||||||
- integrate or verify the result
|
- integrate or verify the result
|
||||||
- rerun the required validation
|
- rerun the required validation
|
||||||
- recompute the primary stop-condition metric
|
- recompute the primary stop-condition metric
|
||||||
|
- reassess whether one more batch would likely push the agent near or beyond roughly 80% context usage
|
||||||
- decide immediately whether to continue or stop
|
- decide immediately whether to continue or stop
|
||||||
7. Do not require the user to manually trigger every round unless:
|
7. Do not require the user to manually trigger every round unless:
|
||||||
- the next slice is ambiguous
|
- the next slice is ambiguous
|
||||||
@ -158,6 +179,7 @@ For multi-batch work, keep recovery artifacts current.
|
|||||||
|
|
||||||
Stop the loop when any of the following becomes true:
|
Stop the loop when any of the following becomes true:
|
||||||
|
|
||||||
|
- the next batch would likely push the agent near or beyond roughly 80% of its safe working-context budget
|
||||||
- the primary stop condition has been reached or exceeded
|
- the primary stop condition has been reached or exceeded
|
||||||
- the remaining slices are no longer low-risk
|
- the remaining slices are no longer low-risk
|
||||||
- validation failures indicate the task is no longer repetitive
|
- validation failures indicate the task is no longer repetitive
|
||||||
@ -165,6 +187,7 @@ Stop the loop when any of the following becomes true:
|
|||||||
|
|
||||||
When stopping, report:
|
When stopping, report:
|
||||||
|
|
||||||
|
- whether context budget was the deciding factor
|
||||||
- which baseline was used
|
- which baseline was used
|
||||||
- the exact metric value at stop time
|
- the exact metric value at stop time
|
||||||
- completed batches
|
- completed batches
|
||||||
|
|||||||
@ -36,14 +36,18 @@ Treat `AGENTS.md` as the source of truth. Use this skill to enforce a startup se
|
|||||||
- `simple`: one concern, one file or module, no parallel discovery required
|
- `simple`: one concern, one file or module, no parallel discovery required
|
||||||
- `medium`: a small number of modules, some read-only exploration helpful, critical path still easy to keep local
|
- `medium`: a small number of modules, some read-only exploration helpful, critical path still easy to keep local
|
||||||
- `complex`: cross-module design, migration, large refactor, or work likely to exceed one context window
|
- `complex`: cross-module design, migration, large refactor, or work likely to exceed one context window
|
||||||
11. Apply the delegation policy from `AGENTS.md`:
|
11. Estimate the current context-budget posture before substantive execution:
|
||||||
|
- account for loaded startup artifacts, active `ai-plan` files, visible diffs, open validation output, and likely next-step output volume
|
||||||
|
- if the task already appears near roughly 80% of a safe working-context budget, prefer closing the current batch,
|
||||||
|
refreshing recovery artifacts, and stopping at the next natural semantic boundary instead of starting a fresh broad slice
|
||||||
|
12. Apply the delegation policy from `AGENTS.md`:
|
||||||
- Keep the critical path local
|
- Keep the critical path local
|
||||||
- Use `explorer` with `gpt-5.1-codex-mini` for narrow read-only questions, tracing, inventory, and comparisons
|
- Use `explorer` with `gpt-5.1-codex-mini` for narrow read-only questions, tracing, inventory, and comparisons
|
||||||
- Use `worker` with `gpt-5.4` only for bounded implementation tasks with explicit ownership
|
- Use `worker` with `gpt-5.4` only for bounded implementation tasks with explicit ownership
|
||||||
- Do not delegate purely for ceremony; delegate only when it materially shortens the task or controls context growth
|
- Do not delegate purely for ceremony; delegate only when it materially shortens the task or controls context growth
|
||||||
12. Before editing files, tell the user what you read, how you classified the task, whether subagents will be used,
|
13. Before editing files, tell the user what you read, how you classified the task, whether subagents will be used,
|
||||||
and the first implementation step.
|
and the first implementation step.
|
||||||
13. Proceed with execution, validation, and documentation updates required by `AGENTS.md`.
|
14. Proceed with execution, validation, and documentation updates required by `AGENTS.md`.
|
||||||
|
|
||||||
## Task Tracking
|
## Task Tracking
|
||||||
|
|
||||||
@ -69,6 +73,8 @@ For multi-step, cross-module, or interruption-prone work, maintain the repositor
|
|||||||
first, then search the mapped active topics before scanning the broader public area.
|
first, then search the mapped active topics before scanning the broader public area.
|
||||||
- If the current branch and the mapped active topics describe the same feature area, prefer resuming those topics first.
|
- If the current branch and the mapped active topics describe the same feature area, prefer resuming those topics first.
|
||||||
- If the repository state suggests in-flight work but no recovery document matches, reconstruct the safest next step from code, tests, and Git state before asking the user for clarification.
|
- If the repository state suggests in-flight work but no recovery document matches, reconstruct the safest next step from code, tests, and Git state before asking the user for clarification.
|
||||||
|
- If the current turn already carries heavy recovery context, broad diffs, or long validation output, prefer a
|
||||||
|
recovery-point update and a clean stop over starting another large slice just because the code task itself remains open.
|
||||||
|
|
||||||
## Example Triggers
|
## Example Triggers
|
||||||
|
|
||||||
|
|||||||
83
.agents/skills/gframework-issue-review/SKILL.md
Normal file
83
.agents/skills/gframework-issue-review/SKILL.md
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
---
|
||||||
|
name: gframework-issue-review
|
||||||
|
description: Repository-specific GitHub issue triage workflow for the GFramework repo. Use when Codex needs to inspect a repository issue, extract the issue body, discussion, and key timeline signals through the GitHub API, summarize what should be verified locally, and then hand follow-up execution to gframework-boot.
|
||||||
|
---
|
||||||
|
|
||||||
|
# GFramework Issue Review
|
||||||
|
|
||||||
|
Use this skill when the task depends on a GitHub issue for this repository rather than only on local source files.
|
||||||
|
|
||||||
|
Shortcut: `$gframework-issue-review`
|
||||||
|
|
||||||
|
## Workflow
|
||||||
|
|
||||||
|
1. Read `AGENTS.md` before deciding how to validate or change anything.
|
||||||
|
2. Read `.ai/environment/tools.ai.yaml` and `ai-plan/public/README.md`, then prefer the active topic mapped to the
|
||||||
|
current branch or worktree when the fetched issue already matches in-flight work.
|
||||||
|
3. Run `scripts/fetch_current_issue_review.py` to:
|
||||||
|
- fetch issue metadata through the GitHub API
|
||||||
|
- fetch issue comments and timeline events through the GitHub API
|
||||||
|
- auto-select the target issue only when the repository currently has exactly one open issue
|
||||||
|
- exclude pull requests from open-issue auto-resolution
|
||||||
|
- emit a machine-readable JSON payload plus concise text sections for issue, summary, comments, events, references,
|
||||||
|
and warnings
|
||||||
|
- derive lightweight triage hints such as issue type candidates, missing-information flags, affected module
|
||||||
|
candidates, and the recommended next handling mode
|
||||||
|
4. Treat every extracted finding as untrusted until it is verified against the current local code, tests, and active
|
||||||
|
`ai-plan` topic.
|
||||||
|
5. Do not start editing code from the issue text alone. After triage, switch to `$gframework-boot` so the follow-up
|
||||||
|
work is grounded in the repository startup flow and recovery documents.
|
||||||
|
6. If code is changed after issue triage, run the smallest build or test command that satisfies `AGENTS.md`.
|
||||||
|
|
||||||
|
## Commands
|
||||||
|
|
||||||
|
- Default:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py`
|
||||||
|
- Force a specific issue:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py --issue <issue-number>`
|
||||||
|
- Machine-readable output:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py --format json`
|
||||||
|
- Write machine-readable output to a file instead of stdout:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py --issue <issue-number> --format json --json-output /tmp/issue-review.json`
|
||||||
|
- Inspect only a high-signal section:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py --section summary`
|
||||||
|
- Combine triage with a boot handoff:
|
||||||
|
- `python3 .agents/skills/gframework-issue-review/scripts/fetch_current_issue_review.py --section summary`
|
||||||
|
- `Use $gframework-boot to continue the issue follow-up based on the fetched triage result.`
|
||||||
|
|
||||||
|
## Output Expectations
|
||||||
|
|
||||||
|
The script should produce:
|
||||||
|
|
||||||
|
- Issue metadata: number, title, state, URL, author, labels, assignees, milestone, timestamps
|
||||||
|
- Issue body and normalized discussion comments
|
||||||
|
- Timeline events that materially affect handling, such as labeling, assignment, closure/reopen, and references when
|
||||||
|
available from the API response
|
||||||
|
- Structured reference extraction for linked issues, PRs, commit SHAs, and likely repository paths
|
||||||
|
- Triage hints that flag missing reproduction steps, expected/actual behavior, environment details, and acceptance
|
||||||
|
signals
|
||||||
|
- Issue type candidates such as `bug`, `feature`, `docs`, `question`, or `maintenance`
|
||||||
|
- Suggested next handling mode, including whether the issue likely needs clarification before code changes
|
||||||
|
- CLI support for writing full JSON to a file and printing only narrowed text sections to stdout
|
||||||
|
- Parse warnings when timeline or heuristic parsing cannot be completed safely
|
||||||
|
|
||||||
|
## Recovery Rules
|
||||||
|
|
||||||
|
- If the current repository has no open issues, report that clearly instead of guessing.
|
||||||
|
- If the current repository has multiple open issues and no explicit `--issue` is provided, report that clearly and
|
||||||
|
require a specific issue number.
|
||||||
|
- If GitHub access fails because of proxy configuration, rerun the fetch with proxy variables removed.
|
||||||
|
- Prefer GitHub API results over HTML scraping.
|
||||||
|
- Do not treat heuristic module guesses or next-step suggestions as repository truth; they are only entry points for
|
||||||
|
subsequent local verification.
|
||||||
|
- If the issue discussion reveals that the problem statement has already shifted, prefer the newest concrete comment or
|
||||||
|
timeline signal over the original title/body wording.
|
||||||
|
- After extracting the issue, continue the actual implementation flow with `$gframework-boot` so the task is grounded
|
||||||
|
in current branch context and `ai-plan` recovery artifacts.
|
||||||
|
|
||||||
|
## Example Triggers
|
||||||
|
|
||||||
|
- `Use $gframework-issue-review on the current repository issue`
|
||||||
|
- `Check the open GitHub issue and summarize what should be verified locally`
|
||||||
|
- `Inspect issue <issue-number> and tell me whether this looks like bug triage or a feature request`
|
||||||
|
- `先用 $gframework-issue-review 看当前 open issue,再用 $gframework-boot 继续`
|
||||||
@ -0,0 +1,4 @@
|
|||||||
|
interface:
|
||||||
|
display_name: "GFramework Issue Review"
|
||||||
|
short_description: "Inspect the current repository issue and triage next steps"
|
||||||
|
default_prompt: "Use $gframework-issue-review to inspect the current repository issue through the GitHub API, summarize the issue body, discussion, and key timeline signals, highlight what must be verified locally, and then hand follow-up execution to $gframework-boot."
|
||||||
@ -0,0 +1,858 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
Fetch the current GFramework GitHub issue and extract the signals needed for
|
||||||
|
local follow-up work without relying on gh CLI.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import urllib.error
|
||||||
|
import urllib.request
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
OWNER = "GeWuYou"
|
||||||
|
REPO = "GFramework"
|
||||||
|
WORKTREE_ROOT_DIRECTORY_NAME = "GFramework-WorkTree"
|
||||||
|
GIT_ENVIRONMENT_KEY = "GFRAMEWORK_WINDOWS_GIT"
|
||||||
|
GIT_DIR_ENVIRONMENT_KEY = "GFRAMEWORK_GIT_DIR"
|
||||||
|
WORK_TREE_ENVIRONMENT_KEY = "GFRAMEWORK_WORK_TREE"
|
||||||
|
REQUEST_TIMEOUT_ENVIRONMENT_KEY = "GFRAMEWORK_ISSUE_REVIEW_TIMEOUT_SECONDS"
|
||||||
|
GITHUB_TOKEN_ENVIRONMENT_KEYS = ("GFRAMEWORK_GITHUB_TOKEN", "GITHUB_TOKEN", "GH_TOKEN")
|
||||||
|
PROXY_ENVIRONMENT_KEYS = ("http_proxy", "https_proxy", "HTTP_PROXY", "HTTPS_PROXY", "ALL_PROXY", "all_proxy")
|
||||||
|
DEFAULT_REQUEST_TIMEOUT_SECONDS = 60
|
||||||
|
USER_AGENT = "codex-gframework-issue-review"
|
||||||
|
DISPLAY_SECTION_CHOICES = (
|
||||||
|
"issue",
|
||||||
|
"summary",
|
||||||
|
"comments",
|
||||||
|
"events",
|
||||||
|
"references",
|
||||||
|
"warnings",
|
||||||
|
)
|
||||||
|
ISSUE_TYPE_CANDIDATES = ("bug", "feature", "docs", "question", "maintenance")
|
||||||
|
ACTIVE_TOPIC_KEYWORDS: dict[str, tuple[str, ...]] = {
|
||||||
|
"ai-first-config-system": ("config", "configuration", "gameconfig", "settings"),
|
||||||
|
"coroutine-optimization": ("coroutine", "yield", "await", "scheduler"),
|
||||||
|
"cqrs-rewrite": ("cqrs", "command", "query", "eventbus", "event bus"),
|
||||||
|
"data-repository-persistence": ("repository", "serialization", "persistence", "data", "settings"),
|
||||||
|
"runtime-generator-boundary": ("source generator", "generator", "attribute", "packaging"),
|
||||||
|
"semantic-release-versioning": ("release", "version", "semantic-release", "tag", "publish"),
|
||||||
|
"documentation-full-coverage-governance": ("docs", "documentation", "readme", "vitepress", "api reference"),
|
||||||
|
}
|
||||||
|
ACTUAL_BEHAVIOR_PATTERNS = (
|
||||||
|
"actual",
|
||||||
|
"currently",
|
||||||
|
"instead",
|
||||||
|
"but",
|
||||||
|
"error",
|
||||||
|
"exception",
|
||||||
|
"fails",
|
||||||
|
"failed",
|
||||||
|
"wrong",
|
||||||
|
)
|
||||||
|
EXPECTED_BEHAVIOR_PATTERNS = (
|
||||||
|
"expected",
|
||||||
|
"should",
|
||||||
|
"want",
|
||||||
|
"would like",
|
||||||
|
"needs to",
|
||||||
|
)
|
||||||
|
REPRODUCTION_PATTERNS = (
|
||||||
|
"steps to reproduce",
|
||||||
|
"reproduce",
|
||||||
|
"reproduction",
|
||||||
|
"how to reproduce",
|
||||||
|
"minimal example",
|
||||||
|
"sample",
|
||||||
|
"demo",
|
||||||
|
)
|
||||||
|
ENVIRONMENT_PATTERNS = (
|
||||||
|
"windows",
|
||||||
|
"linux",
|
||||||
|
"macos",
|
||||||
|
"wsl",
|
||||||
|
"godot",
|
||||||
|
".net",
|
||||||
|
"sdk",
|
||||||
|
"version",
|
||||||
|
"environment",
|
||||||
|
)
|
||||||
|
ACCEPTANCE_PATTERNS = (
|
||||||
|
"acceptance",
|
||||||
|
"done when",
|
||||||
|
"definition of done",
|
||||||
|
"verified by",
|
||||||
|
"test plan",
|
||||||
|
)
|
||||||
|
FILE_PATH_PATTERN = re.compile(r"\b(?:[A-Za-z0-9_.-]+/)+[A-Za-z0-9_.-]+\b")
|
||||||
|
ISSUE_REFERENCE_PATTERN = re.compile(r"(?:^|\s)#(\d+)\b")
|
||||||
|
COMMIT_REFERENCE_PATTERN = re.compile(r"\b[0-9a-f]{7,40}\b")
|
||||||
|
LINE_BREAK_NORMALIZER = re.compile(r"\n{3,}")
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_git_command() -> str:
|
||||||
|
"""Resolve the git executable to use for this repository."""
|
||||||
|
candidates = [
|
||||||
|
os.environ.get(GIT_ENVIRONMENT_KEY),
|
||||||
|
"git.exe",
|
||||||
|
"git",
|
||||||
|
]
|
||||||
|
|
||||||
|
for candidate in candidates:
|
||||||
|
if not candidate:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if os.path.isabs(candidate):
|
||||||
|
if os.path.exists(candidate):
|
||||||
|
return candidate
|
||||||
|
continue
|
||||||
|
|
||||||
|
resolved_candidate = shutil.which(candidate)
|
||||||
|
if resolved_candidate:
|
||||||
|
return resolved_candidate
|
||||||
|
|
||||||
|
raise RuntimeError(f"No usable git executable found. Set {GIT_ENVIRONMENT_KEY} to override it.")
|
||||||
|
|
||||||
|
|
||||||
|
def find_repository_root(start_path: Path) -> Path | None:
|
||||||
|
"""Locate the repository root by walking parent directories for repo markers."""
|
||||||
|
for candidate in (start_path, *start_path.parents):
|
||||||
|
if (candidate / "AGENTS.md").exists() and (candidate / ".ai/environment/tools.ai.yaml").exists():
|
||||||
|
return candidate
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_worktree_git_dir(repository_root: Path) -> Path | None:
|
||||||
|
"""Resolve the main-repository worktree gitdir for this WSL worktree layout."""
|
||||||
|
if repository_root.parent.name != WORKTREE_ROOT_DIRECTORY_NAME:
|
||||||
|
return None
|
||||||
|
|
||||||
|
primary_repository_root = repository_root.parent.parent / REPO
|
||||||
|
candidate_git_dir = primary_repository_root / ".git" / "worktrees" / repository_root.name
|
||||||
|
return candidate_git_dir if candidate_git_dir.exists() else None
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_git_invocation() -> list[str]:
|
||||||
|
"""Resolve the git command arguments, preferring explicit WSL worktree binding."""
|
||||||
|
configured_git_dir = os.environ.get(GIT_DIR_ENVIRONMENT_KEY)
|
||||||
|
configured_work_tree = os.environ.get(WORK_TREE_ENVIRONMENT_KEY)
|
||||||
|
linux_git = shutil.which("git")
|
||||||
|
|
||||||
|
if configured_git_dir and configured_work_tree and linux_git:
|
||||||
|
return [linux_git, f"--git-dir={configured_git_dir}", f"--work-tree={configured_work_tree}"]
|
||||||
|
|
||||||
|
repository_root = find_repository_root(Path.cwd())
|
||||||
|
if repository_root is not None and linux_git:
|
||||||
|
worktree_git_dir = resolve_worktree_git_dir(repository_root)
|
||||||
|
if worktree_git_dir is not None:
|
||||||
|
return [linux_git, f"--git-dir={worktree_git_dir}", f"--work-tree={repository_root}"]
|
||||||
|
|
||||||
|
root_git_dir = repository_root / ".git"
|
||||||
|
if root_git_dir.exists():
|
||||||
|
return [linux_git, f"--git-dir={root_git_dir}", f"--work-tree={repository_root}"]
|
||||||
|
|
||||||
|
return [resolve_git_command()]
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_request_timeout_seconds() -> int:
|
||||||
|
"""Return the GitHub request timeout in seconds."""
|
||||||
|
configured_timeout = os.environ.get(REQUEST_TIMEOUT_ENVIRONMENT_KEY)
|
||||||
|
if not configured_timeout:
|
||||||
|
return DEFAULT_REQUEST_TIMEOUT_SECONDS
|
||||||
|
|
||||||
|
try:
|
||||||
|
parsed_timeout = int(configured_timeout)
|
||||||
|
except ValueError as error:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"{REQUEST_TIMEOUT_ENVIRONMENT_KEY} must be an integer number of seconds."
|
||||||
|
) from error
|
||||||
|
|
||||||
|
if parsed_timeout <= 0:
|
||||||
|
raise RuntimeError(f"{REQUEST_TIMEOUT_ENVIRONMENT_KEY} must be greater than zero.")
|
||||||
|
|
||||||
|
return parsed_timeout
|
||||||
|
|
||||||
|
|
||||||
|
def run_command(args: list[str]) -> str:
|
||||||
|
"""Run a command and return stdout, raising on failure."""
|
||||||
|
process = subprocess.run(args, capture_output=True, text=True, check=False)
|
||||||
|
if process.returncode != 0:
|
||||||
|
stderr = process.stderr.strip()
|
||||||
|
raise RuntimeError(f"Command failed: {' '.join(args)}\n{stderr}")
|
||||||
|
return process.stdout.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def get_current_branch() -> str:
|
||||||
|
"""Return the current git branch name."""
|
||||||
|
return run_command([*resolve_git_invocation(), "rev-parse", "--abbrev-ref", "HEAD"])
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_github_token() -> str | None:
|
||||||
|
"""Return the first configured GitHub token for authenticated API requests."""
|
||||||
|
for environment_key in GITHUB_TOKEN_ENVIRONMENT_KEYS:
|
||||||
|
token = os.environ.get(environment_key)
|
||||||
|
if token:
|
||||||
|
return token
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def build_request_headers(accept: str) -> dict[str, str]:
|
||||||
|
"""Build GitHub request headers and include auth when a token is available."""
|
||||||
|
headers = {"Accept": accept, "User-Agent": USER_AGENT}
|
||||||
|
token = resolve_github_token()
|
||||||
|
if token:
|
||||||
|
headers["Authorization"] = f"Bearer {token}"
|
||||||
|
|
||||||
|
return headers
|
||||||
|
|
||||||
|
|
||||||
|
def has_proxy_environment() -> bool:
|
||||||
|
"""Return whether the current process is configured to use an outbound proxy."""
|
||||||
|
return any(os.environ.get(environment_key) for environment_key in PROXY_ENVIRONMENT_KEYS)
|
||||||
|
|
||||||
|
|
||||||
|
def perform_request(url: str, headers: dict[str, str], *, disable_proxy: bool) -> tuple[str, Any]:
|
||||||
|
"""Execute a single HTTP request and return decoded text plus response headers."""
|
||||||
|
opener = (
|
||||||
|
urllib.request.build_opener(urllib.request.ProxyHandler({}))
|
||||||
|
if disable_proxy
|
||||||
|
else urllib.request.build_opener()
|
||||||
|
)
|
||||||
|
request = urllib.request.Request(url, headers=headers)
|
||||||
|
with opener.open(request, timeout=resolve_request_timeout_seconds()) as response:
|
||||||
|
return response.read().decode("utf-8", "replace"), response.headers
|
||||||
|
|
||||||
|
|
||||||
|
def open_url(url: str, accept: str) -> tuple[str, Any]:
|
||||||
|
"""Open a URL, retrying without proxies only when the configured proxy path fails."""
|
||||||
|
headers = build_request_headers(accept)
|
||||||
|
|
||||||
|
try:
|
||||||
|
return perform_request(url, headers, disable_proxy=False)
|
||||||
|
except urllib.error.HTTPError:
|
||||||
|
raise
|
||||||
|
except (urllib.error.URLError, TimeoutError, OSError):
|
||||||
|
if not has_proxy_environment():
|
||||||
|
raise
|
||||||
|
|
||||||
|
return perform_request(url, headers, disable_proxy=True)
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_json(url: str, accept: str = "application/vnd.github+json") -> tuple[Any, Any]:
|
||||||
|
"""Fetch a JSON payload and its response headers from GitHub."""
|
||||||
|
text, headers = open_url(url, accept=accept)
|
||||||
|
return json.loads(text), headers
|
||||||
|
|
||||||
|
|
||||||
|
def extract_next_link(headers: Any) -> str | None:
|
||||||
|
"""Extract the next-page link from GitHub pagination headers."""
|
||||||
|
link_header = headers.get("Link")
|
||||||
|
if not link_header:
|
||||||
|
return None
|
||||||
|
|
||||||
|
match = re.search(r'<([^>]+)>;\s*rel="next"', link_header)
|
||||||
|
return match.group(1) if match else None
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_paged_json(url: str, accept: str = "application/vnd.github+json") -> list[dict[str, Any]]:
|
||||||
|
"""Fetch every page from a paginated GitHub API endpoint."""
|
||||||
|
items: list[dict[str, Any]] = []
|
||||||
|
next_url: str | None = url
|
||||||
|
while next_url:
|
||||||
|
payload, headers = fetch_json(next_url, accept=accept)
|
||||||
|
if not isinstance(payload, list):
|
||||||
|
raise RuntimeError(f"Expected list payload from GitHub API, got {type(payload).__name__}.")
|
||||||
|
|
||||||
|
items.extend(payload)
|
||||||
|
next_url = extract_next_link(headers)
|
||||||
|
|
||||||
|
return items
|
||||||
|
|
||||||
|
|
||||||
|
def collapse_whitespace(text: str) -> str:
    """Normalize line endings and squeeze repeated whitespace, keeping paragraph breaks."""
    result = text.replace("\r\n", "\n").replace("\r", "\n")
    # Collapse runs of blank lines down to a single paragraph separator first.
    result = LINE_BREAK_NORMALIZER.sub("\n\n", result)
    result = re.sub(r"[ \t]+", " ", result)
    result = re.sub(r" *\n *", "\n", result)
    return result.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def truncate_text(text: str, max_length: int) -> str:
    """Collapse whitespace and truncate long text for CLI display.

    A non-positive ``max_length`` disables truncation. Truncated output ends
    with ``...`` and never exceeds ``max_length`` characters.
    """
    collapsed = collapse_whitespace(text)
    if max_length <= 0 or len(collapsed) <= max_length:
        return collapsed
    if max_length < 3:
        # Too short to fit an ellipsis. Previously `collapsed[: max_length - 3]`
        # used a negative slice here, yielding output longer than max_length.
        return collapsed[:max_length]
    return collapsed[: max_length - 3].rstrip() + "..."
|
||||||
|
|
||||||
|
|
||||||
|
def filter_open_issue_candidates(items: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Drop pull-request entries from a GitHub issues listing."""
    candidates: list[dict[str, Any]] = []
    for item in items:
        # The issues API mixes PRs in; a "pull_request" key marks them.
        if not item.get("pull_request"):
            candidates.append(item)
    return candidates
|
||||||
|
|
||||||
|
|
||||||
|
def select_single_open_issue_number(items: list[dict[str, Any]]) -> int:
    """Pick the target issue number when the repository has exactly one plain open issue."""
    issues = filter_open_issue_candidates(items)
    if len(issues) == 1:
        return int(issues[0]["number"])

    if not issues:
        raise RuntimeError("No open GitHub issues found for this repository. Pass --issue <number> to inspect one.")

    # Show at most five candidate numbers so the error message stays readable.
    numbers = ", ".join(str(item.get("number")) for item in issues[:5])
    suffix = "" if len(issues) <= 5 else ", ..."
    raise RuntimeError(
        "Multiple open GitHub issues found for this repository "
        f"({len(issues)} total: {numbers}{suffix}). Pass --issue <number> to inspect one."
    )
|
||||||
|
|
||||||
|
|
||||||
|
def resolve_issue_number(issue_number: int | None) -> tuple[int, str]:
    """Return the target issue number plus a tag describing how it was chosen."""
    if issue_number is None:
        # No explicit number: only auto-select when exactly one open issue exists.
        candidates = fetch_paged_json(f"https://api.github.com/repos/{OWNER}/{REPO}/issues?state=open&per_page=100")
        return select_single_open_issue_number(candidates), "auto-single-open-issue"
    return issue_number, "explicit"
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_issue_metadata(issue_number: int) -> dict[str, Any]:
    """Fetch normalized metadata for a GitHub issue.

    Raises:
        RuntimeError: when the payload is not a dict or the item is a pull request.
    """
    payload, _ = fetch_json(f"https://api.github.com/repos/{OWNER}/{REPO}/issues/{issue_number}")
    if not isinstance(payload, dict):
        raise RuntimeError("Failed to fetch GitHub issue metadata.")

    if payload.get("pull_request"):
        raise RuntimeError(f"Item #{issue_number} is a pull request, not a plain issue.")

    labels = []
    for label in payload.get("labels", []):
        if isinstance(label, dict) and label.get("name"):
            labels.append(str(label["name"]))

    assignees = []
    for assignee in payload.get("assignees", []):
        # Guard non-dict entries for parity with the labels loop above.
        if isinstance(assignee, dict) and assignee.get("login"):
            assignees.append(str(assignee["login"]))

    milestone_title = None
    milestone = payload.get("milestone")
    if isinstance(milestone, dict) and milestone.get("title"):
        milestone_title = str(milestone["title"])

    # A deleted author is serialized as "user": null, so dict.get("user", {})
    # would return None and crash on .get("login"); coalesce to {} first.
    author = str((payload.get("user") or {}).get("login") or "")

    return {
        "number": int(payload["number"]),
        "title": str(payload["title"]),
        "state": str(payload["state"]).upper(),
        "url": str(payload["html_url"]),
        "author": author,
        "created_at": str(payload.get("created_at") or ""),
        "updated_at": str(payload.get("updated_at") or ""),
        "labels": labels,
        "assignees": assignees,
        "milestone": milestone_title,
        "body": str(payload.get("body") or ""),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_issue_comments(issue_number: int) -> list[dict[str, Any]]:
    """Download every discussion comment on the selected issue."""
    comments_url = f"https://api.github.com/repos/{OWNER}/{REPO}/issues/{issue_number}/comments?per_page=100"
    return fetch_paged_json(comments_url)
|
||||||
|
|
||||||
|
|
||||||
|
def fetch_issue_timeline(issue_number: int) -> list[dict[str, Any]]:
    """Download timeline events for the issue when the API exposes them to this client."""
    timeline_url = f"https://api.github.com/repos/{OWNER}/{REPO}/issues/{issue_number}/timeline?per_page=100"
    return fetch_paged_json(timeline_url, accept="application/vnd.github+json")
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_comment(comment: dict[str, Any]) -> dict[str, Any]:
    """Normalize an issue comment for structured output.

    Missing fields degrade to empty strings (or 0 for the id). A deleted
    author is serialized as "user": null, so coalesce before reading "login" —
    dict.get("user", {}) would return None and crash.
    """
    author = str((comment.get("user") or {}).get("login") or "")
    return {
        "id": int(comment.get("id") or 0),
        "author": author,
        "created_at": str(comment.get("created_at") or ""),
        "updated_at": str(comment.get("updated_at") or ""),
        "body": str(comment.get("body") or ""),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def normalize_timeline_event(event: dict[str, Any]) -> dict[str, Any]:
    """Normalize the GitHub timeline event fields used by triage output.

    A ghost/deleted actor is serialized as "actor": null, so coalesce to an
    empty dict before reading "login" — dict.get("actor", {}) would return
    None and crash.
    """
    actor = str((event.get("actor") or {}).get("login") or "")
    # REST events use "created_at"; review-style entries use "submitted_at".
    created_at = str(event.get("created_at") or event.get("submitted_at") or "")
    event_type = str(event.get("event") or event.get("__typename") or "unknown")
    label_name = ""
    assignee = ""
    source_issue_number: int | None = None
    source_issue_url = ""
    commit_id = ""

    label = event.get("label")
    if isinstance(label, dict) and label.get("name"):
        label_name = str(label["name"])

    assignee_payload = event.get("assignee")
    if isinstance(assignee_payload, dict) and assignee_payload.get("login"):
        assignee = str(assignee_payload["login"])

    # "cross-referenced" events carry the referencing issue under source.issue.
    source = event.get("source")
    if isinstance(source, dict):
        issue_payload = source.get("issue")
        if isinstance(issue_payload, dict):
            if issue_payload.get("number"):
                source_issue_number = int(issue_payload["number"])
            if issue_payload.get("html_url"):
                source_issue_url = str(issue_payload["html_url"])

    commit_id_value = event.get("commit_id")
    if isinstance(commit_id_value, str):
        commit_id = commit_id_value

    return {
        "event": event_type,
        "actor": actor,
        "created_at": created_at,
        "label": label_name,
        "assignee": assignee,
        "commit_id": commit_id,
        "source_issue_number": source_issue_number,
        "source_issue_url": source_issue_url,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def gather_text_blocks(issue: dict[str, Any], comments: list[dict[str, Any]]) -> list[str]:
    """Collect the non-empty issue body and comment bodies for heuristic parsing."""
    candidates = [issue.get("body", "")] + [comment.get("body", "") for comment in comments]
    return [text for text in candidates if text]
|
||||||
|
|
||||||
|
|
||||||
|
def has_any_pattern(text_blocks: list[str], patterns: tuple[str, ...]) -> bool:
    """Check whether any normalized, lowercased text block mentions any pattern."""
    for block in text_blocks:
        lowered = collapse_whitespace(block).lower()
        if any(pattern in lowered for pattern in patterns):
            return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def choose_issue_type_candidates(issue: dict[str, Any], text_blocks: list[str]) -> list[str]:
    """Infer lightweight issue-type candidates from labels and discussion text."""
    labels = {label.lower() for label in issue.get("labels", [])}
    text = "\n".join(text_blocks).lower()
    candidates: list[str] = []

    if labels & {"bug", "regression"} or "bug" in text or "error" in text or "fails" in text:
        candidates.append("bug")
    if labels & {"feature", "enhancement"} or "feature" in text or "support" in text:
        candidates.append("feature")
    if labels & {"documentation", "docs"} or "documentation" in text or "readme" in text:
        candidates.append("docs")
    if labels & {"question", "help wanted"} or "?" in issue.get("title", ""):
        candidates.append("question")
    if labels & {"chore", "maintenance", "refactor"} or "cleanup" in text or "refactor" in text:
        candidates.append("maintenance")

    if not candidates:
        # Fallback: a body ending in "?" reads as a question, otherwise assume a bug.
        candidates.append("question" if issue.get("body", "").strip().endswith("?") else "bug")

    # Emit matches in the canonical priority order.
    return [candidate for candidate in ISSUE_TYPE_CANDIDATES if candidate in candidates]
|
||||||
|
|
||||||
|
|
||||||
|
def extract_references_from_text(text: str) -> dict[str, list[str]]:
    """Pull issue-number, commit-SHA, and file-path mentions out of one text block."""
    issue_numbers = {match.group(1) for match in ISSUE_REFERENCE_PATTERN.finditer(text)}
    commit_shas = {match.group(0) for match in COMMIT_REFERENCE_PATTERN.finditer(text)}
    file_paths = {match.group(0) for match in FILE_PATH_PATTERN.finditer(text)}
    return {
        # Issue numbers sort numerically, then get rendered as "#123".
        "issues": [f"#{number}" for number in sorted(issue_numbers, key=int)],
        "commit_shas": sorted(commit_shas),
        "file_paths": sorted(file_paths),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def merge_reference_values(values: list[dict[str, list[str]]]) -> dict[str, list[str]]:
    """Union per-block reference lists into one sorted, de-duplicated mapping."""
    issues: set[str] = set()
    commit_shas: set[str] = set()
    file_paths: set[str] = set()
    for value in values:
        issues.update(value.get("issues", []))
        commit_shas.update(value.get("commit_shas", []))
        file_paths.update(value.get("file_paths", []))
    return {
        # Issue references look like "#123"; sort numerically on the digits.
        "issues": sorted(issues, key=lambda item: int(item[1:])),
        "commit_shas": sorted(commit_shas),
        "file_paths": sorted(file_paths),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def build_references(issue: dict[str, Any], comments: list[dict[str, Any]], events: list[dict[str, Any]]) -> dict[str, Any]:
    """Assemble structured references from issue text and timeline context."""
    blocks = [issue.get("body", "")] + [comment.get("body", "") for comment in comments]
    merged = merge_reference_values([extract_references_from_text(block) for block in blocks])

    def numeric_key(item: str) -> int:
        # "#123" -> 123 for numeric ordering.
        return int(item[1:])

    timeline_refs = {
        f"#{event['source_issue_number']}"
        for event in events
        if event.get("source_issue_number") is not None
    }

    # Drop the issue's own number from the related-mentions list.
    self_reference = f"#{issue['number']}"
    related_refs = {reference for reference in merged["issues"] if reference != self_reference}

    return {
        "issues": merged["issues"],
        "pull_requests_or_issues": sorted(related_refs, key=numeric_key),
        "commit_shas": merged["commit_shas"],
        "file_paths": merged["file_paths"],
        "timeline_cross_references": sorted(timeline_refs, key=numeric_key),
    }
|
||||||
|
|
||||||
|
|
||||||
|
def build_information_flags(
    issue: dict[str, Any],
    comments: list[dict[str, Any]],
    issue_type_candidates: list[str],
) -> dict[str, bool]:
    """Derive missing-information and readiness flags with issue-type-aware heuristics."""
    text_blocks = gather_text_blocks(issue, comments)
    flags = {
        "has_reproduction_steps": has_any_pattern(text_blocks, REPRODUCTION_PATTERNS),
        "has_expected_behavior": has_any_pattern(text_blocks, EXPECTED_BEHAVIOR_PATTERNS),
        "has_actual_behavior": has_any_pattern(text_blocks, ACTUAL_BEHAVIOR_PATTERNS),
        "has_environment_details": has_any_pattern(text_blocks, ENVIRONMENT_PATTERNS),
        "has_acceptance_signals": has_any_pattern(text_blocks, ACCEPTANCE_PATTERNS),
    }
    primary_issue_type = issue_type_candidates[0] if issue_type_candidates else "bug"

    if primary_issue_type == "bug":
        # A bug is actionable once observed behavior plus either repro steps or
        # environment details are present, or explicit acceptance criteria exist.
        ready = (
            flags["has_actual_behavior"]
            and (flags["has_reproduction_steps"] or flags["has_environment_details"])
        ) or flags["has_acceptance_signals"]
    elif primary_issue_type in {"feature", "docs"}:
        ready = flags["has_expected_behavior"] or flags["has_acceptance_signals"]
    else:
        # "maintenance" and any other type share the same readiness rule.
        ready = flags["has_expected_behavior"] or flags["has_actual_behavior"] or flags["has_acceptance_signals"]

    flags["needs_clarification"] = not ready
    return flags
|
||||||
|
|
||||||
|
|
||||||
|
def choose_affected_topics(issue: dict[str, Any], comments: list[dict[str, Any]]) -> list[str]:
    """List active topics whose keywords appear anywhere in the issue discussion."""
    haystack = "\n".join(gather_text_blocks(issue, comments)).lower()
    return [
        topic
        for topic, keywords in ACTIVE_TOPIC_KEYWORDS.items()
        if any(keyword in haystack for keyword in keywords)
    ]
|
||||||
|
|
||||||
|
|
||||||
|
def choose_next_action(
    information_flags: dict[str, bool],
    issue_type_candidates: list[str],
    affected_topics: list[str],
) -> str:
    """Pick the boot-handoff handling mode for the triaged issue."""
    if information_flags["needs_clarification"]:
        return "clarify-issue-before-code"
    if affected_topics:
        return "resume-existing-topic-with-boot"
    # Equivalent to "docs being present AND first": safe on an empty list too.
    if issue_type_candidates[:1] == ["docs"]:
        return "start-new-docs-topic-with-boot"
    return "start-new-topic-with-boot"
|
||||||
|
|
||||||
|
|
||||||
|
def build_triage_hints(issue: dict[str, Any], comments: list[dict[str, Any]]) -> dict[str, Any]:
    """Build lightweight, reviewable triage hints for boot follow-up."""
    text_blocks = gather_text_blocks(issue, comments)
    issue_type_candidates = choose_issue_type_candidates(issue, text_blocks)
    information_flags = build_information_flags(issue, comments, issue_type_candidates)
    affected_topics = choose_affected_topics(issue, comments)

    if information_flags["needs_clarification"]:
        handoff_notes = "Use gframework-boot to record a clarification-first task before changing code."
    else:
        handoff_notes = "Use gframework-boot to verify the issue against local code and active ai-plan topics."

    return {
        "issue_type_candidates": issue_type_candidates,
        "information_flags": information_flags,
        "affected_active_topics": affected_topics,
        "next_action": choose_next_action(information_flags, issue_type_candidates, affected_topics),
        "boot_handoff": {
            "recommended_skill": "gframework-boot",
            # Resume an existing topic when keyword matches exist; otherwise start new.
            "mode": "resume" if affected_topics else "new",
            "notes": handoff_notes,
        },
    }
|
||||||
|
|
||||||
|
|
||||||
|
def build_result(issue_number: int, branch: str, resolution_mode: str) -> dict[str, Any]:
    """Build the full issue review payload for the selected issue.

    Args:
        issue_number: The resolved GitHub issue number to review.
        branch: The branch name recorded in the output (may be empty).
        resolution_mode: How the issue number was resolved ("explicit" or auto).

    Returns:
        A dict with issue metadata, discussion, timeline events, references,
        triage hints, and any non-fatal parse warnings.
    """
    parse_warnings: list[str] = []
    issue = fetch_issue_metadata(issue_number)
    raw_comments = fetch_issue_comments(issue_number)
    comments = [normalize_comment(comment) for comment in raw_comments]

    # Timeline fetching is best-effort: any failure becomes a parse warning
    # rather than aborting the whole review.
    events: list[dict[str, Any]] = []
    try:
        raw_events = fetch_issue_timeline(issue_number)
        events = [normalize_timeline_event(event) for event in raw_events]
    except Exception as error:  # noqa: BLE001
        parse_warnings.append(f"Issue timeline could not be fetched or parsed: {error}")

    references = build_references(issue, comments, events)
    triage_hints = build_triage_hints(issue, comments)

    return {
        "issue": {
            **issue,
            "resolved_from_branch": branch,
            "resolution_mode": resolution_mode,
        },
        "discussion": {
            "comment_count": len(comments),
            "comments": comments,
        },
        "events": {
            "count": len(events),
            "items": events,
        },
        "references": references,
        "triage_hints": triage_hints,
        "parse_warnings": parse_warnings,
    }
|
||||||
|
|
||||||
|
|
||||||
|
def write_json_output(result: dict[str, Any], output_path: str) -> str:
    """Serialize the result as pretty-printed JSON on disk; return the final path."""
    destination = Path(output_path).expanduser()
    destination.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(result, ensure_ascii=False, indent=2)
    destination.write_text(serialized, encoding="utf-8")
    return str(destination)
|
||||||
|
|
||||||
|
|
||||||
|
def summarize_events(events: list[dict[str, Any]]) -> list[str]:
    """Render normalized timeline events as concise one-line summaries."""
    summaries: list[str] = []
    for event in events:
        details: list[str] = []
        if event.get("actor"):
            details.append(f"actor={event['actor']}")
        if event.get("label"):
            details.append(f"label={event['label']}")
        if event.get("assignee"):
            details.append(f"assignee={event['assignee']}")
        if event.get("source_issue_number") is not None:
            details.append(f"source_issue=#{event['source_issue_number']}")
        if event.get("commit_id"):
            # Keep output short: a 12-character SHA prefix.
            details.append(f"commit={event['commit_id'][:12]}")
        if event.get("created_at"):
            details.append(f"at={event['created_at']}")
        detail_text = " (" + ", ".join(details) + ")" if details else ""
        summaries.append(f"- {event['event']}{detail_text}")
    return summaries
|
||||||
|
|
||||||
|
|
||||||
|
def format_text(
    result: dict[str, Any],
    *,
    sections: list[str] | None = None,
    max_description_length: int = 400,
    json_output_path: str | None = None,
) -> str:
    """Format the result payload into concise text output.

    Args:
        result: The payload produced by build_result.
        sections: Section names to include; None selects every section.
        max_description_length: Truncation limit applied to long bodies.
        json_output_path: When set, a trailing pointer line is appended.

    Returns:
        The joined, newline-separated text report.
    """
    lines: list[str] = []
    # None/empty means "show everything".
    selected_sections = set(sections or DISPLAY_SECTION_CHOICES)
    issue = result["issue"]
    triage_hints = result["triage_hints"]
    discussion = result["discussion"]
    events = result["events"]
    references = result["references"]

    if "issue" in selected_sections:
        lines.append(f"Issue #{issue['number']}: {issue['title']}")
        lines.append(f"State: {issue['state']}")
        lines.append(f"Author: {issue['author']}")
        lines.append(f"Labels: {', '.join(issue['labels']) if issue['labels'] else '(none)'}")
        lines.append(f"Assignees: {', '.join(issue['assignees']) if issue['assignees'] else '(none)'}")
        lines.append(f"Milestone: {issue['milestone'] or '(none)'}")
        lines.append(f"Created: {issue['created_at']}")
        lines.append(f"Updated: {issue['updated_at']}")
        lines.append(f"Resolved from branch: {issue['resolved_from_branch'] or '(not branch-based)'}")
        lines.append(f"Resolution mode: {issue['resolution_mode']}")
        lines.append(f"URL: {issue['url']}")
        if issue["body"]:
            lines.append("Body:")
            lines.append(truncate_text(issue["body"], max_description_length))

    if "summary" in selected_sections:
        lines.append("")
        lines.append("Triage summary:")
        lines.append("- Issue type candidates: " + ", ".join(triage_hints["issue_type_candidates"]))
        information_flags = triage_hints["information_flags"]
        lines.append(
            "- Information flags: "
            + ", ".join(
                [
                    f"repro={'yes' if information_flags['has_reproduction_steps'] else 'no'}",
                    f"expected={'yes' if information_flags['has_expected_behavior'] else 'no'}",
                    f"actual={'yes' if information_flags['has_actual_behavior'] else 'no'}",
                    f"environment={'yes' if information_flags['has_environment_details'] else 'no'}",
                    f"acceptance={'yes' if information_flags['has_acceptance_signals'] else 'no'}",
                    f"needs_clarification={'yes' if information_flags['needs_clarification'] else 'no'}",
                ]
            )
        )
        lines.append(
            "- Affected active topics: "
            + (", ".join(triage_hints["affected_active_topics"]) if triage_hints["affected_active_topics"] else "(none)")
        )
        lines.append(f"- Next action: {triage_hints['next_action']}")
        lines.append(f"- Boot handoff: {triage_hints['boot_handoff']['notes']}")

    if "comments" in selected_sections:
        lines.append("")
        lines.append(f"Discussion comments: {discussion['comment_count']}")
        for comment in discussion["comments"]:
            lines.append(f"- {comment['author']} at {comment['created_at']}")
            lines.append(f"  {truncate_text(comment['body'], max_description_length)}")

    if "events" in selected_sections:
        lines.append("")
        lines.append(f"Timeline events: {events['count']}")
        lines.extend(summarize_events(events["items"]))

    if "references" in selected_sections:
        lines.append("")
        lines.append("References:")
        lines.append("- Mentioned issues: " + (", ".join(references["issues"]) if references["issues"] else "(none)"))
        lines.append(
            "- Cross references: "
            + (
                ", ".join(references["timeline_cross_references"])
                if references["timeline_cross_references"]
                else "(none)"
            )
        )
        lines.append(
            "- Related issue/PR mentions: "
            + (
                ", ".join(references["pull_requests_or_issues"])
                if references["pull_requests_or_issues"]
                else "(none)"
            )
        )
        lines.append("- Commit SHAs: " + (", ".join(references["commit_shas"]) if references["commit_shas"] else "(none)"))
        lines.append("- File paths: " + (", ".join(references["file_paths"]) if references["file_paths"] else "(none)"))

    # Warnings are shown only when both present and requested.
    if result["parse_warnings"] and "warnings" in selected_sections:
        lines.append("")
        lines.append("Warnings:")
        for warning in result["parse_warnings"]:
            lines.append(f"- {truncate_text(warning, max_description_length)}")

    if json_output_path:
        lines.append("")
        lines.append(f"Full JSON written to: {json_output_path}")

    return "\n".join(lines)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args() -> argparse.Namespace:
    """Parse CLI arguments.

    Returns:
        The parsed namespace with branch/issue/format/output/section options.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--branch", help="Override the current branch name.")
    parser.add_argument("--issue", type=int, help="Fetch a specific issue number instead of auto-selecting one.")
    parser.add_argument("--format", choices=("text", "json"), default="text")
    parser.add_argument(
        "--json-output",
        help="Write the full JSON result to a file. When used with --format text, stdout stays concise and points to the file.",
    )
    # action="append" lets --section be repeated; None means "show every section".
    parser.add_argument(
        "--section",
        action="append",
        choices=DISPLAY_SECTION_CHOICES,
        help="Limit text output to specific sections. Can be passed multiple times.",
    )
    parser.add_argument(
        "--max-description-length",
        type=int,
        default=400,
        help="Truncate long text bodies in text output to this many characters.",
    )
    return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
    """Run the CLI entry point."""
    args = parse_args()
    branch = args.branch or get_current_branch()
    issue_number, resolution_mode = resolve_issue_number(args.issue)
    result = build_result(issue_number, branch, resolution_mode)

    # Persist the full payload first so text mode can point to the file.
    json_output_path: str | None = None
    if args.json_output:
        json_output_path = write_json_output(result, args.json_output)

    if args.format == "json":
        print(json.dumps(result, ensure_ascii=False, indent=2))
        return

    report = format_text(
        result,
        sections=args.section,
        max_description_length=args.max_description_length,
        json_output_path=json_output_path,
    )
    print(report)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    try:
        main()
    except Exception as error:  # noqa: BLE001
        # Surface any failure as a single stderr line with a non-zero exit code.
        print(str(error), file=sys.stderr)
        sys.exit(1)
|
||||||
@ -0,0 +1,94 @@
|
|||||||
|
#!/usr/bin/env python3
# Copyright (c) 2025-2026 GeWuYou
# SPDX-License-Identifier: Apache-2.0

"""Regression tests for the GFramework issue review fetch helper."""

from __future__ import annotations

import importlib.util
from pathlib import Path
import unittest


# Load the script under test by file path: it is a standalone CLI script that
# lives next to this test file, not an installed package.
SCRIPT_PATH = Path(__file__).with_name("fetch_current_issue_review.py")
MODULE_SPEC = importlib.util.spec_from_file_location("fetch_current_issue_review", SCRIPT_PATH)
if MODULE_SPEC is None or MODULE_SPEC.loader is None:
    raise RuntimeError(f"Unable to load module from {SCRIPT_PATH}.")

MODULE = importlib.util.module_from_spec(MODULE_SPEC)
MODULE_SPEC.loader.exec_module(MODULE)
|
||||||
|
|
||||||
|
|
||||||
|
class SelectSingleOpenIssueNumberTests(unittest.TestCase):
    """Cover auto-resolution rules for open GitHub issues."""

    def test_select_single_open_issue_number_filters_pull_requests(self) -> None:
        """A PR entry in the issues listing must not block single-issue auto-selection."""
        items = [
            {"number": 10, "pull_request": {"url": "https://example.test/pr/10"}},
            {"number": 11},
        ]

        self.assertEqual(MODULE.select_single_open_issue_number(items), 11)

    def test_select_single_open_issue_number_rejects_multiple_plain_issues(self) -> None:
        """Two or more plain open issues must abort auto-resolution with a clear error."""
        items = [{"number": 11}, {"number": 12}]

        with self.assertRaisesRegex(RuntimeError, "Multiple open GitHub issues found"):
            MODULE.select_single_open_issue_number(items)
|
||||||
|
|
||||||
|
|
||||||
|
class ExtractReferencesFromTextTests(unittest.TestCase):
    """Cover lightweight reference extraction used by the text and JSON output."""

    def test_extract_references_from_text_finds_issue_commit_and_path_mentions(self) -> None:
        """Issue numbers, commit SHAs, and file paths should all survive extraction."""
        text = "See #123, commit abcdef1234567890, and GFramework.Core/Systems/Runner.cs for the failing path."

        references = MODULE.extract_references_from_text(text)

        self.assertEqual(references["issues"], ["#123"])
        self.assertEqual(references["commit_shas"], ["abcdef1234567890"])
        self.assertEqual(references["file_paths"], ["GFramework.Core/Systems/Runner.cs"])
|
||||||
|
|
||||||
|
|
||||||
|
class BuildTriageHintsTests(unittest.TestCase):
    """Cover next-action classification for non-bug issue flows."""

    def test_build_triage_hints_routes_docs_issue_to_docs_topic_without_bug_style_clarification(self) -> None:
        """Docs issues with a clear requested change must not hit bug-style clarification gates."""
        docs_issue = {
            "title": "Update documentation landing page",
            "labels": ["docs"],
            "body": "The guide should explain the landing-page layout for new contributors.",
        }

        hints = MODULE.build_triage_hints(docs_issue, [])

        self.assertEqual(hints["issue_type_candidates"][0], "docs")
        self.assertEqual(hints["affected_active_topics"], [])
        self.assertFalse(hints["information_flags"]["needs_clarification"])
        self.assertEqual(hints["next_action"], "start-new-docs-topic-with-boot")

    def test_build_triage_hints_routes_feature_issue_to_new_topic_when_request_is_clear(self) -> None:
        """Feature requests with explicit desired behavior must stay actionable without repro gates."""
        feature_issue = {
            "title": "Support release note previews",
            "labels": ["enhancement"],
            "body": "The workflow should support previewing generated notes before completion.",
        }

        hints = MODULE.build_triage_hints(feature_issue, [])

        self.assertEqual(hints["issue_type_candidates"][0], "feature")
        self.assertEqual(hints["affected_active_topics"], [])
        self.assertFalse(hints["information_flags"]["needs_clarification"])
        self.assertEqual(hints["next_action"], "start-new-topic-with-boot")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Allow running this test file directly without an external test runner.
    unittest.main()
|
||||||
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
|
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
|
||||||
language: "zh-CN"
|
language: "zh-CN"
|
||||||
early_access: false
|
early_access: false
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
license_overrides:
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
license_overrides:
|
||||||
NETStandard.Library: MIT
|
NETStandard.Library: MIT
|
||||||
Microsoft.NETCore.Platforms: MIT
|
Microsoft.NETCore.Platforms: MIT
|
||||||
System.Buffers: MIT
|
System.Buffers: MIT
|
||||||
|
|||||||
3
.github/ISSUE_TEMPLATE/01-bug-report.yml
vendored
3
.github/ISSUE_TEMPLATE/01-bug-report.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: "Bug Report / 缺陷报告"
|
name: "Bug Report / 缺陷报告"
|
||||||
description: "Report a reproducible defect in GFramework. / 报告可稳定复现的 GFramework 缺陷。"
|
description: "Report a reproducible defect in GFramework. / 报告可稳定复现的 GFramework 缺陷。"
|
||||||
title: "[Bug]: "
|
title: "[Bug]: "
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: "Feature Request / 功能建议"
|
name: "Feature Request / 功能建议"
|
||||||
description: "Suggest a new capability or an API improvement. / 提出新能力或 API 改进建议。"
|
description: "Suggest a new capability or an API improvement. / 提出新能力或 API 改进建议。"
|
||||||
title: "[Feature]: "
|
title: "[Feature]: "
|
||||||
|
|||||||
3
.github/ISSUE_TEMPLATE/03-documentation.yml
vendored
3
.github/ISSUE_TEMPLATE/03-documentation.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: "Documentation / 文档改进"
|
name: "Documentation / 文档改进"
|
||||||
description: "Report missing, outdated, or unclear documentation. / 报告缺失、过期或不清晰的文档。"
|
description: "Report missing, outdated, or unclear documentation. / 报告缺失、过期或不清晰的文档。"
|
||||||
title: "[Docs]: "
|
title: "[Docs]: "
|
||||||
|
|||||||
3
.github/ISSUE_TEMPLATE/04-question.yml
vendored
3
.github/ISSUE_TEMPLATE/04-question.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: "Question / 使用咨询"
|
name: "Question / 使用咨询"
|
||||||
description: "Ask for guidance about usage, behavior, or adoption. / 询问用法、行为或接入方式。"
|
description: "Ask for guidance about usage, behavior, or adoption. / 询问用法、行为或接入方式。"
|
||||||
title: "[Question]: "
|
title: "[Question]: "
|
||||||
|
|||||||
3
.github/ISSUE_TEMPLATE/config.yml
vendored
3
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
blank_issues_enabled: false
|
blank_issues_enabled: false
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: "Search Existing Issues / 搜索现有 Issues"
|
- name: "Search Existing Issues / 搜索现有 Issues"
|
||||||
|
|||||||
3
.github/actions/validate-pat/action.yml
vendored
3
.github/actions/validate-pat/action.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: Validate PAT
|
name: Validate PAT
|
||||||
description: Validate that the release PAT can access the repository and push tags.
|
description: Validate that the release PAT can access the repository and push tags.
|
||||||
|
|
||||||
|
|||||||
99
.github/cliff.toml
vendored
Normal file
99
.github/cliff.toml
vendored
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
[remote.github]
|
||||||
|
owner = "GeWuYou"
|
||||||
|
repo = "GFramework"
|
||||||
|
|
||||||
|
[changelog]
|
||||||
|
header = ""
|
||||||
|
|
||||||
|
body = """
|
||||||
|
{%- macro remote_url() -%}
|
||||||
|
https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}
|
||||||
|
{%- endmacro -%}
|
||||||
|
|
||||||
|
{% macro has_release_highlight(commit) -%}
|
||||||
|
{%- set highlighted = false -%}
|
||||||
|
{%- if commit.remote and commit.remote.pr_labels -%}
|
||||||
|
{%- for label in commit.remote.pr_labels -%}
|
||||||
|
{%- if label == "release-highlight" or label == "highlight" -%}
|
||||||
|
{%- set highlighted = true -%}
|
||||||
|
{%- endif -%}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endif -%}
|
||||||
|
{%- if not highlighted and commit.footers -%}
|
||||||
|
{%- for footer in commit.footers -%}
|
||||||
|
{%- if footer.token == "Release-Highlight" and footer.value | trim == "true" -%}
|
||||||
|
{%- set highlighted = true -%}
|
||||||
|
{%- endif -%}
|
||||||
|
{%- endfor -%}
|
||||||
|
{%- endif -%}
|
||||||
|
{{ highlighted }}
|
||||||
|
{%- endmacro %}
|
||||||
|
|
||||||
|
{% macro print_commit(commit) -%}
|
||||||
|
- {{ commit.message | split(pat="\n") | first | trim | upper_first }}{% if commit.remote and commit.remote.username %} by @{{ commit.remote.username }}{% elif commit.author.name %} by {{ commit.author.name }}{% endif %}{% if commit.remote and commit.remote.pr_number %} in [#{{ commit.remote.pr_number }}]({{ self::remote_url() }}/pull/{{ commit.remote.pr_number }}){% endif %}
|
||||||
|
{%- endmacro %}
|
||||||
|
|
||||||
|
{% if version -%}
|
||||||
|
## {{ version }} ({{ timestamp | date(format="%Y-%m-%d") }})
|
||||||
|
{% else -%}
|
||||||
|
## 未发布
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% set highlights = commits | filter(attribute="breaking", value=true) %}
|
||||||
|
{% for commit in commits -%}
|
||||||
|
{% if self::has_release_highlight(commit=commit) == "true" -%}
|
||||||
|
{% set_global highlights = highlights | concat(with=commit) -%}
|
||||||
|
{% endif -%}
|
||||||
|
{% endfor -%}
|
||||||
|
|
||||||
|
{% if highlights | length > 0 -%}
|
||||||
|
## 重点条目
|
||||||
|
{% for commit in highlights -%}
|
||||||
|
{{ self::print_commit(commit=commit) }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endif -%}
|
||||||
|
|
||||||
|
{% if commits | length > 0 -%}
|
||||||
|
## What's Changed
|
||||||
|
|
||||||
|
{% for group, commits in commits | group_by(attribute="group") -%}
|
||||||
|
### {{ group | striptags | trim }}
|
||||||
|
{% for commit in commits -%}
|
||||||
|
{{ self::print_commit(commit=commit) }}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endfor -%}
|
||||||
|
{% endif -%}
|
||||||
|
|
||||||
|
{% if previous and previous.version and version -%}
|
||||||
|
Full Changelog: [{{ previous.version }}...{{ version }}]({{ self::remote_url() }}/compare/{{ previous.version }}...{{ version }})
|
||||||
|
{% endif -%}
|
||||||
|
"""
|
||||||
|
|
||||||
|
footer = ""
|
||||||
|
|
||||||
|
[git]
|
||||||
|
conventional_commits = true
|
||||||
|
filter_unconventional = true
|
||||||
|
split_commits = false
|
||||||
|
protect_breaking_commits = false
|
||||||
|
sort_commits = "oldest"
|
||||||
|
|
||||||
|
commit_parsers = [
|
||||||
|
{ message = ".*\\[skip changelog\\].*", skip = true },
|
||||||
|
{ body = ".*\\[skip changelog\\].*", skip = true },
|
||||||
|
{ message = "^feat", group = "<!-- 0 -->✨ 新功能" },
|
||||||
|
{ message = "^fix", group = "<!-- 1 -->🐛 Bug 修复" },
|
||||||
|
{ message = "^perf", group = "<!-- 2 -->⚡ 优化" },
|
||||||
|
{ message = "^refactor", group = "<!-- 2 -->⚡ 优化" },
|
||||||
|
{ message = "^docs", group = "<!-- 3 -->📝 文档/其他" },
|
||||||
|
{ message = "^test", group = "<!-- 3 -->📝 文档/其他" },
|
||||||
|
{ message = "^chore", group = "<!-- 3 -->📝 文档/其他" },
|
||||||
|
{ message = "^build", group = "<!-- 3 -->📝 文档/其他" },
|
||||||
|
{ message = "^ci", group = "<!-- 3 -->📝 文档/其他" },
|
||||||
|
{ message = "^style", group = "<!-- 3 -->📝 文档/其他" }
|
||||||
|
]
|
||||||
|
|
||||||
|
[git.github]
|
||||||
|
commits = true
|
||||||
3
.github/dependabot.yml
vendored
3
.github/dependabot.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
version: 2
|
version: 2
|
||||||
updates:
|
updates:
|
||||||
# ===== NuGet 依赖(所有项目)=====
|
# ===== NuGet 依赖(所有项目)=====
|
||||||
|
|||||||
56
.github/workflows/auto-tag.yml
vendored
56
.github/workflows/auto-tag.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: Semantic Release Version and Tag
|
name: Semantic Release Version and Tag
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@ -14,6 +17,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: read
|
contents: read
|
||||||
|
pull-requests: read
|
||||||
outputs:
|
outputs:
|
||||||
published: ${{ steps.semantic_release.outputs.new_release_published }}
|
published: ${{ steps.semantic_release.outputs.new_release_published }}
|
||||||
last_tag: ${{ steps.semantic_release.outputs.last_release_git_tag }}
|
last_tag: ${{ steps.semantic_release.outputs.last_release_git_tag }}
|
||||||
@ -56,13 +60,27 @@ jobs:
|
|||||||
echo "next_version=${{ steps.semantic_release.outputs.new_release_version }}"
|
echo "next_version=${{ steps.semantic_release.outputs.new_release_version }}"
|
||||||
echo "next_tag=${{ steps.semantic_release.outputs.new_release_git_tag }}"
|
echo "next_tag=${{ steps.semantic_release.outputs.new_release_git_tag }}"
|
||||||
|
|
||||||
|
- name: Generate preview release notes
|
||||||
|
if: ${{ steps.semantic_release.outputs.new_release_published == 'true' }}
|
||||||
|
id: cliff_preview
|
||||||
|
uses: orhun/git-cliff-action@v4
|
||||||
|
with:
|
||||||
|
config: .github/cliff.toml
|
||||||
|
args: >-
|
||||||
|
-vv --unreleased --strip header
|
||||||
|
--tag "${{ steps.semantic_release.outputs.new_release_git_tag }}"
|
||||||
|
env:
|
||||||
|
OUTPUT: PREVIEW_RELEASE_NOTES.md
|
||||||
|
GITHUB_REPO: ${{ github.repository }}
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
|
||||||
- name: Write preview summary
|
- name: Write preview summary
|
||||||
env:
|
env:
|
||||||
RELEASE_PUBLISHED: ${{ steps.semantic_release.outputs.new_release_published }}
|
RELEASE_PUBLISHED: ${{ steps.semantic_release.outputs.new_release_published }}
|
||||||
RELEASE_NOTES: ${{ steps.semantic_release.outputs.new_release_notes }}
|
CLIFF_RELEASE_NOTES: ${{ steps.cliff_preview.outputs.content }}
|
||||||
run: |
|
run: |
|
||||||
{
|
{
|
||||||
echo "## Semantic Release Preview"
|
echo "## Release Preview"
|
||||||
echo
|
echo
|
||||||
echo "- Commit: \`${{ github.sha }}\`"
|
echo "- Commit: \`${{ github.sha }}\`"
|
||||||
echo "- Release needed: \`${{ steps.semantic_release.outputs.new_release_published }}\`"
|
echo "- Release needed: \`${{ steps.semantic_release.outputs.new_release_published }}\`"
|
||||||
@ -71,13 +89,11 @@ jobs:
|
|||||||
echo "- Next tag: \`${{ steps.semantic_release.outputs.new_release_git_tag }}\`"
|
echo "- Next tag: \`${{ steps.semantic_release.outputs.new_release_git_tag }}\`"
|
||||||
echo "- Preview auth: uses \`PAT_TOKEN\` because semantic-release dry-run still performs a remote push permission probe."
|
echo "- Preview auth: uses \`PAT_TOKEN\` because semantic-release dry-run still performs a remote push permission probe."
|
||||||
echo "- Snapshot semantics: this preview is pinned to dispatch SHA \`${{ github.sha }}\`; commits added to \`main\` after the run starts are not included."
|
echo "- Snapshot semantics: this preview is pinned to dispatch SHA \`${{ github.sha }}\`; commits added to \`main\` after the run starts are not included."
|
||||||
if [ "${RELEASE_PUBLISHED}" = "true" ] && [ -n "${RELEASE_NOTES}" ]; then
|
if [ "${RELEASE_PUBLISHED}" = "true" ] && [ -n "${CLIFF_RELEASE_NOTES}" ]; then
|
||||||
echo
|
echo
|
||||||
echo "<details><summary>Preview release notes</summary>"
|
echo "### 候选发布说明"
|
||||||
echo
|
echo
|
||||||
printf '%s\n' "${RELEASE_NOTES}"
|
printf '%s\n' "${CLIFF_RELEASE_NOTES}"
|
||||||
echo
|
|
||||||
echo "</details>"
|
|
||||||
fi
|
fi
|
||||||
echo
|
echo
|
||||||
echo "If the version looks correct, approve the \`release-approval\` environment to continue."
|
echo "If the version looks correct, approve the \`release-approval\` environment to continue."
|
||||||
@ -93,6 +109,7 @@ jobs:
|
|||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
|
pull-requests: read
|
||||||
environment:
|
environment:
|
||||||
name: release-approval
|
name: release-approval
|
||||||
steps:
|
steps:
|
||||||
@ -131,13 +148,26 @@ jobs:
|
|||||||
echo "next_version=${{ steps.semantic_release.outputs.new_release_version }}"
|
echo "next_version=${{ steps.semantic_release.outputs.new_release_version }}"
|
||||||
echo "next_tag=${{ steps.semantic_release.outputs.new_release_git_tag }}"
|
echo "next_tag=${{ steps.semantic_release.outputs.new_release_git_tag }}"
|
||||||
|
|
||||||
|
- name: Generate published release notes
|
||||||
|
if: ${{ steps.semantic_release.outputs.new_release_published == 'true' }}
|
||||||
|
id: cliff_release
|
||||||
|
uses: orhun/git-cliff-action@v4
|
||||||
|
with:
|
||||||
|
config: .github/cliff.toml
|
||||||
|
args: >-
|
||||||
|
-vv --latest --strip header
|
||||||
|
env:
|
||||||
|
OUTPUT: PUBLISHED_RELEASE_NOTES.md
|
||||||
|
GITHUB_REPO: ${{ github.repository }}
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
|
||||||
- name: Write release summary
|
- name: Write release summary
|
||||||
env:
|
env:
|
||||||
RELEASE_PUBLISHED: ${{ steps.semantic_release.outputs.new_release_published }}
|
RELEASE_PUBLISHED: ${{ steps.semantic_release.outputs.new_release_published }}
|
||||||
RELEASE_NOTES: ${{ steps.semantic_release.outputs.new_release_notes }}
|
CLIFF_RELEASE_NOTES: ${{ steps.cliff_release.outputs.content }}
|
||||||
run: |
|
run: |
|
||||||
{
|
{
|
||||||
echo "## Semantic Release Publish"
|
echo "## Release Publish"
|
||||||
echo
|
echo
|
||||||
echo "- Commit: \`${{ github.sha }}\`"
|
echo "- Commit: \`${{ github.sha }}\`"
|
||||||
echo "- Preview last tag: \`${{ needs.preview.outputs.last_tag }}\`"
|
echo "- Preview last tag: \`${{ needs.preview.outputs.last_tag }}\`"
|
||||||
@ -148,12 +178,10 @@ jobs:
|
|||||||
echo "- Next version: \`${{ steps.semantic_release.outputs.new_release_version }}\`"
|
echo "- Next version: \`${{ steps.semantic_release.outputs.new_release_version }}\`"
|
||||||
echo "- Next tag: \`${{ steps.semantic_release.outputs.new_release_git_tag }}\`"
|
echo "- Next tag: \`${{ steps.semantic_release.outputs.new_release_git_tag }}\`"
|
||||||
echo "- Snapshot semantics: this publish run still uses dispatch SHA \`${{ github.sha }}\`; commits added to \`main\` after the preview started are excluded."
|
echo "- Snapshot semantics: this publish run still uses dispatch SHA \`${{ github.sha }}\`; commits added to \`main\` after the preview started are excluded."
|
||||||
if [ "${RELEASE_PUBLISHED}" = "true" ] && [ -n "${RELEASE_NOTES}" ]; then
|
if [ "${RELEASE_PUBLISHED}" = "true" ] && [ -n "${CLIFF_RELEASE_NOTES}" ]; then
|
||||||
echo
|
echo
|
||||||
echo "<details><summary>Published release notes</summary>"
|
echo "### 已发布说明"
|
||||||
echo
|
echo
|
||||||
printf '%s\n' "${RELEASE_NOTES}"
|
printf '%s\n' "${CLIFF_RELEASE_NOTES}"
|
||||||
echo
|
|
||||||
echo "</details>"
|
|
||||||
fi
|
fi
|
||||||
} >> "${GITHUB_STEP_SUMMARY}"
|
} >> "${GITHUB_STEP_SUMMARY}"
|
||||||
|
|||||||
71
.github/workflows/benchmark.yml
vendored
Normal file
71
.github/workflows/benchmark.yml
vendored
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
name: Benchmark
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
benchmark_filter:
|
||||||
|
description: '可选的 BenchmarkDotNet 过滤器;留空时仅执行 benchmark 项目 Release build'
|
||||||
|
required: false
|
||||||
|
default: ''
|
||||||
|
type: string
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
benchmark:
|
||||||
|
name: Benchmark Build Or Run
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout code
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup .NET 10
|
||||||
|
uses: actions/setup-dotnet@v5
|
||||||
|
with:
|
||||||
|
dotnet-version: 10.0.x
|
||||||
|
|
||||||
|
- name: Cache NuGet packages
|
||||||
|
uses: actions/cache@v5
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.nuget/packages
|
||||||
|
~/.local/share/NuGet
|
||||||
|
key: ${{ runner.os }}-nuget-benchmarks-${{ hashFiles('GFramework.Cqrs.Benchmarks/*.csproj', 'GFramework.Cqrs/*.csproj', 'GFramework.Cqrs.Abstractions/*.csproj', 'GFramework.Core/*.csproj', 'GFramework.Core.Abstractions/*.csproj', '**/nuget.config') }}
|
||||||
|
|
||||||
|
- name: Restore benchmark project
|
||||||
|
run: dotnet restore GFramework.Cqrs.Benchmarks/GFramework.Cqrs.Benchmarks.csproj
|
||||||
|
|
||||||
|
- name: Build benchmark project
|
||||||
|
run: dotnet build GFramework.Cqrs.Benchmarks/GFramework.Cqrs.Benchmarks.csproj -c Release --no-restore
|
||||||
|
|
||||||
|
- name: Report build-only mode
|
||||||
|
if: ${{ inputs.benchmark_filter == '' }}
|
||||||
|
run: |
|
||||||
|
echo "No benchmark filter provided."
|
||||||
|
echo "Workflow completed after validating the benchmark project build."
|
||||||
|
|
||||||
|
- name: Run filtered benchmarks
|
||||||
|
if: ${{ inputs.benchmark_filter != '' }}
|
||||||
|
env:
|
||||||
|
BENCHMARK_FILTER: ${{ inputs.benchmark_filter }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
dotnet run --project GFramework.Cqrs.Benchmarks/GFramework.Cqrs.Benchmarks.csproj -c Release --no-build -- \
|
||||||
|
--filter "$BENCHMARK_FILTER"
|
||||||
|
|
||||||
|
- name: Upload BenchmarkDotNet artifacts
|
||||||
|
if: ${{ always() && inputs.benchmark_filter != '' }}
|
||||||
|
uses: actions/upload-artifact@v7
|
||||||
|
with:
|
||||||
|
name: benchmark-artifacts
|
||||||
|
path: |
|
||||||
|
BenchmarkDotNet.Artifacts/**
|
||||||
|
GFramework.Cqrs.Benchmarks/bin/Release/net10.0/BenchmarkDotNet.Artifacts/**
|
||||||
|
if-no-files-found: ignore
|
||||||
23
.github/workflows/ci.yml
vendored
23
.github/workflows/ci.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# CI/CD工作流配置:构建和测试.NET项目
|
# CI/CD工作流配置:构建和测试.NET项目
|
||||||
# 该工作流仅在创建或更新面向任意分支的 pull request 时触发
|
# 该工作流仅在创建或更新面向任意分支的 pull request 时触发
|
||||||
name: CI - Build & Test
|
name: CI - Build & Test
|
||||||
@ -28,6 +31,13 @@ jobs:
|
|||||||
- name: Validate C# naming
|
- name: Validate C# naming
|
||||||
run: bash scripts/validate-csharp-naming.sh
|
run: bash scripts/validate-csharp-naming.sh
|
||||||
|
|
||||||
|
# 校验仓库维护源码是否包含 Apache-2.0 文件头声明
|
||||||
|
- name: Validate license headers
|
||||||
|
run: python3 scripts/license-header.py --check
|
||||||
|
|
||||||
|
- name: Validate runtime-generator boundaries
|
||||||
|
run: python3 scripts/validate-runtime-generator-boundaries.py
|
||||||
|
|
||||||
# 缓存MegaLinter
|
# 缓存MegaLinter
|
||||||
- name: Cache MegaLinter
|
- name: Cache MegaLinter
|
||||||
uses: actions/cache@v5
|
uses: actions/cache@v5
|
||||||
@ -145,6 +155,19 @@ jobs:
|
|||||||
- name: Build
|
- name: Build
|
||||||
run: dotnet build GFramework.sln -c Release --no-restore
|
run: dotnet build GFramework.sln -c Release --no-restore
|
||||||
|
|
||||||
|
- name: Pack published modules
|
||||||
|
run: |
|
||||||
|
rm -rf ./packages
|
||||||
|
dotnet pack GFramework.sln \
|
||||||
|
-c Release \
|
||||||
|
--no-build \
|
||||||
|
--no-restore \
|
||||||
|
-o ./packages \
|
||||||
|
-p:IncludeSymbols=false
|
||||||
|
|
||||||
|
- name: Validate packed modules
|
||||||
|
run: bash scripts/validate-packed-modules.sh ./packages
|
||||||
|
|
||||||
# 运行单元测试,输出TRX格式结果到TestResults目录
|
# 运行单元测试,输出TRX格式结果到TestResults目录
|
||||||
# 顺序执行各测试项目,避免并发 dotnet test 进程导致“TRX 全绿但 step 仍返回失败”的假红状态
|
# 顺序执行各测试项目,避免并发 dotnet test 进程导致“TRX 全绿但 step 仍返回失败”的假红状态
|
||||||
- name: Test All Projects
|
- name: Test All Projects
|
||||||
|
|||||||
3
.github/workflows/codeql.yml
vendored
3
.github/workflows/codeql.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# GitHub Actions工作流配置:CodeQL静态代码分析
|
# GitHub Actions工作流配置:CodeQL静态代码分析
|
||||||
# 该工作流用于对C#项目进行安全漏洞和代码质量分析
|
# 该工作流用于对C#项目进行安全漏洞和代码质量分析
|
||||||
name: "CodeQL"
|
name: "CodeQL"
|
||||||
|
|||||||
9
.github/workflows/license-compliance.yml
vendored
9
.github/workflows/license-compliance.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: License Compliance (Feluda)
|
name: License Compliance (Feluda)
|
||||||
|
|
||||||
on:
|
on:
|
||||||
@ -62,6 +65,7 @@ jobs:
|
|||||||
# with: 配置上传的具体内容
|
# with: 配置上传的具体内容
|
||||||
# name: 工件名称,用于标识上传的文件集合
|
# name: 工件名称,用于标识上传的文件集合
|
||||||
# path: 指定需要上传的文件路径列表(支持多行格式)
|
# path: 指定需要上传的文件路径列表(支持多行格式)
|
||||||
|
# third-party-licenses/**: 手工维护的参考源码许可证原文
|
||||||
- name: Upload compliance artifacts
|
- name: Upload compliance artifacts
|
||||||
uses: actions/upload-artifact@v7
|
uses: actions/upload-artifact@v7
|
||||||
with:
|
with:
|
||||||
@ -69,6 +73,7 @@ jobs:
|
|||||||
path: |
|
path: |
|
||||||
NOTICE
|
NOTICE
|
||||||
THIRD_PARTY_LICENSES.md
|
THIRD_PARTY_LICENSES.md
|
||||||
|
third-party-licenses/**
|
||||||
sbom.spdx.json
|
sbom.spdx.json
|
||||||
sbom.cyclonedx.json
|
sbom.cyclonedx.json
|
||||||
sbom-spdx-validation.txt
|
sbom-spdx-validation.txt
|
||||||
@ -79,15 +84,17 @@ jobs:
|
|||||||
# 压缩包中包含以下文件:
|
# 压缩包中包含以下文件:
|
||||||
# - NOTICE: 项目声明文件
|
# - NOTICE: 项目声明文件
|
||||||
# - THIRD_PARTY_LICENSES.md: 第三方许可证列表
|
# - THIRD_PARTY_LICENSES.md: 第三方许可证列表
|
||||||
|
# - third-party-licenses/: 手工维护的参考源码许可证原文
|
||||||
# - sbom.spdx.json: SPDX 格式的软件物料清单
|
# - sbom.spdx.json: SPDX 格式的软件物料清单
|
||||||
# - sbom.cyclonedx.json: CycloneDX 格式的软件物料清单
|
# - sbom.cyclonedx.json: CycloneDX 格式的软件物料清单
|
||||||
# - sbom-spdx-validation.txt: SPDX 格式验证结果
|
# - sbom-spdx-validation.txt: SPDX 格式验证结果
|
||||||
# - sbom-cyclonedx-validation.txt: CycloneDX 格式验证结果
|
# - sbom-cyclonedx-validation.txt: CycloneDX 格式验证结果
|
||||||
- name: Package compliance bundle
|
- name: Package compliance bundle
|
||||||
run: |
|
run: |
|
||||||
zip license-compliance.zip \
|
zip -r license-compliance.zip \
|
||||||
NOTICE \
|
NOTICE \
|
||||||
THIRD_PARTY_LICENSES.md \
|
THIRD_PARTY_LICENSES.md \
|
||||||
|
third-party-licenses \
|
||||||
sbom.spdx.json \
|
sbom.spdx.json \
|
||||||
sbom.cyclonedx.json \
|
sbom.cyclonedx.json \
|
||||||
sbom-spdx-validation.txt \
|
sbom-spdx-validation.txt \
|
||||||
|
|||||||
54
.github/workflows/license-header-fix.yml
vendored
Normal file
54
.github/workflows/license-header-fix.yml
vendored
Normal file
@ -0,0 +1,54 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
# 维护者手动触发的 Apache-2.0 文件头修复流程。
|
||||||
|
name: License Header Fix
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
base_branch:
|
||||||
|
description: Branch to fix and target with the generated pull request.
|
||||||
|
required: true
|
||||||
|
default: main
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
fix-license-headers:
|
||||||
|
name: Create license header fix PR
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout target branch
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.base_branch }}
|
||||||
|
|
||||||
|
- name: Add missing license headers
|
||||||
|
run: python3 scripts/license-header.py --fix
|
||||||
|
|
||||||
|
- name: Create pull request
|
||||||
|
uses: peter-evans/create-pull-request@v8
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
base: ${{ inputs.base_branch }}
|
||||||
|
branch: chore/license-headers-${{ github.run_id }}
|
||||||
|
delete-branch: true
|
||||||
|
commit-message: |
|
||||||
|
chore(license): 补齐 Apache-2.0 文件头
|
||||||
|
|
||||||
|
- 补充缺失源文件许可证声明
|
||||||
|
- 更新文件头治理校验结果
|
||||||
|
title: "chore(license): 补齐 Apache-2.0 文件头"
|
||||||
|
body: |
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
- 补齐仓库维护源码和配置文件缺失的 Apache-2.0 文件头
|
||||||
|
- 使用 `scripts/license-header.py --fix` 生成本次修复
|
||||||
|
|
||||||
|
## Validation
|
||||||
|
|
||||||
|
- `python3 scripts/license-header.py --check`
|
||||||
3
.github/workflows/publish-docs.yml
vendored
3
.github/workflows/publish-docs.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# 工作流名称:Publish Docs
|
# 工作流名称:Publish Docs
|
||||||
# 该工作流用于在推送标签或手动触发时构建并部署文档到 GitHub Pages
|
# 该工作流用于在推送标签或手动触发时构建并部署文档到 GitHub Pages
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
name: Publish VS Code Extension
|
name: Publish VS Code Extension
|
||||||
|
|
||||||
on:
|
on:
|
||||||
|
|||||||
77
.github/workflows/publish.yml
vendored
77
.github/workflows/publish.yml
vendored
@ -1,3 +1,6 @@
|
|||||||
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
# 发布工作流(NuGet + GitHub Packages + GitHub Release)
|
# 发布工作流(NuGet + GitHub Packages + GitHub Release)
|
||||||
#
|
#
|
||||||
# 功能:当推送标签时自动构建、打包,并将相同产物并发发布到 NuGet.org 与 GitHub Packages,
|
# 功能:当推送标签时自动构建、打包,并将相同产物并发发布到 NuGet.org 与 GitHub Packages,
|
||||||
@ -79,41 +82,10 @@ jobs:
|
|||||||
-p:IncludeSymbols=false
|
-p:IncludeSymbols=false
|
||||||
|
|
||||||
- name: Validate packed modules
|
- name: Validate packed modules
|
||||||
run: |
|
run: bash scripts/validate-packed-modules.sh ./packages
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
expected_packages=(
|
- name: Validate runtime-generator package boundaries
|
||||||
"GeWuYou.GFramework"
|
run: python3 scripts/validate-runtime-generator-boundaries.py --package-dir ./packages
|
||||||
"GeWuYou.GFramework.Core"
|
|
||||||
"GeWuYou.GFramework.Core.Abstractions"
|
|
||||||
"GeWuYou.GFramework.Core.SourceGenerators"
|
|
||||||
"GeWuYou.GFramework.Cqrs"
|
|
||||||
"GeWuYou.GFramework.Cqrs.Abstractions"
|
|
||||||
"GeWuYou.GFramework.Cqrs.SourceGenerators"
|
|
||||||
"GeWuYou.GFramework.Ecs.Arch"
|
|
||||||
"GeWuYou.GFramework.Ecs.Arch.Abstractions"
|
|
||||||
"GeWuYou.GFramework.Game"
|
|
||||||
"GeWuYou.GFramework.Game.Abstractions"
|
|
||||||
"GeWuYou.GFramework.Game.SourceGenerators"
|
|
||||||
"GeWuYou.GFramework.Godot"
|
|
||||||
"GeWuYou.GFramework.Godot.SourceGenerators"
|
|
||||||
)
|
|
||||||
|
|
||||||
mapfile -t actual_packages < <(
|
|
||||||
find ./packages -maxdepth 1 -type f -name '*.nupkg' -printf '%f\n' \
|
|
||||||
| sed -E 's/\.[0-9][0-9A-Za-z.-]*\.nupkg$//' \
|
|
||||||
| sort -u
|
|
||||||
)
|
|
||||||
|
|
||||||
printf '%s\n' "${expected_packages[@]}" | sort > expected-packages.txt
|
|
||||||
printf '%s\n' "${actual_packages[@]}" | sort > actual-packages.txt
|
|
||||||
|
|
||||||
echo "Expected packages:"
|
|
||||||
cat expected-packages.txt
|
|
||||||
echo "Actual packages:"
|
|
||||||
cat actual-packages.txt
|
|
||||||
|
|
||||||
diff -u expected-packages.txt actual-packages.txt
|
|
||||||
|
|
||||||
- name: Show packages
|
- name: Show packages
|
||||||
run: ls -la ./packages || true
|
run: ls -la ./packages || true
|
||||||
@ -240,27 +212,56 @@ jobs:
|
|||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
packages: read
|
packages: read
|
||||||
|
pull-requests: read
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
- name: Checkout repository (at tag)
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
persist-credentials: true
|
||||||
|
|
||||||
- name: Download package artifacts
|
- name: Download package artifacts
|
||||||
uses: actions/download-artifact@v8
|
uses: actions/download-artifact@v8
|
||||||
with:
|
with:
|
||||||
name: packages
|
name: packages
|
||||||
path: ./packages
|
path: ./packages
|
||||||
|
|
||||||
|
- name: Generate release notes
|
||||||
|
id: cliff_release
|
||||||
|
uses: orhun/git-cliff-action@v4
|
||||||
|
with:
|
||||||
|
config: .github/cliff.toml
|
||||||
|
args: >-
|
||||||
|
-vv --latest --strip header
|
||||||
|
env:
|
||||||
|
OUTPUT: RELEASE_NOTES.md
|
||||||
|
GITHUB_REPO: ${{ github.repository }}
|
||||||
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
|
||||||
# 无论某一侧包源发布是否失败,都继续创建 Release。
|
# 无论某一侧包源发布是否失败,都继续创建 Release。
|
||||||
# 合规工件由独立 workflow 生成,当前发布流不再假设这些文件在同一次运行中可用。
|
# 合规工件由独立 workflow 生成,当前发布流不再假设这些文件在同一次运行中可用。
|
||||||
- name: Create GitHub Release and Upload Assets
|
- name: Create GitHub Release and Upload Assets
|
||||||
uses: softprops/action-gh-release@v3
|
uses: softprops/action-gh-release@v3
|
||||||
with:
|
with:
|
||||||
generate_release_notes: true
|
|
||||||
name: "Release ${{ github.ref_name }}"
|
name: "Release ${{ github.ref_name }}"
|
||||||
body: |
|
body_path: RELEASE_NOTES.md
|
||||||
Release created by CI for tag ${{ github.ref_name }}
|
|
||||||
Package version: ${{ needs.build-pack.outputs.package_version }}
|
|
||||||
draft: false
|
draft: false
|
||||||
prerelease: false
|
prerelease: false
|
||||||
files: |
|
files: |
|
||||||
./packages/*.nupkg
|
./packages/*.nupkg
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ github.token }}
|
GITHUB_TOKEN: ${{ github.token }}
|
||||||
|
|
||||||
|
- name: Write publish summary
|
||||||
|
env:
|
||||||
|
CLIFF_RELEASE_NOTES: ${{ steps.cliff_release.outputs.content }}
|
||||||
|
run: |
|
||||||
|
{
|
||||||
|
echo "## GitHub Release"
|
||||||
|
echo
|
||||||
|
echo "- Tag: \`${{ github.ref_name }}\`"
|
||||||
|
echo "- Package version: \`${{ needs.build-pack.outputs.package_version }}\`"
|
||||||
|
echo
|
||||||
|
printf '%s\n' "${CLIFF_RELEASE_NOTES}"
|
||||||
|
} >> "${GITHUB_STEP_SUMMARY}"
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@ -26,3 +26,4 @@ ai-libs/
|
|||||||
.codex
|
.codex
|
||||||
# tool
|
# tool
|
||||||
.venv/
|
.venv/
|
||||||
|
BenchmarkDotNet.Artifacts/
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
# 配置文件用于设置代码质量检查工具的各项参数和规则
|
# Copyright (c) 2025-2026 GeWuYou
|
||||||
|
# SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
# 配置文件用于设置代码质量检查工具的各项参数和规则
|
||||||
# 包含全局排除目录、启用/禁用的检查器、特定语言配置等设置
|
# 包含全局排除目录、启用/禁用的检查器、特定语言配置等设置
|
||||||
|
|
||||||
APPLY_FIXES: none
|
APPLY_FIXES: none
|
||||||
|
|||||||
@ -33,6 +33,14 @@
|
|||||||
"type": "refactor",
|
"type": "refactor",
|
||||||
"release": "patch"
|
"release": "patch"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "deps",
|
||||||
|
"release": "patch"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "security",
|
||||||
|
"release": "patch"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"type": "docs",
|
"type": "docs",
|
||||||
"release": false
|
"release": false
|
||||||
@ -70,6 +78,45 @@
|
|||||||
"@semantic-release/release-notes-generator",
|
"@semantic-release/release-notes-generator",
|
||||||
{
|
{
|
||||||
"preset": "conventionalcommits",
|
"preset": "conventionalcommits",
|
||||||
|
"presetConfig": {
|
||||||
|
"types": [
|
||||||
|
{
|
||||||
|
"type": "feat",
|
||||||
|
"section": "Features",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "fix",
|
||||||
|
"section": "Bug Fixes",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "perf",
|
||||||
|
"section": "Performance Improvements",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "refactor",
|
||||||
|
"section": "Refactoring",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "deps",
|
||||||
|
"section": "Dependency Updates",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "security",
|
||||||
|
"section": "Security Fixes",
|
||||||
|
"hidden": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "revert",
|
||||||
|
"section": "Reverts",
|
||||||
|
"hidden": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
"parserOpts": {
|
"parserOpts": {
|
||||||
"noteKeywords": [
|
"noteKeywords": [
|
||||||
"BREAKING CHANGE",
|
"BREAKING CHANGE",
|
||||||
|
|||||||
24
AGENTS.md
24
AGENTS.md
@ -60,6 +60,10 @@ All AI agents and contributors must follow these rules when writing, reviewing,
|
|||||||
`minor` segment.
|
`minor` segment.
|
||||||
- Use `fix` for behavior corrections, `perf` for observable performance improvements, and `refactor` only for
|
- Use `fix` for behavior corrections, `perf` for observable performance improvements, and `refactor` only for
|
||||||
non-feature code restructuring; these should raise the next released version's `patch` segment.
|
non-feature code restructuring; these should raise the next released version's `patch` segment.
|
||||||
|
- Use `deps` for dependency version updates, dependency lockfile refreshes, and package maintenance that should raise
|
||||||
|
the next released version's `patch` segment.
|
||||||
|
- Use `security` for vulnerability fixes, dependency security mitigations, and security configuration corrections
|
||||||
|
that should raise the next released version's `patch` segment.
|
||||||
- Use `docs`、`test`、`chore`、`build`、`ci`、`style` for their literal categories; do not encode these changes as
|
- Use `docs`、`test`、`chore`、`build`、`ci`、`style` for their literal categories; do not encode these changes as
|
||||||
`feat` just because they feel important. These categories MUST NOT trigger a release.
|
`feat` just because they feel important. These categories MUST NOT trigger a release.
|
||||||
- Use `BREAKING CHANGE` in the commit footer or `!` after the type / scope header (for example `feat!:` or
|
- Use `BREAKING CHANGE` in the commit footer or `!` after the type / scope header (for example `feat!:` or
|
||||||
@ -79,6 +83,23 @@ All AI agents and contributors must follow these rules when writing, reviewing,
|
|||||||
- The branch naming rule for a new task branch is `<type>/<topic-or-scope>`, where `<type>` should match the intended
|
- The branch naming rule for a new task branch is `<type>/<topic-or-scope>`, where `<type>` should match the intended
|
||||||
Conventional Commit category as closely as practical.
|
Conventional Commit category as closely as practical.
|
||||||
|
|
||||||
|
## License Header Rules
|
||||||
|
|
||||||
|
- Repository-maintained source and configuration files that are supported by `scripts/license-header.py` MUST include an
|
||||||
|
Apache-2.0 file header before the task is considered complete.
|
||||||
|
- When creating or modifying supported files, contributors MUST preserve an existing compliant header or add the SPDX
|
||||||
|
header generated by `python3 scripts/license-header.py --fix`.
|
||||||
|
- Before committing changes that add or modify supported source/configuration files, contributors MUST run
|
||||||
|
`python3 scripts/license-header.py --check` and resolve any missing or misplaced headers.
|
||||||
|
- For files with shebang lines, keep the shebang as the first line and place the license header immediately after it.
|
||||||
|
- For XML/MSBuild files with an XML declaration, keep the XML declaration as the first node and place the license header
|
||||||
|
immediately after it.
|
||||||
|
- Do not add project license headers to excluded or third-party areas such as `.agents/**`, `ai-libs/**`,
|
||||||
|
`third-party-licenses/**`, generated snapshots, binary assets, lock files, and generated build output. Treat
|
||||||
|
`scripts/license-header.py` as the authoritative include/exclude policy for this check.
|
||||||
|
- If CI reports a license-header failure, either fix it locally with `python3 scripts/license-header.py --fix` or, for
|
||||||
|
maintainer-owned cleanup, use the manual `License Header Fix` GitHub Actions workflow to create a reviewed repair PR.
|
||||||
|
|
||||||
## Repository Boot Skill
|
## Repository Boot Skill
|
||||||
|
|
||||||
- The repository-maintained Codex boot skill lives at `.codex/skills/gframework-boot/`.
|
- The repository-maintained Codex boot skill lives at `.codex/skills/gframework-boot/`.
|
||||||
@ -191,6 +212,9 @@ All generated or modified code MUST include clear and meaningful comments where
|
|||||||
- Private fields: `_camelCase`
|
- Private fields: `_camelCase`
|
||||||
- Keep abstractions projects free of implementation details and engine-specific dependencies.
|
- Keep abstractions projects free of implementation details and engine-specific dependencies.
|
||||||
- Preserve existing module boundaries. Do not introduce new cross-module dependencies without clear architectural need.
|
- Preserve existing module boundaries. Do not introduce new cross-module dependencies without clear architectural need.
|
||||||
|
- Framework runtime, abstractions, and meta-package projects MUST NOT reference `*.SourceGenerators*` projects or packages,
|
||||||
|
and MUST NOT use source-generator attributes such as `GenerateEnumExtensions` or `ContextAware`. Those capabilities are
|
||||||
|
reserved for consumer projects, generator projects, examples explicitly meant to demonstrate generator usage, and related tests.
|
||||||
|
|
||||||
### Formatting
|
### Formatting
|
||||||
|
|
||||||
|
|||||||
@ -1,11 +1,16 @@
|
|||||||
|
<!--
|
||||||
|
Copyright (c) 2025-2026 GeWuYou
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
<Project>
|
<Project>
|
||||||
<!-- Keep repository-wide analyzer behavior consistent while allowing only selected projects to opt into polyfills. -->
|
<!-- Keep repository-wide analyzer behavior consistent while allowing only selected projects to opt into polyfills. -->
|
||||||
<ItemGroup>
|
<ItemGroup>
|
||||||
<PackageReference Include="Meziantou.Analyzer" Version="3.0.52">
|
<PackageReference Include="Meziantou.Analyzer" Version="3.0.72">
|
||||||
<PrivateAssets>all</PrivateAssets>
|
<PrivateAssets>all</PrivateAssets>
|
||||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
|
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
|
||||||
</PackageReference>
|
</PackageReference>
|
||||||
<PackageReference Update="Meziantou.Polyfill" Version="1.0.116">
|
<PackageReference Update="Meziantou.Polyfill" Version="1.0.123">
|
||||||
<PrivateAssets>all</PrivateAssets>
|
<PrivateAssets>all</PrivateAssets>
|
||||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
|
<IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
|
||||||
</PackageReference>
|
</PackageReference>
|
||||||
|
|||||||
@ -1,3 +1,8 @@
|
|||||||
|
<!--
|
||||||
|
Copyright (c) 2025-2026 GeWuYou
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
<Project>
|
<Project>
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using System.Collections.Concurrent;
|
using System.Collections.Concurrent;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Enums;
|
using GFramework.Core.Abstractions.Enums;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using System.Reflection;
|
using System.Reflection;
|
||||||
using GFramework.Core.Abstractions.Lifecycle;
|
using GFramework.Core.Abstractions.Lifecycle;
|
||||||
using GFramework.Core.Abstractions.Model;
|
using GFramework.Core.Abstractions.Model;
|
||||||
@ -81,6 +84,20 @@ public interface IArchitecture : IAsyncInitializable, IAsyncDestroyable, IInitia
|
|||||||
void RegisterCqrsPipelineBehavior<TBehavior>()
|
void RegisterCqrsPipelineBehavior<TBehavior>()
|
||||||
where TBehavior : class;
|
where TBehavior : class;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// 注册 CQRS 流式请求管道行为。
|
||||||
|
/// 既支持实现 <c>IStreamPipelineBehavior<,></c> 的开放泛型行为类型,
|
||||||
|
/// 也支持绑定到单一流式请求/响应对的封闭行为类型。
|
||||||
|
/// </summary>
|
||||||
|
/// <typeparam name="TBehavior">行为类型,必须是引用类型</typeparam>
|
||||||
|
/// <exception cref="InvalidOperationException">当前架构的底层容器已冻结,无法继续注册流式管道行为。</exception>
|
||||||
|
/// <exception cref="ObjectDisposedException">当前架构的底层容器已释放,无法继续注册流式管道行为。</exception>
|
||||||
|
/// <remarks>
|
||||||
|
/// 该入口应在架构初始化冻结容器之前调用;具体开放泛型或封闭行为类型的校验逻辑由底层容器负责。
|
||||||
|
/// </remarks>
|
||||||
|
void RegisterCqrsStreamPipelineBehavior<TBehavior>()
|
||||||
|
where TBehavior : class;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 从指定程序集显式注册 CQRS 处理器。
|
/// 从指定程序集显式注册 CQRS 处理器。
|
||||||
/// 当处理器位于默认架构程序集之外的模块或扩展程序集中时,可在初始化阶段调用该入口接入对应程序集。
|
/// 当处理器位于默认架构程序集之外的模块或扩展程序集中时,可在初始化阶段调用该入口接入对应程序集。
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
using GFramework.Core.Abstractions.Properties;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
using GFramework.Core.Abstractions.Properties;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Command;
|
using GFramework.Core.Abstractions.Command;
|
||||||
using GFramework.Core.Abstractions.Environment;
|
using GFramework.Core.Abstractions.Environment;
|
||||||
using GFramework.Core.Abstractions.Events;
|
using GFramework.Core.Abstractions.Events;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
using GFramework.Core.Abstractions.Enums;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
using GFramework.Core.Abstractions.Enums;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Architectures;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 架构模块接口,继承自架构生命周期接口。
|
/// 架构模块接口,继承自架构生命周期接口。
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
using GFramework.Core.Abstractions.Enums;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
using GFramework.Core.Abstractions.Enums;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Command;
|
using GFramework.Core.Abstractions.Command;
|
||||||
using GFramework.Core.Abstractions.Events;
|
using GFramework.Core.Abstractions.Events;
|
||||||
using GFramework.Core.Abstractions.Ioc;
|
using GFramework.Core.Abstractions.Ioc;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Ioc;
|
using GFramework.Core.Abstractions.Ioc;
|
||||||
using GFramework.Core.Abstractions.Lifecycle;
|
using GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Ioc;
|
using GFramework.Core.Abstractions.Ioc;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Architectures;
|
namespace GFramework.Core.Abstractions.Architectures;
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Bases;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Bases;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 表示键值对的接口,定义了通用的键值对数据结构契约
|
/// 表示键值对的接口,定义了通用的键值对数据结构契约
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Bases;
|
namespace GFramework.Core.Abstractions.Bases;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Bases;
|
namespace GFramework.Core.Abstractions.Bases;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Rule;
|
using GFramework.Core.Abstractions.Rule;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Command;
|
namespace GFramework.Core.Abstractions.Command;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Rule;
|
using GFramework.Core.Abstractions.Rule;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Command;
|
namespace GFramework.Core.Abstractions.Command;
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Command;
|
namespace GFramework.Core.Abstractions.Command;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using GFramework.Core.Abstractions.Events;
|
using GFramework.Core.Abstractions.Events;
|
||||||
using GFramework.Core.Abstractions.Utility;
|
using GFramework.Core.Abstractions.Utility;
|
||||||
|
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Controller;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Controller;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 控制器标记接口,用于标识控制器组件
|
/// 控制器标记接口,用于标识控制器组件
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 表示协程的执行状态枚举
|
/// 表示协程的执行状态枚举
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 时间源接口,提供当前时间、时间增量以及更新功能
|
/// 时间源接口,提供当前时间、时间增量以及更新功能
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Coroutine;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Coroutine;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义一个可等待指令的接口,用于协程系统中的异步操作控制
|
/// 定义一个可等待指令的接口,用于协程系统中的异步操作控制
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using System.ComponentModel;
|
using System.ComponentModel;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Cqrs;
|
namespace GFramework.Core.Abstractions.Cqrs;
|
||||||
|
|||||||
@ -1,3 +1,8 @@
|
|||||||
|
<!--
|
||||||
|
Copyright (c) 2025-2026 GeWuYou
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
<Project>
|
<Project>
|
||||||
<!-- import parent: https://docs.microsoft.com/en-us/visualstudio/msbuild/customize-your-build -->
|
<!-- import parent: https://docs.microsoft.com/en-us/visualstudio/msbuild/customize-your-build -->
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Enums;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Enums;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 架构阶段枚举,定义了系统架构初始化和运行过程中的各个关键阶段
|
/// 架构阶段枚举,定义了系统架构初始化和运行过程中的各个关键阶段
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Environment;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Environment;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义环境接口,提供应用程序运行环境的相关信息
|
/// 定义环境接口,提供应用程序运行环境的相关信息
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Events;
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Events;
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Events;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 事件接口,定义了事件注册的基本功能
|
/// 事件接口,定义了事件注册的基本功能
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Events;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 事件总线接口,提供事件的发送、注册和注销功能
|
/// 事件总线接口,提供事件的发送、注册和注销功能
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Events;
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Events;
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Events;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 提供注销功能的接口
|
/// 提供注销功能的接口
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Events;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Events;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 提供统一注销功能的接口,用于管理需要注销的对象列表
|
/// 提供统一注销功能的接口,用于管理需要注销的对象列表
|
||||||
|
|||||||
@ -1,4 +1,9 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<!--
|
||||||
|
Copyright (c) 2025-2026 GeWuYou
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
-->
|
||||||
|
|
||||||
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
配置项目构建属性
|
配置项目构建属性
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
// IsExternalInit.cs
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
// IsExternalInit.cs
|
||||||
// This type is required to support init-only setters and record types
|
// This type is required to support init-only setters and record types
|
||||||
// when targeting netstandard2.0 or older frameworks.
|
// when targeting netstandard2.0 or older frameworks.
|
||||||
|
|
||||||
|
|||||||
@ -1,13 +1,22 @@
|
|||||||
using System.Reflection;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
using System.Reflection;
|
||||||
using GFramework.Core.Abstractions.Rule;
|
using GFramework.Core.Abstractions.Rule;
|
||||||
using GFramework.Core.Abstractions.Systems;
|
using GFramework.Core.Abstractions.Systems;
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Ioc;
|
namespace GFramework.Core.Abstractions.Ioc;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 依赖注入容器接口,定义了服务注册、解析和管理的基本操作
|
/// 依赖注入容器接口,定义服务注册、解析与生命周期管理的统一入口。
|
||||||
/// </summary>
|
/// </summary>
|
||||||
public interface IIocContainer : IContextAware
|
/// <remarks>
|
||||||
|
/// 实现者必须在 <see cref="IDisposable.Dispose" /> 中释放容器拥有的根 <see cref="IServiceProvider" /> 及其
|
||||||
|
/// 关联同步资源,并保证释放操作幂等。
|
||||||
|
/// 容器一旦释放,后续任何注册、解析、查询或作用域创建调用都必须抛出
|
||||||
|
/// <see cref="ObjectDisposedException" />,避免消费者继续访问失效的运行时状态。
|
||||||
|
/// </remarks>
|
||||||
|
public interface IIocContainer : IContextAware, IDisposable
|
||||||
{
|
{
|
||||||
#region Register Methods
|
#region Register Methods
|
||||||
|
|
||||||
@ -96,6 +105,20 @@ public interface IIocContainer : IContextAware
|
|||||||
void RegisterCqrsPipelineBehavior<TBehavior>()
|
void RegisterCqrsPipelineBehavior<TBehavior>()
|
||||||
where TBehavior : class;
|
where TBehavior : class;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// 注册 CQRS 流式请求管道行为。
|
||||||
|
/// </summary>
|
||||||
|
/// <typeparam name="TBehavior">行为类型,必须是引用类型</typeparam>
|
||||||
|
/// <exception cref="InvalidOperationException">容器已冻结,无法继续注册流式管道行为。</exception>
|
||||||
|
/// <exception cref="ObjectDisposedException">容器已释放,无法继续注册流式管道行为。</exception>
|
||||||
|
/// <remarks>
|
||||||
|
/// 该入口既支持实现 <c>IStreamPipelineBehavior<,></c> 的开放泛型行为类型,
|
||||||
|
/// 也支持绑定到单一流式请求/响应对的封闭行为类型。
|
||||||
|
/// 应在容器冻结前的注册阶段调用;具体可注册形态由实现容器负责校验。
|
||||||
|
/// </remarks>
|
||||||
|
void RegisterCqrsStreamPipelineBehavior<TBehavior>()
|
||||||
|
where TBehavior : class;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 从指定程序集显式注册 CQRS 处理器。
|
/// 从指定程序集显式注册 CQRS 处理器。
|
||||||
/// 该入口适用于处理器不位于默认架构程序集中的场景,例如扩展包、模块程序集或拆分后的业务程序集。
|
/// 该入口适用于处理器不位于默认架构程序集中的场景,例如扩展包、模块程序集或拆分后的业务程序集。
|
||||||
@ -132,6 +155,10 @@ public interface IIocContainer : IContextAware
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <typeparam name="T">期望获取的实例类型</typeparam>
|
/// <typeparam name="T">期望获取的实例类型</typeparam>
|
||||||
/// <returns>找到的第一个实例;如果未找到则返回 null</returns>
|
/// <returns>找到的第一个实例;如果未找到则返回 null</returns>
|
||||||
|
/// <remarks>
|
||||||
|
/// 在 <see cref="Freeze" /> 之前,该查询只保证返回已经物化为实例绑定的服务。
|
||||||
|
/// 仅通过工厂或实现类型注册的服务在预冻结阶段可能不可见;若需要完整激活语义,请先冻结容器。
|
||||||
|
/// </remarks>
|
||||||
T? Get<T>() where T : class;
|
T? Get<T>() where T : class;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
@ -140,6 +167,10 @@ public interface IIocContainer : IContextAware
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="type">期望获取的实例类型</param>
|
/// <param name="type">期望获取的实例类型</param>
|
||||||
/// <returns>找到的第一个实例;如果未找到则返回 null</returns>
|
/// <returns>找到的第一个实例;如果未找到则返回 null</returns>
|
||||||
|
/// <remarks>
|
||||||
|
/// 在 <see cref="Freeze" /> 之前,该查询只保证返回已经物化为实例绑定的服务。
|
||||||
|
/// 仅通过工厂或实现类型注册的服务在预冻结阶段可能不可见;若需要完整激活语义,请先冻结容器。
|
||||||
|
/// </remarks>
|
||||||
object? Get(Type type);
|
object? Get(Type type);
|
||||||
|
|
||||||
|
|
||||||
@ -165,6 +196,9 @@ public interface IIocContainer : IContextAware
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <typeparam name="T">期望获取的实例类型</typeparam>
|
/// <typeparam name="T">期望获取的实例类型</typeparam>
|
||||||
/// <returns>所有符合条件的实例列表;如果没有则返回空数组</returns>
|
/// <returns>所有符合条件的实例列表;如果没有则返回空数组</returns>
|
||||||
|
/// <remarks>
|
||||||
|
/// 在 <see cref="Freeze" /> 之前,该查询只会枚举当前已经可见的实例绑定,不会主动执行工厂或创建实现类型。
|
||||||
|
/// </remarks>
|
||||||
IReadOnlyList<T> GetAll<T>() where T : class;
|
IReadOnlyList<T> GetAll<T>() where T : class;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
@ -172,6 +206,9 @@ public interface IIocContainer : IContextAware
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <param name="type">期望获取的实例类型</param>
|
/// <param name="type">期望获取的实例类型</param>
|
||||||
/// <returns>所有符合条件的实例列表;如果没有则返回空数组</returns>
|
/// <returns>所有符合条件的实例列表;如果没有则返回空数组</returns>
|
||||||
|
/// <remarks>
|
||||||
|
/// 在 <see cref="Freeze" /> 之前,该查询只会枚举当前已经可见的实例绑定,不会主动执行工厂或创建实现类型。
|
||||||
|
/// </remarks>
|
||||||
IReadOnlyList<object> GetAll(Type type);
|
IReadOnlyList<object> GetAll(Type type);
|
||||||
|
|
||||||
|
|
||||||
@ -210,8 +247,26 @@ public interface IIocContainer : IContextAware
|
|||||||
/// </summary>
|
/// </summary>
|
||||||
/// <typeparam name="T">要检查的类型</typeparam>
|
/// <typeparam name="T">要检查的类型</typeparam>
|
||||||
/// <returns>如果容器中包含指定类型的实例则返回true,否则返回false</returns>
|
/// <returns>如果容器中包含指定类型的实例则返回true,否则返回false</returns>
|
||||||
|
/// <remarks>
|
||||||
|
/// 在 <see cref="Freeze" /> 之前,该方法更接近“是否存在对应注册”的检查,而不是完整的 DI 可解析性判断。
|
||||||
|
/// </remarks>
|
||||||
bool Contains<T>() where T : class;
|
bool Contains<T>() where T : class;
|
||||||
|
|
||||||
|
/// <summary>
|
||||||
|
/// 检查容器中是否存在可赋值给指定服务类型的注册项,而不要求解析出实例。
|
||||||
|
/// </summary>
|
||||||
|
/// <param name="type">要检查的服务类型。</param>
|
||||||
|
/// <returns>若存在显式注册或开放泛型映射可满足该服务类型,则返回 <see langword="true" />;否则返回 <see langword="false" />。</returns>
|
||||||
|
/// <exception cref="ArgumentNullException">当 <paramref name="type" /> 为 <see langword="null" /> 时抛出。</exception>
|
||||||
|
/// <exception cref="ObjectDisposedException">当调用 <see cref="HasRegistration(Type)" /> 时容器已被释放时抛出。</exception>
|
||||||
|
/// <remarks>
|
||||||
|
/// 该入口面向“先判断是否值得解析实例”的热路径优化场景。
|
||||||
|
/// 与 <see cref="Contains{T}" /> 不同,它不会为了判断结果而激活服务实例,因此可避免把瞬态对象创建、
|
||||||
|
/// 多服务枚举或日志分配混入仅需存在性判断的调用链中。
|
||||||
|
/// 该方法按服务键与开放泛型映射判断可见性,不会把“仅以实现类型自身注册”的实例误判成其所有可赋值接口都已注册。
|
||||||
|
/// </remarks>
|
||||||
|
bool HasRegistration(Type type);
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 判断容器中是否包含某个具体的实例对象
|
/// 判断容器中是否包含某个具体的实例对象
|
||||||
/// </summary>
|
/// </summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 可销毁接口,为需要资源清理的组件提供标准销毁能力
|
/// 可销毁接口,为需要资源清理的组件提供标准销毁能力
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 可初始化接口,为需要初始化的组件提供标准初始化能力
|
/// 可初始化接口,为需要初始化的组件提供标准初始化能力
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Lifecycle;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Lifecycle;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 完整生命周期接口,组合了初始化和销毁能力
|
/// 完整生命周期接口,组合了初始化和销毁能力
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
using System.Globalization;
|
using System.Globalization;
|
||||||
using GFramework.Core.Abstractions.Systems;
|
using GFramework.Core.Abstractions.Systems;
|
||||||
|
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Localization;
|
namespace GFramework.Core.Abstractions.Localization;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Logging;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义日志记录接口,提供日志记录和级别检查功能
|
/// 定义日志记录接口,提供日志记录和级别检查功能
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Logging;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义日志工厂接口,用于创建日志记录器实例
|
/// 定义日志工厂接口,用于创建日志记录器实例
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Logging;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义日志工厂提供者的接口,用于创建具有指定名称和最小日志级别的日志记录器
|
/// 定义日志工厂提供者的接口,用于创建具有指定名称和最小日志级别的日志记录器
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
namespace GFramework.Core.Abstractions.Logging;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
/// 定义日志级别的枚举,用于标识不同严重程度的日志消息
|
/// 定义日志级别的枚举,用于标识不同严重程度的日志消息
|
||||||
|
|||||||
@ -1,3 +1,6 @@
|
|||||||
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
namespace GFramework.Core.Abstractions.Logging;
|
namespace GFramework.Core.Abstractions.Logging;
|
||||||
|
|
||||||
/// <summary>
|
/// <summary>
|
||||||
|
|||||||
@ -1,4 +1,7 @@
|
|||||||
using GFramework.Core.Abstractions.Architectures;
|
// Copyright (c) 2025-2026 GeWuYou
|
||||||
|
// SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
using GFramework.Core.Abstractions.Architectures;
|
||||||
using GFramework.Core.Abstractions.Lifecycle;
|
using GFramework.Core.Abstractions.Lifecycle;
|
||||||
using GFramework.Core.Abstractions.Rule;
|
using GFramework.Core.Abstractions.Rule;
|
||||||
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user