Merge pull request #295 from GeWuYou/fix/analyzer-warning-reduction-batch

Fix/analyzer warning reduction batch
This commit is contained in:
gewuyou 2026-04-27 10:53:34 +08:00 committed by GitHub
commit b6a9fefda9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
36 changed files with 895 additions and 235 deletions

View File

@ -62,6 +62,7 @@ The script should produce:
- Pre-merge failed checks, if present
- Latest MegaLinter status and any detailed issues posted by `github-actions[bot]`
- Test summary, including failed-test signals when present
- Detailed failed-test rows from GitHub Test Reporter / CTRF comments when the PR comment includes `Name` / `Failure Message` content
- CLI support for writing full JSON to a file and printing only narrowed text sections to stdout
- Parse warnings are emitted only when both the primary API source and the intended fallback signal are unavailable

View File

@ -257,6 +257,11 @@ def strip_markdown_links(text: str) -> str:
return re.sub(r"\[([^\]]+)\]\([^)]+\)", r"\1", text)
def strip_markdown_images(text: str) -> str:
    """Remove Markdown image syntax (``![alt](url)``) from *text*.

    The whole image token is deleted; the surrounding text is left untouched.
    """
    image_pattern = r"!\[[^\]]*\]\([^)]+\)"
    return re.sub(image_pattern, "", text)
def extract_section(text: str, start_marker: str, end_markers: list[str]) -> str | None:
"""Extract text between a start marker and the earliest matching end marker."""
start = text.find(start_marker)
@ -486,43 +491,198 @@ def parse_megalinter_comment(comment_body: str) -> dict[str, Any]:
return report
def clean_markdown_table_cell(text: str) -> str:
    """Normalize a Markdown table cell for structured parsing."""
    # Unescape HTML entities first, then drop link and image markup.
    plain = strip_markdown_images(strip_markdown_links(html.unescape(text)))
    # Replace non-breaking spaces, then strip emphasis and code markers.
    for token, replacement in (("\xa0", " "), ("**", ""), ("*", ""), ("`", "")):
        plain = plain.replace(token, replacement)
    return collapse_whitespace(plain)
def parse_int_from_text(text: str) -> int | None:
"""Extract the first integer value from text."""
match = re.search(r"\d+", text)
return int(match.group(0)) if match else None
def parse_duration_from_text(text: str) -> str:
    """Extract a duration token (e.g. ``12.5s``) from *text* when present.

    Falls back to the whitespace-collapsed input when no token is found.
    """
    duration = re.search(r"\d+(?:\.\d+)?(?:ms|s|m|h)", text)
    return duration.group(0) if duration else collapse_whitespace(text)
def parse_markdown_table(table_text: str) -> tuple[list[str], list[list[str]]]:
    """Parse a Markdown table into header cells and row cells."""
    pipe_lines = [
        stripped
        for stripped in (line.strip() for line in table_text.splitlines())
        if stripped.startswith("|")
    ]
    # A table needs at least a header line and the |---| separator line.
    if len(pipe_lines) < 2:
        return [], []

    def split_cells(line: str) -> list[str]:
        return [clean_markdown_table_cell(cell) for cell in line.strip("|").split("|")]

    headers = split_cells(pipe_lines[0])
    # pipe_lines[1] is the separator; data rows start after it.
    rows = [cells for cells in (split_cells(line) for line in pipe_lines[2:]) if cells]
    return headers, rows
def extract_markdown_table_after_heading(block: str, heading: str) -> tuple[list[str], list[list[str]]]:
    """Extract the first Markdown table that appears after a heading."""
    terminators = ["\n### ", "\n#### ", "\n<details>", "\n<table>", "\n<sub>"]
    section = extract_section(block, heading, terminators)
    if section is None:
        return [], []
    # Header row, separator row, then any number of data rows.
    table = re.search(r"(\|.*\|\n\|[-| :]+\|\n(?:\|.*\|\n?)*)", section, re.S)
    return parse_markdown_table(table.group(1)) if table else ([], [])
def normalize_stat_header(header: str) -> str:
    """Normalize a human-readable stats header into a stable machine key.

    Strips every character that is not an ASCII letter (emoji, digits,
    punctuation) and lower-cases what remains, so e.g. ``"Failed ❌"`` becomes
    ``"failed"``.  Known keys produced by CTRF / Test Reporter tables are:
    tests, passed, failed, skipped, pending, other, flaky, duration.

    The previous implementation routed the result through an alias table in
    which every key mapped to itself and the lookup fell back to the same
    value — a pure identity map — so the table was dead code and is removed.
    """
    return re.sub(r"[^A-Za-z]+", "", header).lower()
def parse_stats_table(headers: list[str], rows: list[list[str]]) -> dict[str, Any]:
    """Convert a parsed Markdown stats table into the report stats shape."""
    if not (headers and rows):
        return {}
    stats: dict[str, Any] = {}
    # Only the first data row carries the aggregate numbers.
    for raw_header, cell in zip(headers, rows[0]):
        key = normalize_stat_header(raw_header)
        if not key:
            continue
        if key == "duration":
            # Duration is textual (e.g. "12.5s"), not an integer count.
            stats[key] = parse_duration_from_text(cell)
        else:
            count = parse_int_from_text(cell)
            if count is not None:
                stats[key] = count
    return stats
def normalize_failure_message(text: str) -> str:
    """Normalize a failed-test message while preserving the meaningful lines."""
    message = html.unescape(text)
    # <br> variants become explicit line breaks so per-line cleanup works.
    message = re.sub(r"(?i)<br\s*/?>", "\n", message)
    # Block-level tags also act as line separators.
    message = re.sub(r"</?(?:p|div|tbody|thead|tr|td|th|table)>", "\n", message)
    # Any remaining markup is flattened to a space.
    message = re.sub(r"<[^>]+>", " ", message)
    kept: list[str] = []
    for raw_line in message.splitlines():
        normalized = collapse_whitespace(raw_line)
        if normalized:
            kept.append(normalized)
    return "\n".join(kept)
def parse_failed_test_summary_list(block: str) -> list[str]:
    """Parse the compact failed-tests summary list from CTRF details blocks."""
    section = re.search(
        r"<details><summary><strong>\s*Failed Tests.*?</summary>(?P<body>.*?)</details>",
        block,
        re.S,
    )
    if section is None:
        return []
    body = strip_markdown_links(strip_markdown_images(html.unescape(section.group("body"))))
    names: list[str] = []
    for raw_line in body.splitlines():
        line = collapse_whitespace(raw_line)
        if not line:
            continue
        if "arrow-right" in raw_line:
            # Icon markup separates suite breadcrumbs; keep the last segment.
            segments = [segment.strip() for segment in line.split("arrow-right") if segment.strip()]
            candidate = segments[-1] if segments else line
        elif ">" in line:
            # Plain breadcrumb arrows: the test name is the final component.
            candidate = line.split(">")[-1].strip()
        else:
            candidate = line
        if candidate:
            names.append(candidate)
    return names
def parse_failed_test_details(block: str) -> list[dict[str, str]]:
    """Parse the detailed failed-test HTML table from GitHub Test Reporter comments.

    Returns a list of ``{"name": ..., "failure_message": ...}`` dicts, one per
    ``<tr>`` row found inside the "Some tests failed!" table body.  Rows whose
    name cell is empty after cleanup are skipped.
    """
    details: list[dict[str, str]] = []
    table_section = re.search(
        r"### ❌ \*\*Some tests failed!\*\*.*?<tbody>(?P<body>.*?)</tbody>",
        block,
        re.S,
    )
    if table_section is None:
        return details
    # Test Reporter tables may grow extra columns over time; only the first two
    # (name, failure message) are required here — trailing <td> cells are ignored.
    row_pattern = re.compile(
        r"<tr>\s*<td>(?P<name>.*?)</td>\s*<td>(?P<message>.*?)</td>(?:\s*<td>.*?</td>)*\s*</tr>",
        re.S,
    )
    for row_match in row_pattern.finditer(table_section.group("body")):
        # Bug fix: the previous ``.lstrip("")`` was a no-op — an empty strip
        # set removes nothing — so a single ``strip()`` after tag removal
        # is all that is needed.
        name = collapse_whitespace(strip_tags(html.unescape(row_match.group("name")))).strip()
        failure_message = normalize_failure_message(row_match.group("message"))
        if name:
            details.append(
                {
                    "name": name,
                    "failure_message": failure_message,
                }
            )
    return details
def parse_test_report(block: str) -> dict[str, Any]:
    """Parse a CTRF or GitHub test-reporter comment block.

    Combines several report formats in precedence order: the structured
    "### Summary" Markdown table, a legacy pipe-delimited stats row, and the
    "### build-and-test:" table as a last resort.  Failed-test names come
    from the CTRF details list, the detailed HTML table, or the
    "### Failed Tests" section.

    Bug fixes versus the previous version: stats derived from the legacy
    pipe row are no longer clobbered when the "### Summary" table is absent,
    and failed tests parsed from the "### Failed Tests" section are no
    longer overwritten by an empty summary list.
    """
    report: dict[str, Any] = {
        "raw": block.strip(),
        "stats": {},
        "failed_tests": [],
        "failed_test_details": [],
        "has_failed_tests": False,
    }
    # Legacy format: | tests | passed | failed | skipped | other | flaky | duration |
    summary_row_match = re.search(
        r"\|\s*\*?\*?(\d+)\*?\*?\s*\|\s*\*?\*?(\d+)\*?\*?\s*\|\s*\*?\*?(\d+)\*?\*?\s*\|"
        r"\s*\*?\*?(\d+)\*?\*?\s*\|\s*\*?\*?(\d+)\*?\*?\s*\|\s*\*?\*?(\d+)\*?\*?\s*\|\s*\*?\*?([^\|]+?)\*?\*?\s*\|",
        block,
    )
    if summary_row_match is not None:
        report["stats"] = {
            "tests": int(summary_row_match.group(1)),
            "passed": int(summary_row_match.group(2)),
            "failed": int(summary_row_match.group(3)),
            "skipped": int(summary_row_match.group(4)),
            "other": int(summary_row_match.group(5)),
            "flaky": int(summary_row_match.group(6)),
            "duration": summary_row_match.group(7).strip(),
        }
    # Prefer the structured "### Summary" table, but keep the regex-derived
    # stats when the table is missing (previously this assignment was
    # unconditional and discarded them).
    summary_headers, summary_rows = extract_markdown_table_after_heading(block, "### Summary")
    table_stats = parse_stats_table(summary_headers, summary_rows)
    if table_stats:
        report["stats"] = table_stats
    failed_tests_section = extract_section(
        block,
        "### Failed Tests",
        ["### Slowest Tests", "### Insights", "<sub>", "[Github Test Reporter]"],
    )
    if failed_tests_section:
        lines = [line.strip("- ").strip() for line in failed_tests_section.splitlines()[1:] if line.strip()]
        report["failed_tests"] = lines
        report["has_failed_tests"] = True
    elif "No failed tests in this run." in block or "All tests passed!" in block:
        report["failed_tests"] = []
        report["has_failed_tests"] = False
    if not report["stats"]:
        # Last-resort stats source for matrix-style comments.
        build_headers, build_rows = extract_markdown_table_after_heading(block, "### build-and-test:")
        report["stats"] = parse_stats_table(build_headers, build_rows)
    failed_test_details = parse_failed_test_details(block)
    failed_test_names = parse_failed_test_summary_list(block)
    if not failed_test_names and failed_test_details:
        failed_test_names = [detail["name"] for detail in failed_test_details]
    if failed_test_names:
        # Only override the section-derived list when a richer source exists.
        report["failed_tests"] = failed_test_names
    report["failed_test_details"] = failed_test_details
    failed_count = int(report["stats"].get("failed", 0) or 0)
    report["has_failed_tests"] = bool(
        report["failed_tests"] or failed_test_details or failed_count > 0
    )
    return report
@ -1103,8 +1263,17 @@ def format_text(
lines.append(f"- Report {index}: no structured test stats parsed")
if report["has_failed_tests"]:
for failed_test in report["failed_tests"]:
lines.append(f" Failed test: {truncate_text(failed_test, max_description_length)}")
failed_test_details = report.get("failed_test_details", [])
if failed_test_details:
for failed_test_detail in failed_test_details:
lines.append(f" Failed test: {truncate_text(failed_test_detail['name'], max_description_length)}")
lines.append(
" Failure: "
f"{truncate_text(failed_test_detail['failure_message'].replace(chr(10), ' | '), max_description_length)}"
)
else:
for failed_test in report["failed_tests"]:
lines.append(f" Failed test: {truncate_text(failed_test, max_description_length)}")
else:
lines.append(" Failed tests: none reported")

View File

@ -0,0 +1,53 @@
#!/usr/bin/env python3
"""Regression tests for the GFramework PR review fetch helper."""
from __future__ import annotations
import importlib.util
from pathlib import Path
import unittest
# The helper script sits next to this test file and is not part of a package,
# so it is loaded dynamically from its path instead of via a regular import.
SCRIPT_PATH = Path(__file__).with_name("fetch_current_pr_review.py")
MODULE_SPEC = importlib.util.spec_from_file_location("fetch_current_pr_review", SCRIPT_PATH)
if MODULE_SPEC is None or MODULE_SPEC.loader is None:
    raise RuntimeError(f"Unable to load module from {SCRIPT_PATH}.")
MODULE = importlib.util.module_from_spec(MODULE_SPEC)
# Executing the module binds the functions under test onto MODULE.
MODULE_SPEC.loader.exec_module(MODULE)
class ParseFailedTestDetailsTests(unittest.TestCase):
    """Cover failed-test table parsing edge cases for CTRF comments."""

    def test_parse_failed_test_details_ignores_trailing_columns(self) -> None:
        """Extra columns should not prevent extracting the name and failure message."""
        comment_block = """
### ❌ **Some tests failed!**
<table>
<tbody>
<tr>
<td> RegisterMigration_During_Cache_Rebuild_Should_Not_Leave_Stale_Type_Cache</td>
<td><pre>Expected: False\nBut was: True</pre></td>
<td>failed</td>
<td>35.3s</td>
</tr>
</tbody>
</table>
"""
        expected_details = [
            {
                "name": "RegisterMigration_During_Cache_Rebuild_Should_Not_Leave_Stale_Type_Cache",
                "failure_message": "Expected: False\nBut was: True",
            }
        ]
        self.assertEqual(MODULE.parse_failed_test_details(comment_block), expected_details)


if __name__ == "__main__":
    unittest.main()

View File

@ -176,6 +176,6 @@ public sealed class TrackingPipelineBehavior<TRequest, TResponse> : IPipelineBeh
CancellationToken cancellationToken)
{
InvocationCount++;
return await next(message, cancellationToken);
return await next(message, cancellationToken).ConfigureAwait(false);
}
}

View File

@ -47,9 +47,11 @@ public sealed class AsyncKeyLockManagerTests
var index = i;
tasks.Add(Task.Run(async () =>
{
await using var handle = await manager.AcquireLockAsync("same-key").ConfigureAwait(false);
executionOrder.Add(index);
await Task.Delay(10).ConfigureAwait(false);
await using ((await manager.AcquireLockAsync("same-key").ConfigureAwait(false)).ConfigureAwait(false))
{
executionOrder.Add(index);
await Task.Delay(10).ConfigureAwait(false);
}
}));
}
@ -75,11 +77,13 @@ public sealed class AsyncKeyLockManagerTests
var key = $"key-{i}";
tasks.Add(Task.Run(async () =>
{
await using var handle = await manager.AcquireLockAsync(key).ConfigureAwait(false);
var current = Interlocked.Increment(ref concurrentCount);
maxConcurrent = Math.Max(maxConcurrent, current);
await Task.Delay(50).ConfigureAwait(false);
Interlocked.Decrement(ref concurrentCount);
await using ((await manager.AcquireLockAsync(key).ConfigureAwait(false)).ConfigureAwait(false))
{
var current = Interlocked.Increment(ref concurrentCount);
maxConcurrent = Math.Max(maxConcurrent, current);
await Task.Delay(50).ConfigureAwait(false);
Interlocked.Decrement(ref concurrentCount);
}
}));
}
@ -117,8 +121,10 @@ public sealed class AsyncKeyLockManagerTests
var key = $"key-{i % 10}";
tasks.Add(Task.Run(async () =>
{
await using var handle = await manager.AcquireLockAsync(key).ConfigureAwait(false);
await Task.Delay(1).ConfigureAwait(false);
await using ((await manager.AcquireLockAsync(key).ConfigureAwait(false)).ConfigureAwait(false))
{
await Task.Delay(1).ConfigureAwait(false);
}
}));
}
@ -139,10 +145,12 @@ public sealed class AsyncKeyLockManagerTests
{
tasks.Add(Task.Run(async () =>
{
await using var handle = await manager.AcquireLockAsync("same-key").ConfigureAwait(false);
var temp = counter;
await Task.Delay(1).ConfigureAwait(false);
counter = temp + 1;
await using ((await manager.AcquireLockAsync("same-key").ConfigureAwait(false)).ConfigureAwait(false))
{
var temp = counter;
await Task.Delay(1).ConfigureAwait(false);
counter = temp + 1;
}
}));
}
@ -295,8 +303,10 @@ public sealed class AsyncKeyLockManagerTests
{
for (var j = 0; j < 10; j++)
{
await using var handle = await manager.AcquireLockAsync($"key-{j % 5}").ConfigureAwait(false);
await Task.Delay(10).ConfigureAwait(false);
await using ((await manager.AcquireLockAsync($"key-{j % 5}").ConfigureAwait(false)).ConfigureAwait(false))
{
await Task.Delay(10).ConfigureAwait(false);
}
}
}));
}

View File

@ -225,23 +225,31 @@ public class AsyncExtensionsTests
/// 测试WithRetry方法遵守ShouldRetry谓词
/// </summary>
[Test]
public async Task WithRetry_Should_Respect_ShouldRetry_Predicate()
public void WithRetry_Should_Respect_ShouldRetry_Predicate()
{
static Task<int> ThrowShouldNotRetry(string parameterName)
{
throw new ArgumentException("Should not retry", parameterName);
}
// Arrange
var attemptCount = 0;
Func<Task<int>> taskFactory = () =>
{
attemptCount++;
throw new ArgumentException("Should not retry");
return ThrowShouldNotRetry(nameof(taskFactory));
};
// Act & Assert
Assert.ThrowsAsync<AggregateException>(() =>
var exception = Assert.ThrowsAsync<AggregateException>(() =>
taskFactory.WithRetryAsync(3, TimeSpan.FromMilliseconds(10),
ex => ex is not ArgumentException));
await Task.Delay(50).ConfigureAwait(false); // 等待任务完成
Assert.That(attemptCount, Is.EqualTo(1)); // 不应该重试
Assert.That(exception, Is.Not.Null);
Assert.That(exception!.InnerExceptions, Has.Count.EqualTo(1));
Assert.That(exception.InnerExceptions[0], Is.TypeOf<ArgumentException>());
Assert.That(((ArgumentException)exception.InnerExceptions[0]).ParamName, Is.EqualTo(nameof(taskFactory)));
}
/// <summary>

View File

@ -431,7 +431,11 @@ public class PauseStackManagerTests
{
var tasks = new List<Task>();
var tokens = new List<PauseToken>();
#if NET9_0_OR_GREATER
var lockObj = new System.Threading.Lock();
#else
var lockObj = new object();
#endif
for (int i = 0; i < 100; i++)
{

View File

@ -40,7 +40,13 @@ public class ConfigurationManager : IConfigurationManager
/// <summary>
/// 用于保护监听器列表的锁
/// </summary>
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _watcherLock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _watcherLock = new();
#endif
/// <summary>
/// 配置监听器字典(线程安全)

View File

@ -12,7 +12,13 @@ internal sealed class CoroutineStatistics : ICoroutineStatistics
{
private readonly Dictionary<CoroutinePriority, int> _countByPriority = new();
private readonly Dictionary<string, int> _countByTag = new(StringComparer.Ordinal);
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private int _activeCount;
private double _maxExecutionTimeMs;
private int _pausedCount;

View File

@ -10,7 +10,13 @@ namespace GFramework.Core.Events;
public sealed class EventStatistics : IEventStatistics
{
private readonly Dictionary<string, int> _listenerCountByType = new(StringComparer.Ordinal);
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly Dictionary<string, long> _publishCountByType = new(StringComparer.Ordinal);
private long _totalFailed;
private long _totalHandled;

View File

@ -10,7 +10,13 @@ namespace GFramework.Core.Events;
public sealed class FilterableEvent<T>
{
private readonly List<IEventFilter<T>> _filters = new();
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly EventStatistics? _statistics;
private Action<T>? _onEvent;
@ -152,4 +158,4 @@ public sealed class FilterableEvent<T>
var count = _onEvent?.GetInvocationList().Length ?? 0;
_statistics.UpdateListenerCount(typeof(T).Name, count);
}
}
}

View File

@ -21,7 +21,13 @@ public class PriorityEvent<T> : IEvent
/// <summary>
/// 保护处理器集合的并发访问
/// </summary>
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _syncRoot = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _syncRoot = new();
#endif
/// <summary>
/// 标记事件是否已被处理(用于 UntilHandled 传播模式)
@ -326,4 +332,4 @@ public class PriorityEvent<T> : IEvent
public Action<EventContext<T>> Handler { get; } = handler;
public int Priority { get; } = priority;
}
}
}

View File

@ -10,7 +10,13 @@ namespace GFramework.Core.Events;
/// <typeparam name="T">事件数据类型</typeparam>
public sealed class WeakEvent<T>
{
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly EventStatistics? _statistics;
private readonly List<WeakReference<Action<T>>> _weakHandlers = new();
@ -151,4 +157,4 @@ public sealed class WeakEvent<T>
var count = _weakHandlers.Count(wr => wr.TryGetTarget(out _));
_statistics.UpdateListenerCount(typeof(T).Name, count);
}
}
}

View File

@ -13,7 +13,13 @@ public sealed class FileAppender : ILogAppender, IDisposable
private readonly string _filePath;
private readonly ILogFilter? _filter;
private readonly ILogFormatter _formatter;
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private bool _disposed;
private StreamWriter? _writer;
@ -114,4 +120,4 @@ public sealed class FileAppender : ILogAppender, IDisposable
AutoFlush = true
};
}
}
}

View File

@ -14,7 +14,13 @@ public sealed class RollingFileAppender : ILogAppender, IDisposable
private readonly string _baseFilePath;
private readonly ILogFilter? _filter;
private readonly ILogFormatter _formatter;
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly int _maxFileCount;
private readonly long _maxFileSize;
private long _currentSize;
@ -205,4 +211,4 @@ public sealed class RollingFileAppender : ILogAppender, IDisposable
// 获取当前文件大小
_currentSize = File.Exists(_baseFilePath) ? new FileInfo(_baseFilePath).Length : 0;
}
}
}

View File

@ -91,7 +91,13 @@ public sealed class SamplingFilter : ILogFilter
/// </summary>
private sealed class SamplingState
{
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly ITimeProvider _timeProvider;
private long _count;
private long _lastAccessTicks;

View File

@ -12,9 +12,15 @@ namespace GFramework.Core.Property;
public class BindableProperty<T>(T defaultValue = default!) : IBindableProperty<T>
{
/// <summary>
/// 用于保护委托链和值访问的锁对象
/// 用于保护委托链和值访问的同步原语
/// </summary>
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
/// <summary>
/// 属性值变化事件回调委托,当属性值发生变化时被调用
@ -172,4 +178,4 @@ public class BindableProperty<T>(T defaultValue = default!) : IBindableProperty<
{
return Value?.ToString() ?? string.Empty;
}
}
}

View File

@ -14,7 +14,13 @@ internal sealed class ResourceCache
private const string PathCannotBeNullOrEmptyMessage = "Path cannot be null or whitespace.";
private readonly ConcurrentDictionary<string, ResourceCacheEntry> _cache = new(StringComparer.Ordinal);
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
/// <summary>
/// 获取已缓存资源的数量

View File

@ -11,7 +11,13 @@ namespace GFramework.Core.Resource;
/// <typeparam name="T">资源类型</typeparam>
internal sealed class ResourceHandle<T> : IResourceHandle<T> where T : class
{
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly ILogger _logger = LoggerFactoryResolver.Provider.CreateLogger(nameof(ResourceHandle<T>));
private readonly Action<string> _onDispose;
private bool _disposed;
@ -141,4 +147,4 @@ internal sealed class ResourceHandle<T> : IResourceHandle<T> where T : class
_logger.Error($"[ResourceHandle] Error disposing resource '{Path}': {ex.Message}");
}
}
}
}

View File

@ -18,7 +18,13 @@ public class ResourceManager : IResourceManager
private readonly ResourceCache _cache = new();
private readonly ConcurrentDictionary<Type, object> _loaders = new();
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _loadLock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _loadLock = new();
#endif
private readonly ILogger _logger = LoggerFactoryResolver.Provider.CreateLogger(nameof(ResourceManager));
private IResourceReleaseStrategy _releaseStrategy;

View File

@ -8,7 +8,13 @@ namespace GFramework.Core.State;
/// </summary>
public class StateMachine(int maxHistorySize = 10) : IStateMachine
{
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _lock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _lock = new();
#endif
private readonly HashSet<IState> _registeredStates = [];
private readonly Stack<IState> _stateHistory = new();

View File

@ -16,7 +16,13 @@ internal sealed class WeakKeyCache<TKey, TValue>
where TKey : class
where TValue : class
{
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _gate = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _gate = new();
#endif
private ConditionalWeakTable<TKey, TValue> _entries = new();
/// <summary>

View File

@ -137,12 +137,10 @@ public sealed class SettingsModelTests
var migrationMapLock = lockField!.GetValue(model);
Assert.That(migrationMapLock, Is.Not.Null);
Task initializeTask;
Task registerTask;
lock (migrationMapLock!)
var tasks = WithSynchronizationLockHeld(migrationMapLock!, () =>
{
initializeTask = Task.Run(() => model.InitializeAsync());
registerTask = Task.Run(() => model.RegisterMigration(new TestLatestSettingsMigrationV2ToV3()));
var initializeTask = Task.Run(() => model.InitializeAsync());
var registerTask = Task.Run(() => model.RegisterMigration(new TestLatestSettingsMigrationV2ToV3()));
Thread.Sleep(50);
@ -151,7 +149,11 @@ public sealed class SettingsModelTests
Assert.That(initializeTask.IsCompleted, Is.False);
Assert.That(registerTask.IsCompleted, Is.False);
});
}
return (initializeTask, registerTask);
});
var (initializeTask, registerTask) = tasks;
await Task.WhenAll(initializeTask, registerTask);
@ -171,6 +173,35 @@ public sealed class SettingsModelTests
});
}
/// <summary>
/// 以与被测代码相同的同步原语持有反射获取到的锁对象,避免在 .NET 9+ 上把 <see cref="System.Threading.Lock" />
/// 退化成 <see cref="Monitor" /> 语义,导致并发测试误判。
/// </summary>
/// <param name="syncRoot">通过反射读取到的私有锁字段。</param>
/// <typeparam name="TResult">持锁代码返回的结果类型。</typeparam>
/// <param name="action">持锁期间执行的断言与并发调度逻辑。</param>
/// <returns>持锁代码的返回值。</returns>
private static TResult WithSynchronizationLockHeld<TResult>(object syncRoot, Func<TResult> action)
{
ArgumentNullException.ThrowIfNull(syncRoot);
ArgumentNullException.ThrowIfNull(action);
#if NET9_0_OR_GREATER
if (syncRoot is System.Threading.Lock typedLock)
{
using (typedLock.EnterScope())
{
return action();
}
}
#endif
lock (syncRoot)
{
return action();
}
}
private sealed class TestSettingsData : ISettingsData
{
public string Value { get; set; } = "default";

View File

@ -20,7 +20,11 @@ public sealed class GameConfigBootstrap : IDisposable
// All lifecycle transitions share one gate so initialization, hot-reload startup,
// stop, and disposal never publish half-finished state to concurrent callers.
#if NET9_0_OR_GREATER
private readonly Lock _stateGate = new();
#else
private readonly object _stateGate = new();
#endif
private readonly GameConfigBootstrapOptions _options;
private IUnRegister? _hotReload;
private YamlConfigLoader? _loader;
@ -210,67 +214,16 @@ public sealed class GameConfigBootstrap : IDisposable
/// </exception>
public void StartHotReload(YamlConfigHotReloadOptions? options = null)
{
YamlConfigLoader loader;
lock (_stateGate)
{
ThrowIfDisposedCore();
loader = _loader ?? throw new InvalidOperationException(
"Hot reload can only be started after the initial config load succeeds.");
if (_isStartingHotReload || _hotReload != null)
{
throw new InvalidOperationException("Hot reload is already enabled.");
}
_isStartingHotReload = true;
_stopHotReloadAfterStart = false;
}
var loader = BeginHotReloadStart();
IUnRegister? hotReload = null;
try
{
hotReload = loader.EnableHotReload(Registry, options);
var shouldStop = false;
lock (_stateGate)
{
try
{
ThrowIfDisposedCore();
// Stop/Dispose may arrive while the watcher is being created. In that
// case, release the new handle immediately instead of publishing it.
if (_stopHotReloadAfterStart)
{
shouldStop = true;
_stopHotReloadAfterStart = false;
}
else
{
_hotReload = hotReload;
hotReload = null;
}
}
finally
{
_isStartingHotReload = false;
}
}
if (shouldStop)
{
hotReload?.UnRegister();
}
hotReload = CompleteHotReloadStart(hotReload);
}
catch
{
lock (_stateGate)
{
_isStartingHotReload = false;
_stopHotReloadAfterStart = false;
}
ResetHotReloadStartAfterFailure();
hotReload?.UnRegister();
throw;
}
@ -332,4 +285,70 @@ public sealed class GameConfigBootstrap : IDisposable
throw new ObjectDisposedException(nameof(GameConfigBootstrap));
}
}
private YamlConfigLoader BeginHotReloadStart()
{
lock (_stateGate)
{
ThrowIfDisposedCore();
var loader = _loader ?? throw new InvalidOperationException(
"Hot reload can only be started after the initial config load succeeds.");
if (_isStartingHotReload || _hotReload != null)
{
throw new InvalidOperationException("Hot reload is already enabled.");
}
_isStartingHotReload = true;
_stopHotReloadAfterStart = false;
return loader;
}
}
private IUnRegister? CompleteHotReloadStart(IUnRegister? hotReload)
{
var shouldStop = false;
lock (_stateGate)
{
try
{
ThrowIfDisposedCore();
// Stop/Dispose may arrive while the watcher is being created. In that
// case, release the new handle immediately instead of publishing it.
if (_stopHotReloadAfterStart)
{
shouldStop = true;
_stopHotReloadAfterStart = false;
}
else
{
_hotReload = hotReload;
hotReload = null;
}
}
finally
{
_isStartingHotReload = false;
}
}
if (shouldStop)
{
hotReload?.UnRegister();
return null;
}
return hotReload;
}
private void ResetHotReloadStartAfterFailure()
{
lock (_stateGate)
{
_isStartingHotReload = false;
_stopHotReloadAfterStart = false;
}
}
}

View File

@ -52,7 +52,9 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
var key = location.ToStorageKey();
// 检查存储中是否存在指定键的数据
T result = await Storage.ExistsAsync(key) ? await Storage.ReadAsync<T>(key) : new T();
T result = await Storage.ExistsAsync(key).ConfigureAwait(false)
? await Storage.ReadAsync<T>(key).ConfigureAwait(false)
: new T();
// 如果启用事件功能,则发送数据加载完成事件
if (_options.EnableEvents)
@ -70,7 +72,7 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
public async Task SaveAsync<T>(IDataLocation location, T data)
where T : class, IData
{
await SaveCoreAsync(location, data, emitSavedEvent: true);
await SaveCoreAsync(location, data, emitSavedEvent: true).ConfigureAwait(false);
}
/// <summary>
@ -91,12 +93,12 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
{
var key = location.ToStorageKey();
if (!await Storage.ExistsAsync(key))
if (!await Storage.ExistsAsync(key).ConfigureAwait(false))
{
return;
}
await Storage.DeleteAsync(key);
await Storage.DeleteAsync(key).ConfigureAwait(false);
if (_options.EnableEvents)
this.SendEvent(new DataDeletedEvent(location));
}
@ -113,7 +115,7 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
// 但抑制逐项 DataSavedEvent避免监听器对同一批次收到重复语义的事件。
foreach (var (location, data) in valueTuples)
{
await SaveCoreUntypedAsync(location, data, emitSavedEvent: false);
await SaveCoreUntypedAsync(location, data, emitSavedEvent: false).ConfigureAwait(false);
}
if (_options.EnableEvents)
@ -140,8 +142,8 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
{
var key = location.ToStorageKey();
await BackupIfNeededAsync<T>(key);
await Storage.WriteAsync(key, data);
await BackupIfNeededAsync<T>(key).ConfigureAwait(false);
await Storage.WriteAsync(key, data).ConfigureAwait(false);
if (emitSavedEvent && _options.EnableEvents)
{
@ -156,14 +158,14 @@ public class DataRepository(IStorage? storage, DataRepositoryOptions? options =
private async Task BackupIfNeededAsync<T>(string key)
where T : class, IData
{
if (!_options.AutoBackup || !await Storage.ExistsAsync(key))
if (!_options.AutoBackup || !await Storage.ExistsAsync(key).ConfigureAwait(false))
{
return;
}
var backupKey = $"{key}.backup";
var existing = await Storage.ReadAsync<T>(key);
await Storage.WriteAsync(backupKey, existing);
var existing = await Storage.ReadAsync<T>(key).ConfigureAwait(false);
await Storage.WriteAsync(backupKey, existing).ConfigureAwait(false);
}
/// <summary>

View File

@ -33,7 +33,13 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
{
private readonly SaveConfiguration _config;
private readonly Dictionary<int, ISaveMigration<TSaveData>> _migrations = new();
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _migrationsLock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _migrationsLock = new();
#endif
private readonly IStorage _rootStorage;
/// <summary>
@ -99,7 +105,7 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
public async Task<bool> ExistsAsync(int slot)
{
var storage = GetSlotStorage(slot);
return await storage.ExistsAsync(_config.SaveFileName);
return await storage.ExistsAsync(_config.SaveFileName).ConfigureAwait(false);
}
/// <summary>
@ -111,10 +117,10 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
{
var storage = GetSlotStorage(slot);
if (await storage.ExistsAsync(_config.SaveFileName))
if (await storage.ExistsAsync(_config.SaveFileName).ConfigureAwait(false))
{
var loaded = await storage.ReadAsync<TSaveData>(_config.SaveFileName);
return await MigrateIfNeededAsync(slot, storage, loaded);
var loaded = await storage.ReadAsync<TSaveData>(_config.SaveFileName).ConfigureAwait(false);
return await MigrateIfNeededAsync(slot, storage, loaded).ConfigureAwait(false);
}
return new TSaveData();
@ -130,11 +136,11 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
var slotPath = $"{_config.SaveSlotPrefix}{slot}";
// 确保槽位目录存在
if (!await _rootStorage.DirectoryExistsAsync(slotPath))
await _rootStorage.CreateDirectoryAsync(slotPath);
if (!await _rootStorage.DirectoryExistsAsync(slotPath).ConfigureAwait(false))
await _rootStorage.CreateDirectoryAsync(slotPath).ConfigureAwait(false);
var storage = GetSlotStorage(slot);
await storage.WriteAsync(_config.SaveFileName, data);
await storage.WriteAsync(_config.SaveFileName, data).ConfigureAwait(false);
}
/// <summary>
@ -144,7 +150,7 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
public async Task DeleteAsync(int slot)
{
var storage = GetSlotStorage(slot);
await storage.DeleteAsync(_config.SaveFileName);
await storage.DeleteAsync(_config.SaveFileName).ConfigureAwait(false);
}
/// <summary>
@ -154,7 +160,7 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
public async Task<IReadOnlyList<int>> ListSlotsAsync()
{
// 列举所有槽位目录
var directories = await _rootStorage.ListDirectoriesAsync();
var directories = await _rootStorage.ListDirectoriesAsync().ConfigureAwait(false);
var slots = new List<int>();
@ -171,7 +177,7 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
// 直接检查存档文件是否存在,避免重复创建 ScopedStorage
var saveFilePath = $"{dirName}/{_config.SaveFileName}";
if (await _rootStorage.ExistsAsync(saveFilePath))
if (await _rootStorage.ExistsAsync(saveFilePath).ConfigureAwait(false))
slots.Add(slot);
}
@ -246,7 +252,7 @@ public class SaveRepository<TSaveData> : AbstractContextUtility, ISaveRepository
$"{typeof(TSaveData).Name} in slot {slot}",
"save migration");
await storage.WriteAsync(_config.SaveFileName, migrated);
await storage.WriteAsync(_config.SaveFileName, migrated).ConfigureAwait(false);
return migrated;
}

View File

@ -37,7 +37,7 @@ public class UnifiedSettingsDataRepository(
{
private readonly SemaphoreSlim _lock = new(1, 1);
private readonly DataRepositoryOptions _options = options ?? new DataRepositoryOptions();
private readonly Dictionary<string, Type> _typeRegistry = new();
private readonly Dictionary<string, Type> _typeRegistry = new(StringComparer.Ordinal);
private UnifiedSettingsFile? _file;
private bool _loaded;
private IRuntimeTypeSerializer? _serializer = serializer;
@ -67,7 +67,7 @@ public class UnifiedSettingsDataRepository(
public async Task<T> LoadAsync<T>(IDataLocation location)
where T : class, IData, new()
{
await EnsureLoadedAsync();
await EnsureLoadedAsync().ConfigureAwait(false);
var key = location.Key;
var result = _file!.Sections.TryGetValue(key, out var raw) ? Serializer.Deserialize<T>(raw) : new T();
if (_options.EnableEvents)
@ -85,8 +85,9 @@ public class UnifiedSettingsDataRepository(
public async Task SaveAsync<T>(IDataLocation location, T data)
where T : class, IData
{
await EnsureLoadedAsync();
await MutateAndPersistAsync(file => file.Sections[location.Key] = Serializer.Serialize(data));
await EnsureLoadedAsync().ConfigureAwait(false);
await MutateAndPersistAsync(file => file.Sections[location.Key] = Serializer.Serialize(data))
.ConfigureAwait(false);
if (_options.EnableEvents)
{
@ -101,7 +102,7 @@ public class UnifiedSettingsDataRepository(
/// <returns>如果数据存在则返回true否则返回false</returns>
public async Task<bool> ExistsAsync(IDataLocation location)
{
await EnsureLoadedAsync();
await EnsureLoadedAsync().ConfigureAwait(false);
return File.Sections.ContainsKey(location.Key);
}
@ -112,10 +113,10 @@ public class UnifiedSettingsDataRepository(
/// <returns>异步操作任务</returns>
public async Task DeleteAsync(IDataLocation location)
{
await EnsureLoadedAsync();
await EnsureLoadedAsync().ConfigureAwait(false);
var removed = false;
await _lock.WaitAsync();
await _lock.WaitAsync().ConfigureAwait(false);
try
{
var currentFile = File;
@ -126,7 +127,7 @@ public class UnifiedSettingsDataRepository(
return;
}
await WriteUnifiedFileCoreAsync(currentFile, nextFile);
await WriteUnifiedFileCoreAsync(currentFile, nextFile).ConfigureAwait(false);
_file = nextFile;
}
finally
@ -148,17 +149,18 @@ public class UnifiedSettingsDataRepository(
public async Task SaveAllAsync(
IEnumerable<(IDataLocation location, IData data)> dataList)
{
await EnsureLoadedAsync();
await EnsureLoadedAsync().ConfigureAwait(false);
var valueTuples = dataList.ToList();
await MutateAndPersistAsync(file =>
{
foreach (var (location, data) in valueTuples)
{
file.Sections[location.Key] = Serializer.Serialize(data);
}
});
foreach (var (location, data) in valueTuples)
{
file.Sections[location.Key] = Serializer.Serialize(data);
}
})
.ConfigureAwait(false);
if (_options.EnableEvents)
this.SendEvent(new DataBatchSavedEvent(valueTuples));
@ -170,9 +172,9 @@ public class UnifiedSettingsDataRepository(
/// <returns>包含所有数据项的字典,键为数据位置键,值为数据对象</returns>
public async Task<IDictionary<string, IData>> LoadAllAsync()
{
await EnsureLoadedAsync();
await EnsureLoadedAsync().ConfigureAwait(false);
var result = new Dictionary<string, IData>();
var result = new Dictionary<string, IData>(StringComparer.Ordinal);
foreach (var (key, raw) in File.Sections)
{
@ -216,15 +218,15 @@ public class UnifiedSettingsDataRepository(
{
if (_loaded) return;
await _lock.WaitAsync();
await _lock.WaitAsync().ConfigureAwait(false);
try
{
if (_loaded) return;
var key = UnifiedKey;
_file = await Storage.ExistsAsync(key)
? await Storage.ReadAsync<UnifiedSettingsFile>(key)
_file = await Storage.ExistsAsync(key).ConfigureAwait(false)
? await Storage.ReadAsync<UnifiedSettingsFile>(key).ConfigureAwait(false)
: new UnifiedSettingsFile { Version = 1 };
_loaded = true;
@ -241,7 +243,7 @@ public class UnifiedSettingsDataRepository(
/// </summary>
private async Task MutateAndPersistAsync(Action<UnifiedSettingsFile> mutation)
{
await _lock.WaitAsync();
await _lock.WaitAsync().ConfigureAwait(false);
try
{
var currentFile = File;
@ -250,7 +252,7 @@ public class UnifiedSettingsDataRepository(
// 先在副本上计算“下一份已提交状态”,只有底层持久化成功后才交换缓存,
// 这样即使备份或写入失败,也不会把未提交修改留在内存快照里。
mutation(nextFile);
await WriteUnifiedFileCoreAsync(currentFile, nextFile);
await WriteUnifiedFileCoreAsync(currentFile, nextFile).ConfigureAwait(false);
_file = nextFile;
}
finally
@ -270,13 +272,13 @@ public class UnifiedSettingsDataRepository(
/// <param name="nextFile">即将提交的新统一文件快照。</param>
private async Task WriteUnifiedFileCoreAsync(UnifiedSettingsFile currentFile, UnifiedSettingsFile nextFile)
{
if (_options.AutoBackup && await Storage.ExistsAsync(UnifiedKey))
if (_options.AutoBackup && await Storage.ExistsAsync(UnifiedKey).ConfigureAwait(false))
{
var backupKey = $"{UnifiedKey}.backup";
await Storage.WriteAsync(backupKey, currentFile);
await Storage.WriteAsync(backupKey, currentFile).ConfigureAwait(false);
}
await Storage.WriteAsync(UnifiedKey, nextFile);
await Storage.WriteAsync(UnifiedKey, nextFile).ConfigureAwait(false);
}
/// <summary>

View File

@ -218,7 +218,7 @@ public abstract class RouterBase<TRoute, TContext> : AbstractSystem
/// <returns>如果栈中包含指定路由返回 true,否则返回 false</returns>
public bool Contains(string routeKey)
{
return Stack.Any(r => r.Key == routeKey);
return Stack.Any(r => string.Equals(r.Key, routeKey, StringComparison.Ordinal));
}
/// <summary>
@ -237,7 +237,7 @@ public abstract class RouterBase<TRoute, TContext> : AbstractSystem
/// <returns>如果栈顶是指定路由返回 true,否则返回 false</returns>
public bool IsTop(string routeKey)
{
return Stack.Count != 0 && Stack.Peek().Key.Equals(routeKey);
return Stack.Count != 0 && string.Equals(Stack.Peek().Key, routeKey, StringComparison.Ordinal);
}
#endregion

View File

@ -11,6 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using GFramework.Core.Abstractions.Logging;
using GFramework.Core.Extensions;
using GFramework.Core.Logging;
@ -82,7 +83,7 @@ public abstract class SceneRouterBase
string sceneKey,
ISceneEnterParam? param = null)
{
await _transitionLock.WaitAsync();
await _transitionLock.WaitAsync().ConfigureAwait(true);
try
{
IsTransitioning = true;
@ -111,7 +112,7 @@ public abstract class SceneRouterBase
/// <returns>如果场景在栈中返回true否则返回false。</returns>
public new bool Contains(string sceneKey)
{
return Stack.Any(s => s.Key == sceneKey);
return Stack.Any(s => string.Equals(s.Key, sceneKey, StringComparison.Ordinal));
}
#endregion
@ -184,7 +185,7 @@ public abstract class SceneRouterBase
string sceneKey,
ISceneEnterParam? param = null)
{
await _transitionLock.WaitAsync();
await _transitionLock.WaitAsync().ConfigureAwait(true);
try
{
IsTransitioning = true;
@ -220,7 +221,7 @@ public abstract class SceneRouterBase
}
// 守卫检查
if (!await ExecuteEnterGuardsAsync(sceneKey, param))
if (!await ExecuteEnterGuardsAsync(sceneKey, param).ConfigureAwait(true))
{
Log.Warn("Push blocked by guard: {0}", sceneKey);
return;
@ -233,20 +234,20 @@ public abstract class SceneRouterBase
Root!.AddScene(scene);
// 加载资源
await scene.OnLoadAsync(param);
await scene.OnLoadAsync(param).ConfigureAwait(true);
// 暂停当前场景
if (Stack.Count > 0)
{
var current = Stack.Peek();
await current.OnPauseAsync();
await current.OnPauseAsync().ConfigureAwait(true);
}
// 压入栈
Stack.Push(scene);
// 进入场景
await scene.OnEnterAsync();
await scene.OnEnterAsync().ConfigureAwait(true);
Log.Debug("Push Scene: {0}, stackCount={1}",
sceneKey, Stack.Count);
@ -262,7 +263,7 @@ public abstract class SceneRouterBase
/// <returns>异步任务。</returns>
public async ValueTask PopAsync()
{
await _transitionLock.WaitAsync();
await _transitionLock.WaitAsync().ConfigureAwait(true);
try
{
IsTransitioning = true;
@ -293,7 +294,7 @@ public abstract class SceneRouterBase
var top = Stack.Peek();
// 守卫检查
if (!await ExecuteLeaveGuardsAsync(top.Key))
if (!await ExecuteLeaveGuardsAsync(top.Key).ConfigureAwait(true))
{
Log.Warn("Pop blocked by guard: {0}", top.Key);
return;
@ -302,10 +303,10 @@ public abstract class SceneRouterBase
Stack.Pop();
// 退出场景
await top.OnExitAsync();
await top.OnExitAsync().ConfigureAwait(true);
// 卸载资源
await top.OnUnloadAsync();
await top.OnUnloadAsync().ConfigureAwait(true);
// 从场景树移除
Root!.RemoveScene(top);
@ -314,7 +315,7 @@ public abstract class SceneRouterBase
if (Stack.Count > 0)
{
var next = Stack.Peek();
await next.OnResumeAsync();
await next.OnResumeAsync().ConfigureAwait(true);
}
Log.Debug("Pop Scene, stackCount={0}", Stack.Count);
@ -330,7 +331,7 @@ public abstract class SceneRouterBase
/// <returns>异步任务。</returns>
public async ValueTask ClearAsync()
{
await _transitionLock.WaitAsync();
await _transitionLock.WaitAsync().ConfigureAwait(true);
try
{
IsTransitioning = true;

View File

@ -29,7 +29,13 @@ public class SettingsModel<TRepository>(IDataLocationProvider? locationProvider,
private readonly ConcurrentDictionary<Type, ISettingsData> _data = new();
private readonly ConcurrentDictionary<Type, Dictionary<int, ISettingsMigration>> _migrationCache = new();
#if NET9_0_OR_GREATER
// net9.0 及以上目标使用专用 Lock以满足分析器对专用同步原语的建议。
private readonly System.Threading.Lock _migrationMapLock = new();
#else
// net8.0 目标仍回退到 object 锁,以保持多目标编译兼容性。
private readonly object _migrationMapLock = new();
#endif
private readonly ConcurrentDictionary<(Type type, int from), ISettingsMigration> _migrations = new();
private volatile bool _initialized;
@ -169,7 +175,7 @@ public class SettingsModel<TRepository>(IDataLocationProvider? locationProvider,
try
{
allData = await DataRepository.LoadAllAsync();
allData = await DataRepository.LoadAllAsync().ConfigureAwait(false);
}
catch (Exception ex)
{
@ -213,7 +219,7 @@ public class SettingsModel<TRepository>(IDataLocationProvider? locationProvider,
try
{
var location = LocationProvider.GetLocation(data.GetType());
await DataRepository.SaveAsync(location, data);
await DataRepository.SaveAsync(location, data).ConfigureAwait(false);
}
catch (Exception ex)
{
@ -231,7 +237,7 @@ public class SettingsModel<TRepository>(IDataLocationProvider? locationProvider,
foreach (var applicator in _applicators)
try
{
await applicator.Value.ApplyAsync();
await applicator.Value.ApplyAsync().ConfigureAwait(false);
}
catch (Exception ex)
{

View File

@ -51,8 +51,8 @@ public static class UiInteractionProfiles
{
return action switch
{
UiInputAction.Cancel => (profile.CapturedActions & UiInputActionMask.Cancel) != 0,
UiInputAction.Confirm => (profile.CapturedActions & UiInputActionMask.Confirm) != 0,
UiInputAction.Cancel => (profile.CapturedActions & UiInputActionMask.Cancel) != UiInputActionMask.None,
UiInputAction.Confirm => (profile.CapturedActions & UiInputActionMask.Confirm) != UiInputActionMask.None,
_ => false
};
}

View File

@ -260,7 +260,7 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
/// <returns>如果栈顶是指定UI则返回true否则返回false</returns>
public new bool IsTop(string uiKey)
{
return Stack.Count != 0 && Stack.Peek().Key.Equals(uiKey);
return Stack.Count != 0 && string.Equals(Stack.Peek().Key, uiKey, StringComparison.Ordinal);
}
/// <summary>
@ -270,7 +270,7 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
/// <returns>如果栈中包含指定UI则返回true否则返回false</returns>
public new bool Contains(string uiKey)
{
return Stack.Any(p => p.Key.Equals(uiKey));
return Stack.Any(p => string.Equals(p.Key, uiKey, StringComparison.Ordinal));
}
/// <summary>
@ -293,7 +293,7 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
public UiHandle Show(string uiKey, UiLayer layer, IUiPageEnterParam? param = null)
{
if (layer == UiLayer.Page)
throw new ArgumentException("Use Push() for Page layer");
throw new ArgumentException("Use Push() for Page layer", nameof(layer));
// 创建实例
var page = _factory.Create(uiKey);
@ -311,7 +311,7 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
public UiHandle Show(IUiPageBehavior page, UiLayer layer)
{
if (layer == UiLayer.Page)
throw new ArgumentException("Use Push() for Page layer");
throw new ArgumentException("Use Push() for Page layer", nameof(layer));
return ShowInternal(page, layer, null);
}
@ -414,7 +414,7 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
return Array.Empty<UiHandle>();
return layerDict
.Where(kvp => kvp.Value.Key.Equals(uiKey))
.Where(kvp => string.Equals(kvp.Value.Key, uiKey, StringComparison.Ordinal))
.Select(kvp => new UiHandle(uiKey, kvp.Key, layer))
.ToList();
}
@ -593,14 +593,18 @@ public abstract class UiRouterBase : RouterBase<IUiPageBehavior, IUiPageEnterPar
var handle = new UiHandle(page.Key, instanceId, layer);
// 初始化层级字典
if (!_layers.ContainsKey(layer))
_layers[layer] = new Dictionary<string, IUiPageBehavior>();
if (!_layers.TryGetValue(layer, out var layerDict))
{
layerDict = new Dictionary<string, IUiPageBehavior>(StringComparer.Ordinal);
_layers[layer] = layerDict;
}
// 设置句柄
page.Handle = handle;
var layerDict = _layers[layer];
// 检查重入性
if (!page.IsReentrant && layerDict.Values.Any(p => p.Key == page.Key))
if (!page.IsReentrant &&
layerDict.Values.Any(p => string.Equals(p.Key, page.Key, StringComparison.Ordinal)))
{
Log.Warn("UI {0} is not reentrant but already exists in layer {1}", page.Key, layer);
throw new InvalidOperationException(

View File

@ -0,0 +1,39 @@
# Analyzer Warning Reduction 跟踪归档RP074-RP078
## 范围
- 归档 `RP074``RP078` 期间从 active todo 中迁出的批次明细。
- 保留当前波次的已完成 slice 摘要、验证收口与延后候选,供后续恢复时回溯。
## 已完成批次摘要
- 第一轮并行 warning 清理:
- `GFramework.Core` 事件 / 状态 / 属性 / 协程统计中的 `MA0158` 专用锁迁移
- `GFramework.Game/Data``DataRepository``UnifiedSettingsDataRepository``SaveRepository``ConfigureAwait` / 比较器 / 专用锁修正
- `GFramework.Game/Scene/SceneRouterBase.cs``GFramework.Game/UI/UiRouterBase.cs` 中的显式上下文 / 参数名 / 比较器修正
- 收口提交:`fb0a55f` `fix(analyzer): 收口首轮并行警告清理`
- 第三轮 `Core.Tests` 低风险 slice
- `GFramework.Core.Tests/Concurrency/AsyncKeyLockManagerTests.cs``MA0004`
- `GFramework.Core.Tests/Pause/PauseStackManagerTests.cs``MA0158`
- `GFramework.Core.Tests/Extensions/AsyncExtensionsTests.cs``MA0015`
- `GFramework.Core.Tests/Architectures/ArchitectureModulesBehaviorTests.cs``MA0004`
## 批次验证快照
- `dotnet clean`
- 结果:提权直接执行成功,确认为当前权威 clean 基线
- `dotnet build`
- 结果:提权直接构建成功warning 从 `639` 降到 `397`
- `dotnet build GFramework.Core.Tests/GFramework.Core.Tests.csproj -c Release`
- 结果:提权直接构建成功;`0 Warning(s)`、`0 Error(s)`
## 延后候选
- `GFramework.Game/Config/YamlConfigLoader.cs``MA0158`
- 原因:单点可修,但文件同时承载其他高耦合 warning不适合在当前低风险批次顺手推进
- 测试项目中的 `MA0048` 文件名拆分波次
- 原因:会显著增加 changed-file 数,更适合另开后续波次
## 关联资料
- 详细执行过程见 [analyzer-warning-reduction-history-rp073-rp078.md](../traces/analyzer-warning-reduction-history-rp073-rp078.md)。

View File

@ -0,0 +1,176 @@
# Analyzer Warning Reduction 追踪归档RP073-RP078
## 2026-04-27 — RP-078
### 阶段:完成第三轮 Core.Tests 低风险 slice 并在 30 files 处收口
- 触发背景:
- 第二轮结束后,`GFramework.Game` 低风险单文件 warning 已基本耗尽,继续推进更适合转向测试项目
- 第三轮选择的 `Core.Tests` slice 仍保持单文件、低耦合,且不会明显放大 branch diff
- 已接受的 delegated scope 与结果:
- worker-A`GFramework.Core.Tests/Concurrency/AsyncKeyLockManagerTests.cs`
- 结果:与 `PauseStackManagerTests.cs` 一并落在提交 `650618b`,修复该文件的 `MA0004`
- worker-B`GFramework.Core.Tests/Pause/PauseStackManagerTests.cs`
- 结果:与 `AsyncKeyLockManagerTests.cs` 一并落在提交 `650618b`,修复该文件的 `MA0158`
- worker-C`GFramework.Core.Tests/Extensions/AsyncExtensionsTests.cs``GFramework.Core.Tests/Architectures/ArchitectureModulesBehaviorTests.cs`
- 结果:提交 `e19e60e`,修复 `MA0015` / `MA0004`
- 主线程验证里程碑:
- 提权 `dotnet clean`
- 结果:成功
- 提权 `dotnet build`
- 结果:成功warning 从上一轮的 `405` 降到 `397`
- 提权 `dotnet build GFramework.Core.Tests/GFramework.Core.Tests.csproj -c Release`
- 结果:成功;`0 Warning(s)`、`0 Error(s)`
- `git diff --name-only refs/remotes/origin/main...HEAD | wc -l`
- 结果:`30`
- `git diff --numstat refs/remotes/origin/main...HEAD`
- 结果:`642` changed lines
- 当前结论:
- 当前分支在 `30 / 50` files 时仍保持可审阅性,且已经连续三轮拿到了实质 warning 降幅
- 继续推进的剩余候选主要是 `YamlConfig*` 高耦合热点与 `MA0048` 批量拆分,不再符合本轮的低风险边界
- 默认建议在这里收口当前波次,把下一波次留给更明确的热点专项
## 2026-04-27 — RP-077
### 阶段:完成第二轮 Game 侧低风险 slice 验证并转向测试项目候选
- 触发背景:
- 第二轮 worker 已分别完成 `SettingsModel.cs``RouterBase.cs`+`UiInteractionProfiles.cs``GameConfigBootstrap.cs`
- 主线程在复验时发现 `SettingsModel.cs``GameConfigBootstrap.cs` 又各暴露一个 touched-file `MA0158`,已在主线程补齐
- 已接受的 delegated scope 与结果:
- worker-A`GFramework.Game/Setting/SettingsModel.cs`
- 结果:提交 `c106e53`,修复 `MA0004`;主线程随后补齐同文件 `MA0158`
- worker-B`GFramework.Game/Routing/RouterBase.cs``GFramework.Game/UI/UiInteractionProfiles.cs`
- 结果:提交 `9deafac`,修复 `MA0006` / `MA0099`
- worker-C`GFramework.Game/Config/GameConfigBootstrap.cs`
- 结果:提交 `9ce634e`,拆分热重载启动流程以修复 `MA0051`;主线程随后补齐同文件 `MA0158`
- explorer重新审视 `GFramework.Game` 排除热点后的剩余候选
- 结果:确认 `Game` 侧低风险单文件 warning 基本耗尽,继续推进应转向其他项目
- 主线程验证里程碑:
- 提权 `dotnet clean`
- 结果:成功
- 提权 `dotnet build`
- 结果:成功warning 从上一轮的 `430` 继续降到 `405`
- 提权 `dotnet build GFramework.Game/GFramework.Game.csproj -c Release`
- 结果:成功warning 从上一轮的 `147` 降到 `122`
- `git diff --name-only refs/remotes/origin/main...HEAD | wc -l`
- 结果:`26`
- `git diff --numstat refs/remotes/origin/main...HEAD`
- 结果:`483` changed lines
- 当前结论:
- 第二轮 Game 侧 warning 清理已完成验证,且 warning 数继续实质下降
- 当前分支距离 `$gframework-batch-boot 50` 仍有空间,但继续推进不应再硬碰 `YamlConfigSchemaValidator*` / `YamlConfigLoader.cs`
- 若继续下一轮,优先切向 `Core.Tests` 等测试项目里的单文件 `MA0004` / `MA0015` / `MA0158`
## 2026-04-27 — RP-076
### 阶段:首轮收口提交后进入第二轮低风险 Game warning slice
- 触发背景:
- 首轮并行清理已经以 `fb0a55f` 收口,当前分支相对 `origin/main` 的累计改动文件数来到 `22 / 50`
- 用户要求继续采用“先拿构建 warning再分批交给 subagent”模式因此当前仍有继续推进的 branch 预算
- 主线程当前真值:
- 当前基线:`refs/remotes/origin/main` = `617e0bf`
- 当前 `HEAD` stop metric
- files`22`
- changed lines`378`
- 最近权威验证仍为:
- `dotnet build`:`430 Warning(s)`、`0 Error(s)`
- `dotnet build GFramework.sln -c Release`:`147 Warning(s)`、`0 Error(s)`
- 本轮拟下发的 delegated scope
- worker-A`GFramework.Game/Setting/SettingsModel.cs`
- 目标:修复 `MA0004`,仅在不改变设置模型生命周期语义的前提下补全 `ConfigureAwait(false)`
- worker-B`GFramework.Game/Routing/RouterBase.cs``GFramework.Game/UI/UiInteractionProfiles.cs`
- 目标:修复 `MA0006` / `MA0099`,保持现有路由比较语义与 UI 动作位掩码语义不变
- worker-C`GFramework.Game/Config/GameConfigBootstrap.cs`
- 目标:评估并尽量修复 `MA0051`;若单文件安全提取不可低风险完成,应明确放弃并说明阻塞点
- 当前结论:
- 第二轮继续严格限制在低风险单文件 slice避免直接进入 `YamlConfigSchemaValidator*``YamlConfigLoader.cs` 这种高耦合热点
- 本轮完成后应重新评估 branch diff 是否仍适合继续在同一分支上批量推进
## 2026-04-27 — RP-075
### 阶段:完成 `$gframework-batch-boot 50` 第一轮并行 warning 清理集成
- 触发背景:
- 用户要求先以权威构建输出建立 warning 基线,再把低风险 warning family 按文件边界拆给不同 subagent 并行清理
- 当前批次已完成首轮 worker 集成,但第二组锁迁移、主线程补修与 `ai-plan` 同步仍在工作树,需先收口提交再进入下一轮
- 已接受的 delegated scope 与结果:
- worker-1`GFramework.Core` 事件 / 状态 / 属性 / 协程统计中的 `MA0158`
- 结果:已提交 `8f2d959`,采用 `#if NET9_0_OR_GREATER` + `System.Threading.Lock` / `object` 双分支兼容模式
- worker-2`GFramework.Core` / `GFramework.Cqrs` 资源、日志、配置缓存中的 `MA0158`
- 结果:改动已集成到工作树,待主线程与本轮 `ai-plan` 一并提交
- worker-3`GFramework.Game/Data``SceneRouterBase.cs`
- 结果:已提交 `e3eec54`,主线程随后补修 `SceneRouterBase.Contains``SaveRepository._migrationsLock` 的 touched-file 残留 warning
- worker-4`GFramework.Game/UI/UiRouterBase.cs`
- 结果:已提交 `7e13752`
- 主线程验证里程碑:
- 提权 `dotnet clean`
- 结果:成功
- 提权 `dotnet build`
- 结果:成功warning 从本轮批次建立时的 `639` 降到 `430`
- 提权 `dotnet build GFramework.sln -c Release`
- 结果:成功;`147 Warning(s)`、`0 Error(s)`
- `git diff --name-only refs/remotes/origin/main...HEAD | wc -l`
- 结果:`12`
- `git diff --numstat refs/remotes/origin/main...HEAD`
- 结果:`192` changed lines
- 当前结论:
- 第一轮并行 warning 清理已经完成验证,且 warning 总量出现明显下降,可以继续按 batch 模式推进
- 当前 stop-condition 仍远低于 `$gframework-batch-boot 50`;但在派发下一轮之前,应该先提交当前工作树里的第二组锁迁移与恢复文档同步
- 下一轮优先目标保持“低风险、单文件、避免高耦合热点”,候选包括 `SettingsModel.cs``RouterBase.cs``UiInteractionProfiles.cs`
## 2026-04-27 — RP-074
### 阶段:按 `$gframework-batch-boot 50` 建立并行 warning 清理批次
- 触发背景:
- 用户明确要求在拿到构建 warning 后分批指派给不同 subagent以控制主线程上下文长度并提高 warning 清理效率
- 当前 worktree 映射到 `analyzer-warning-reduction` 主题,且该任务符合 batch candidate 条件:重复、可切片、可按文件边界独立验证
- 基线与停止条件:
- 当前基线采用 `refs/remotes/origin/main`
- `origin/main``HEAD` 当前同为 `617e0bf``2026-04-26T12:17:15+08:00`
- 主 stop condition 为 branch diff files 接近 `50`;当前为 `0 / 50`
- 主线程实施:
- 先读取 `AGENTS.md``.ai/environment/tools.ai.yaml``ai-plan/public/README.md` 以及当前 topic 的 active todo/trace确认批处理流程与 topic 上下文
- 先在沙箱内执行仓库根 `dotnet clean` / `dotnet build`;其中 `dotnet clean` 因缺失 Windows fallback package folder 失败,判定为环境噪音
- 按仓库规则提权重跑直接命令,确认权威基线为 `dotnet clean` 成功、`dotnet build` 成功且 `639 Warning(s)`、`0 Error(s)`
- 基于当前 warning 输出,预划分以下互不重叠的 subagent ownership
- `GFramework.Core` / `GFramework.Cqrs``MA0158` 专用锁迁移
- `GFramework.Game/Data``MA0004` 与局部 `MA0002`
- `GFramework.Game/Scene/SceneRouterBase.cs``GFramework.Game/UI/UiRouterBase.cs` 的显式上下文 / 参数名 / 比较器修正
- 验证里程碑:
- `dotnet clean`
- 结果:提权后成功;作为本轮 clean 真值
- `dotnet build`
- 结果:提权后成功;`639 Warning(s)`、`0 Error(s)`
- `git diff --name-only refs/remotes/origin/main...HEAD | wc -l`
- 结果:`0`
- `git diff --numstat refs/remotes/origin/main...HEAD`
- 结果:空输出
- 当前结论:
- 本轮已经完成 batch boot 所需的权威警告基线建立,可以安全进入并行 worker 阶段
- 当前优先级应继续保持在低风险、少文件、可独立验证的 warning family 上,不直接扩展到 `YamlConfigSchemaValidator` 这类高耦合热点
- 下一步默认由主线程下发 disjoint worker 任务并在集成后重新计算 branch diff 与 warning 结果
## 2026-04-26 — RP-073
### 阶段:脱敏 analyzer-warning-reduction 文档中的绝对路径记录
- 触发背景:
- 用户再次显式要求执行 `$gframework-pr-review`,当前分支仍对应 PR `#291`
- 最新抓取结果确认 latest-head 还剩 `2` 条 open review thread分别指向 active todo 与 archive trace 中记录的绝对路径
- active trace 当前也保留了同类 `/tmp` 路径记录;虽然这次 review 没直接点名,但继续保留会留下同一类治理缺口
- 主线程实施:
- 将 active todo 与 active trace 中的 PR review 输出路径改写为 `--json-output <current-pr-review-json>`
- 将 [analyzer-warning-reduction-history-rp062-rp071.md](analyzer-warning-reduction-history-rp062-rp071.md) 里的临时 `dotnet` home、PR review 输出路径和失效 Windows fallback package folder 改写为仓库安全占位符
- 同步刷新 active todo 中的 review 真值,把当前恢复点更新到 `RP-073`
- 验证里程碑:
- `python3 .agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py --json-output <current-pr-review-json>`
- 结果:成功;确认 PR `#291` latest-head open review thread 为 `2`,两者都指向 `ai-plan` 文档中的绝对路径记录
- `dotnet build`
- 结果:成功;`639 Warning(s)`、`0 Error(s)`;与当前权威仓库根基线一致
- 当前结论:
- 本轮只吸收当前仍成立的 PR review 文档项,不扩展到新的 warning 清理切片
- 当前仓库根 warning 权威基线仍保持 `639 Warning(s)``0 Error(s)`;本轮目标是让 analyzer-warning-reduction 主题下当前入口不再记录绝对路径
- 下一轮默认先推送本轮同步并重新执行 `$gframework-pr-review`,确认 PR `#291` 的 open thread 是否已自动收口

View File

@ -6,46 +6,65 @@
## 当前恢复点
- 恢复点编号:`ANALYZER-WARNING-REDUCTION-RP-073`
- 当前阶段:`Phase 73`
- 恢复点编号:`ANALYZER-WARNING-REDUCTION-RP-081`
- 当前阶段:`Phase 81`
- 当前焦点:
- `2026-04-26` 主线程再次按 `$gframework-pr-review` 复核当前分支 PR `#291`,确认 latest-head 仍剩 `2` 条 open review thread均指向 `ai-plan` 文档中的绝对路径记录
- 当前批次同步 active todo/trace 与相关 archive trace把 PR review 输出路径、临时 `dotnet` home 和失效 Windows fallback package folder 改写为仓库安全占位符
- `dotnet clean` + `dotnet build` 的直接仓库根基线仍为 `639 Warning(s)``0 Error(s)`,因此本轮属于文档真值收口,而不是新的 warning 清理批次
- `2026-04-27` 已复核 PR `#295` 的 latest-head review确认 `ThrowShouldNotRetry` 的 `ParamName` open thread 属于 stale finding本地代码已经使用传入值而非 `nameof(parameterName)`
- 已清理 `AsyncExtensionsTests.WithRetry_Should_Respect_ShouldRetry_Predicate` 中的冗余 `Task.Delay(50)`,保留 `ParamName == nameof(taskFactory)` 断言锁定契约
- 已增强 `.agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py` 的 failed-test 表格解析,允许 `Name` / `Failure Message` 后出现尾随额外列
- 已新增 Python `unittest` 回归用例覆盖“尾随额外列不影响前两列提取”的场景
- 当前剩余 warning 热点仍集中在 `YamlConfigSchemaValidator*``YamlConfigLoader.cs` 与大批量 `MA0048` 文件名拆分;这些 slice 仍高于本轮 PR review follow-up 的低风险边界
## 当前活跃事实
- 当前 `origin/main` 基线提交为 `4ad880c``2026-04-25T14:35:38+08:00`)。
- 提权后的直接仓库根验证当前确认为:
- `dotnet clean`
- 结果:成功;此前沙箱内 “Build FAILED but 0 errors” 的 clean 结果不是仓库真值
- `dotnet build`
- 最新结果:成功;`639 Warning(s)``0 Error(s)`
- 当前分支低风险批次文件:
- `ai-plan/public/analyzer-warning-reduction/todos/analyzer-warning-reduction-tracking.md`
- `ai-plan/public/analyzer-warning-reduction/traces/analyzer-warning-reduction-trace.md`
- `ai-plan/public/analyzer-warning-reduction/archive/traces/analyzer-warning-reduction-history-rp062-rp071.md`
- 当前批次验证结果:
- 当前 `origin/main` 基线提交为 `617e0bf``2026-04-26T12:17:15+08:00`)。
- 当前 PR review 真值:
- `python3 .agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py --json-output <current-pr-review-json>`
- 最新主线程结果:成功;确认 PR `#291` latest-head open review thread 为 `2`,两者都指向 `ai-plan` 文档中的绝对路径记录
- `dotnet build`
- 最新主线程结果:成功;`639 Warning(s)``0 Error(s)`;与当前权威仓库根基线一致
- 最新结果:成功;当前分支对应 PR 为 `#295`
- 当前测试报告输出已能显示 `Summary` 统计、失败测试名称,以及 `Name / Failure Message` 表格中的关键信息
- 当前 GitHub latest-head review 仍显示 `1` 条 open thread但该线程指向的 `nameof(parameterName)` 问题已不在本地代码中成立,属于 stale finding
- 当前 latest review 中仍有 `2` 条与本地工作树一致的 nitpick`AsyncExtensionsTests` 冗余等待,以及 failed-test 表格解析对尾随列不鲁棒
- 当前直接验证结果:
- `python3 .agents/skills/gframework-pr-review/scripts/test_fetch_current_pr_review.py`
- 最新结果:成功;`Ran 1 test in 0.000s`, `OK`
- `python3 .agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py --section tests --json-output /tmp/current-pr-review-postfix.json`
- 最新结果:成功;真实 PR 评论抓取仍能输出 `2` 份测试报告,失败用例详情保持可见
- `dotnet test GFramework.Core.Tests/GFramework.Core.Tests.csproj -c Release --filter "FullyQualifiedName~WithRetry_Should_Respect_ShouldRetry_Predicate"`
- 最新结果:成功;`Failed: 0, Passed: 1, Skipped: 0, Total: 1`
- `dotnet test GFramework.Game.Tests/GFramework.Game.Tests.csproj -c Release --filter "FullyQualifiedName~RegisterMigration_During_Cache_Rebuild_Should_Not_Leave_Stale_Type_Cache"`
- 最新结果:成功;`Failed: 0, Passed: 1, Skipped: 0, Total: 1`
- 当前分支 stop-condition 指标:
- `git diff --name-only refs/remotes/origin/main...HEAD | wc -l`
- 最新结果:`35`
- `git diff --numstat refs/remotes/origin/main...HEAD`
- 最新结果:`642` changed lines
- 当前批次摘要:
- 三轮低风险 warning 清理已在此前验证中将仓库根 warning 从 `639` 降到 `397`
- 当前批次的已完成 slice 明细已迁移到归档active todo 仅保留恢复真值
- 本轮新增内容为 PR review nitpick 收口与脚本回归测试补齐,不扩展 warning reduction 的热点清理边界
- 当前建议保留到下一波次的候选:
- `GFramework.Game/Config/YamlConfigLoader.cs``MA0158`(单点可修,但文件本身同时承载其他高耦合 warning
- 测试项目中的 `MA0048` 文件名拆分波次(会显著增加 changed-file 数)
## 当前风险
- `GFramework.Core``GFramework.Game``GFramework.Core.Tests``GFramework.Cqrs.Tests` 仍有较大 warning 基线。
- 缓解措施:后续批次继续优先挑低风险、少文件、可独立验证的测试与局部逻辑切片。
- 当前 review 相关真值要等新 head 推送后才能在 GitHub UI 中自动收口。
- 缓解措施:本轮提交后立即重新执行 `$gframework-pr-review`,确认 PR `#291` 的 latest-head thread 与 nitpick 是否消失。
- `GFramework.Game/Config/YamlConfigSchemaValidator*.cs` 仍然聚集多类高耦合 warning。
- 缓解措施:本轮先避开该热点,只清理低风险且 ownership 清晰的文件集合。
- `MA0158` 迁移涉及 `net8.0` / `net9.0` / `net10.0` 多目标兼容。
- 缓解措施:复用 `StoreSelection.cs` 已存在的 `#if NET9_0_OR_GREATER` 专用锁模式,不在 `net8.0` 引入不兼容 API。
- 当前 PR open thread 与 CI 失败信号仍依赖新提交进入远端 PR head 才能复核。
- 缓解措施:本轮提交并推送后重新执行 `$gframework-pr-review`,确认 stale open thread 是否被 GitHub 收口,以及两条 nitpick 是否从 latest review 中消失。
## 活跃文档
- 当前轮次归档:
- [analyzer-warning-reduction-history-rp074-rp078.md](../archive/todos/analyzer-warning-reduction-history-rp074-rp078.md)
- [analyzer-warning-reduction-history-rp042-rp048.md](../archive/todos/analyzer-warning-reduction-history-rp042-rp048.md)
- 历史跟踪归档:
- [analyzer-warning-reduction-history-rp001.md](../archive/todos/analyzer-warning-reduction-history-rp001.md)
- [analyzer-warning-reduction-history-rp002-rp041.md](../archive/todos/analyzer-warning-reduction-history-rp002-rp041.md)
- 历史 trace 归档:
- [analyzer-warning-reduction-history-rp073-rp078.md](../archive/traces/analyzer-warning-reduction-history-rp073-rp078.md)
- [analyzer-warning-reduction-history-rp062-rp071.md](../archive/traces/analyzer-warning-reduction-history-rp062-rp071.md)
- [analyzer-warning-reduction-history-rp001.md](../archive/traces/analyzer-warning-reduction-history-rp001.md)
- [analyzer-warning-reduction-history-rp002-rp041.md](../archive/traces/analyzer-warning-reduction-history-rp002-rp041.md)
@ -53,11 +72,12 @@
## 验证说明
- 权威验证结果统一维护在“当前活跃事实”和“当前批次验证结果”。
- 权威验证结果统一维护在“当前活跃事实”。
- `GFramework.Core.Tests` 当前仍有既有 analyzer / nullable warning 基线,因此本轮验证只证明 PR review 修复未引入构建错误,未将该项目 warning 清零。
- 后续若刷新构建或 PR review 真值,只更新上述权威区块,不在本节重复抄录。
## 下一步建议
1. 推送包含本轮 absolute-path 脱敏的提交后,重新执行 `$gframework-pr-review`,确认 PR `#291` 的 latest-head open thread 是否已自动收口
2. 若 PR `#291` 已清零,继续以当前 `639 Warning(s)` 根基线为恢复点,按 `$gframework-batch-boot 50` 规则挑选下一个 1-3 文件的低风险热点
3. 若 GitHub 仍保留 review 信号,先确认它们是否仍指向新 head再决定是否需要继续清理同主题下的其它历史 `ai-plan` 记录
1. 提交本轮 `AsyncExtensionsTests` / `$gframework-pr-review` nitpick 修复、Python 回归测试与 `ai-plan` 同步
2. 推送后重新执行 `$gframework-pr-review`,确认 PR `#295` 的 stale open thread、nitpick 与测试报告是否已刷新为新 head 真值
3. 若后续继续推进 warning reduction建议另开下一波次处理 `YamlConfigLoader.cs` 热点或测试项目 `MA0048` 拆分波次

View File

@ -1,32 +1,51 @@
# Analyzer Warning Reduction 追踪
## 2026-04-26 — RP-073
## 2026-04-27 — RP-081
### 阶段:脱敏 analyzer-warning-reduction 文档中的绝对路径记录
### 阶段:核实 PR `#295` 的剩余 nitpick并补齐脚本解析回归测试
- 触发背景:
- 用户再次显式要求执行 `$gframework-pr-review`,当前分支仍对应 PR `#291`
- 最新抓取结果确认 latest-head 还剩 `2` 条 open review thread,分别指向 active todo 与 archive trace 中记录的绝对路径
- active trace 当前也保留了同类 `/tmp` 路径记录;虽然这次 review 没直接点名,但继续保留会留下同一类治理缺口
- 用户再次执行 `$gframework-pr-review`,需要根据当前 PR `#295` 的 latest-head review 继续核实哪些反馈仍需在本地处理
- 远端 review 显示 `1` 条 open thread 与 `2` 条 nitpick,需要区分 stale finding 与仍然成立的本地问题
- 主线程实施:
- 将 active todo 与 active trace 中的 PR review 输出路径改写为 `--json-output <current-pr-review-json>`
- 将 [analyzer-warning-reduction-history-rp062-rp071.md](../archive/traces/analyzer-warning-reduction-history-rp062-rp071.md) 里的临时 `dotnet` home、PR review 输出路径和失效 Windows fallback package folder 改写为仓库安全占位符
- 同步刷新 active todo 中的 review 真值,把当前恢复点更新到 `RP-073`
- 复核 `/tmp/current-pr-review.json` 与本地 `AsyncExtensionsTests.cs`,确认 open thread 指向的 `nameof(parameterName)` 问题已在现有代码中修复,属于 stale finding
- 删除 `GFramework.Core.Tests/Extensions/AsyncExtensionsTests.cs``WithRetry_Should_Respect_ShouldRetry_Predicate` 的冗余 `Task.Delay(50)`,将测试改回同步断言路径
- 调整 `.agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py``parse_failed_test_details`,允许 failed-test HTML 表格在 `Name` / `Failure Message` 后追加额外列
- 新增 `.agents/skills/gframework-pr-review/scripts/test_fetch_current_pr_review.py`,以 `unittest` 覆盖“尾随额外列不影响前两列提取”的回归场景
- 验证里程碑:
- `python3 .agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py --json-output <current-pr-review-json>`
- 结果:成功;确认 PR `#291` latest-head open review thread 为 `2`,两者都指向 `ai-plan` 文档中的绝对路径记录
- `dotnet build`
- 结果:成功;`639 Warning(s)`、`0 Error(s)`;与当前权威仓库根基线一致
- `python3 .agents/skills/gframework-pr-review/scripts/test_fetch_current_pr_review.py`
- 结果:成功;`Ran 1 test in 0.000s`, `OK`
- `python3 .agents/skills/gframework-pr-review/scripts/fetch_current_pr_review.py --section tests --json-output /tmp/current-pr-review-postfix.json`
- 结果:成功;真实 PR 评论抓取仍显示 `2` 份测试报告,失败测试名与 failure message 摘要保持可见
- `dotnet test GFramework.Core.Tests/GFramework.Core.Tests.csproj -c Release --filter "FullyQualifiedName~WithRetry_Should_Respect_ShouldRetry_Predicate"`
- 结果:成功;`Failed: 0, Passed: 1, Skipped: 0, Total: 1`
- 当前结论:
- 本轮只吸收当前仍成立的 PR review 文档项,不扩展到新的 warning 清理切片
- 当前仓库根 warning 权威基线仍保持 `639 Warning(s)`、`0 Error(s)`;本轮目标是让 analyzer-warning-reduction 主题下当前入口不再记录绝对路径
- 下一轮默认先推送本轮同步并重新执行 `$gframework-pr-review`,确认 PR `#291` 的 open thread 是否已自动收口
- 本轮 latest-head review 中只有 `AsyncExtensionsTests` 冗余等待与 failed-test 表格尾随列容错性两个 nitpick 仍与本地代码一致,现已修复
- `ThrowShouldNotRetry` 的 `ParamName` open thread 属于 stale finding,本地代码已经符合预期,只需等待新提交进入远端后复核 thread 状态
## 活跃风险
- PR 上的 latest-head review thread 与测试报告仍需要等新提交进入远端后再复核。
- 缓解措施:提交并推送后重新执行 `$gframework-pr-review`,只以新的 latest-head 和 test report 为准。
- `YamlConfigSchemaValidator*``YamlConfigLoader.cs``MA0048` 拆分仍是下一波次的高耦合候选。
- 缓解措施:保持本轮边界只处理 PR review nitpick follow-up,不顺手扩展 warning reduction 范围。
## 下一步
1. 完成本轮提交。
2. 推送后重新执行 `$gframework-pr-review`,确认 PR `#295` 的 stale open thread 与 nitpick 是否已刷新。
## 历史归档指针
- 最新 trace 归档:
- [analyzer-warning-reduction-history-rp073-rp078.md](../archive/traces/analyzer-warning-reduction-history-rp073-rp078.md)
- [analyzer-warning-reduction-history-rp062-rp071.md](../archive/traces/analyzer-warning-reduction-history-rp062-rp071.md)
- 早期 trace 归档:
- 历史 todo 归档:
- [analyzer-warning-reduction-history-rp074-rp078.md](../archive/todos/analyzer-warning-reduction-history-rp074-rp078.md)
- [analyzer-warning-reduction-history-rp042-rp048.md](../archive/todos/analyzer-warning-reduction-history-rp042-rp048.md)
- 早期归档:
- [analyzer-warning-reduction-history-rp001.md](../archive/traces/analyzer-warning-reduction-history-rp001.md)
- [analyzer-warning-reduction-history-rp002-rp041.md](../archive/traces/analyzer-warning-reduction-history-rp002-rp041.md)
- [analyzer-warning-reduction-history-rp042-rp048.md](../archive/traces/analyzer-warning-reduction-history-rp042-rp048.md)
- [analyzer-warning-reduction-history-rp001.md](../archive/todos/analyzer-warning-reduction-history-rp001.md)
- [analyzer-warning-reduction-history-rp002-rp041.md](../archive/todos/analyzer-warning-reduction-history-rp002-rp041.md)