From 2b4c471b4873ee6b3f4548f373a64abf47c69568 Mon Sep 17 00:00:00 2001 From: Malcolm Nixon Date: Mon, 4 May 2026 08:02:31 -0400 Subject: [PATCH 1/3] Add verification-doc and convert tests to xUnit. --- .cspell.yaml | 2 +- .fileassert.yaml | 67 +++- .github/agents/developer.agent.md | 2 +- .github/agents/formal-review.agent.md | 2 + .github/agents/lint-fix.agent.md | 7 +- .github/agents/repo-consistency.agent.md | 77 ---- .github/pull_request_template.md | 11 +- .github/standards/coding-principles.md | 30 +- .github/standards/csharp-language.md | 62 +++- .github/standards/csharp-testing.md | 123 +++---- .github/standards/design-documentation.md | 11 + .github/standards/reqstream-usage.md | 6 +- .github/standards/requirements-principles.md | 4 + .github/standards/reviewmark-usage.md | 13 +- .github/standards/software-items.md | 3 +- .github/standards/technical-documentation.md | 103 +++--- .../standards/verification-documentation.md | 128 +++++++ .reviewmark.yaml | 142 ++++++-- AGENTS.md | 51 ++- docs/design/file-assert/ots-dependencies.md | 6 +- docs/reqstream/ots/mstest.yaml | 27 -- docs/reqstream/ots/pandoc.yaml | 1 + docs/reqstream/ots/weasyprint.yaml | 1 + docs/reqstream/ots/xunit.yaml | 43 +++ docs/verification/definition.yaml | 49 +++ docs/verification/file-assert/cli/cli.md | 74 ++++ docs/verification/file-assert/cli/context.md | 331 ++++++++++++++++++ .../configuration/configuration.md | 49 +++ .../configuration/file-assert-config.md | 107 ++++++ .../configuration/file-assert-data.md | 26 ++ docs/verification/file-assert/file-assert.md | 255 ++++++++++++++ .../file-assert/modeling/file-assert-file.md | 141 ++++++++ .../modeling/file-assert-html-assert.md | 128 +++++++ .../modeling/file-assert-json-assert.md | 145 ++++++++ .../modeling/file-assert-pdf-assert.md | 157 +++++++++ .../file-assert/modeling/file-assert-rule.md | 148 ++++++++ .../file-assert/modeling/file-assert-test.md | 144 ++++++++ .../modeling/file-assert-text-assert.md | 77 ++++ 
.../modeling/file-assert-xml-assert.md | 128 +++++++ .../modeling/file-assert-yaml-assert.md | 145 ++++++++ .../file-assert/modeling/modeling.md | 55 +++ docs/verification/file-assert/program.md | 71 ++++ .../file-assert/selftest/selftest.md | 48 +++ .../file-assert/selftest/validation.md | 125 +++++++ .../file-assert/utilities/path-helpers.md | 133 +++++++ .../file-assert/utilities/utilities.md | 31 ++ docs/verification/introduction.md | 114 ++++++ docs/verification/ots/buildmark.md | 34 ++ docs/verification/ots/fileassert.md | 45 +++ docs/verification/ots/pandoc.md | 87 +++++ docs/verification/ots/reqstream.md | 36 ++ docs/verification/ots/reviewmark.md | 44 +++ docs/verification/ots/sarifmark.md | 42 +++ docs/verification/ots/sonarmark.md | 58 +++ docs/verification/ots/versionmark.md | 42 +++ docs/verification/ots/weasyprint.md | 89 +++++ docs/verification/ots/xunit.md | 103 ++++++ docs/verification/title.txt | 13 + requirements.yaml | 2 +- .../AssemblyInfo.cs | 32 +- .../Cli/CliTests.cs | 46 +-- .../Cli/ContextNewPropertiesTests.cs | 40 +-- .../Cli/ContextTests.cs | 148 ++++---- .../Configuration/ConfigurationTests.cs | 28 +- .../Configuration/FileAssertConfigTests.cs | 52 +-- .../DemaConsulting.FileAssert.Tests.csproj | 9 +- .../IntegrationTests.cs | 138 ++++---- .../Modeling/FileAssertFileTests.cs | 72 ++-- .../Modeling/FileAssertHtmlAssertTests.cs | 46 +-- .../Modeling/FileAssertJsonAssertTests.cs | 54 +-- .../Modeling/FileAssertPdfAssertTests.cs | 58 +-- .../Modeling/FileAssertRuleTests.cs | 66 ++-- .../Modeling/FileAssertTestTests.cs | 58 +-- .../Modeling/FileAssertTextAssertTests.cs | 32 +- .../Modeling/FileAssertXmlAssertTests.cs | 46 +-- .../Modeling/FileAssertYamlAssertTests.cs | 54 +-- .../Modeling/ModelingTests.cs | 18 +- .../ProgramTests.cs | 14 +- .../SelfTest/SelfTestTests.cs | 12 +- .../SelfTest/ValidationTests.cs | 32 +- .../Utilities/PathHelpersTests.cs | 44 +-- .../Utilities/UtilitiesTests.cs | 6 +- 82 files changed, 4390 insertions(+), 913 
deletions(-) delete mode 100644 .github/agents/repo-consistency.agent.md create mode 100644 .github/standards/verification-documentation.md delete mode 100644 docs/reqstream/ots/mstest.yaml create mode 100644 docs/reqstream/ots/xunit.yaml create mode 100644 docs/verification/definition.yaml create mode 100644 docs/verification/file-assert/cli/cli.md create mode 100644 docs/verification/file-assert/cli/context.md create mode 100644 docs/verification/file-assert/configuration/configuration.md create mode 100644 docs/verification/file-assert/configuration/file-assert-config.md create mode 100644 docs/verification/file-assert/configuration/file-assert-data.md create mode 100644 docs/verification/file-assert/file-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-file.md create mode 100644 docs/verification/file-assert/modeling/file-assert-html-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-json-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-pdf-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-rule.md create mode 100644 docs/verification/file-assert/modeling/file-assert-test.md create mode 100644 docs/verification/file-assert/modeling/file-assert-text-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-xml-assert.md create mode 100644 docs/verification/file-assert/modeling/file-assert-yaml-assert.md create mode 100644 docs/verification/file-assert/modeling/modeling.md create mode 100644 docs/verification/file-assert/program.md create mode 100644 docs/verification/file-assert/selftest/selftest.md create mode 100644 docs/verification/file-assert/selftest/validation.md create mode 100644 docs/verification/file-assert/utilities/path-helpers.md create mode 100644 docs/verification/file-assert/utilities/utilities.md create mode 100644 docs/verification/introduction.md create mode 100644 docs/verification/ots/buildmark.md 
create mode 100644 docs/verification/ots/fileassert.md create mode 100644 docs/verification/ots/pandoc.md create mode 100644 docs/verification/ots/reqstream.md create mode 100644 docs/verification/ots/reviewmark.md create mode 100644 docs/verification/ots/sarifmark.md create mode 100644 docs/verification/ots/sonarmark.md create mode 100644 docs/verification/ots/versionmark.md create mode 100644 docs/verification/ots/weasyprint.md create mode 100644 docs/verification/ots/xunit.md create mode 100644 docs/verification/title.txt diff --git a/.cspell.yaml b/.cspell.yaml index ff0116a..66e400a 100644 --- a/.cspell.yaml +++ b/.cspell.yaml @@ -19,7 +19,7 @@ words: - Dema - fileassert - Linq - - mstest + - xunit - Pandoc - reqstream - reviewmark diff --git a/.fileassert.yaml b/.fileassert.yaml index acf3a8b..6728e75 100644 --- a/.fileassert.yaml +++ b/.fileassert.yaml @@ -1,7 +1,7 @@ --- # FileAssert document validation tests for FileAssert. # Tests are tagged by document group to allow per-group execution during the build pipeline. -# Tags: build-notes, code-quality, code-review, design, user-guide, requirements. +# Tags: build-notes, code-quality, code-review, design, verification, user-guide, requirements. # # NOTE: build-notes through user-guide tests provide OTS evidence for Pandoc and WeasyPrint # and run before ReqStream. 
The requirements tests run after ReqStream and validate the @@ -15,7 +15,7 @@ tests: description: "Build Notes HTML was generated by Pandoc" tags: [build-notes] files: - - pattern: "docs/build_notes/build_notes.html" + - pattern: "docs/build_notes/generated/build_notes.html" count: 1 html: - query: "//head/title" @@ -27,7 +27,7 @@ tests: description: "Build Notes PDF was generated by WeasyPrint" tags: [build-notes] files: - - pattern: "docs/FileAssert Build Notes.pdf" + - pattern: "docs/generated/FileAssert Build Notes.pdf" count: 1 pdf: metadata: @@ -48,7 +48,7 @@ tests: description: "Code Quality HTML was generated by Pandoc" tags: [code-quality] files: - - pattern: "docs/code_quality/quality.html" + - pattern: "docs/code_quality/generated/quality.html" count: 1 html: - query: "//head/title" @@ -60,7 +60,7 @@ tests: description: "Code Quality PDF was generated by WeasyPrint" tags: [code-quality] files: - - pattern: "docs/FileAssert Code Quality.pdf" + - pattern: "docs/generated/FileAssert Code Quality.pdf" count: 1 pdf: metadata: @@ -81,7 +81,7 @@ tests: description: "Code Review Plan HTML was generated by Pandoc" tags: [code-review] files: - - pattern: "docs/code_review_plan/plan.html" + - pattern: "docs/code_review_plan/generated/plan.html" count: 1 html: - query: "//head/title" @@ -93,7 +93,7 @@ tests: description: "Code Review Plan PDF was generated by WeasyPrint" tags: [code-review] files: - - pattern: "docs/FileAssert Review Plan.pdf" + - pattern: "docs/generated/FileAssert Review Plan.pdf" count: 1 pdf: metadata: @@ -114,7 +114,7 @@ tests: description: "Code Review Report HTML was generated by Pandoc" tags: [code-review] files: - - pattern: "docs/code_review_report/report.html" + - pattern: "docs/code_review_report/generated/report.html" count: 1 html: - query: "//head/title" @@ -126,7 +126,7 @@ tests: description: "Code Review Report PDF was generated by WeasyPrint" tags: [code-review] files: - - pattern: "docs/FileAssert Review Report.pdf" + - pattern: 
"docs/generated/FileAssert Review Report.pdf" count: 1 pdf: metadata: @@ -147,7 +147,7 @@ tests: description: "Design HTML was generated by Pandoc" tags: [design] files: - - pattern: "docs/design/design.html" + - pattern: "docs/design/generated/design.html" count: 1 html: - query: "//head/title" @@ -159,7 +159,7 @@ tests: description: "Design PDF was generated by WeasyPrint" tags: [design] files: - - pattern: "docs/FileAssert Software Design.pdf" + - pattern: "docs/generated/FileAssert Software Design.pdf" count: 1 pdf: metadata: @@ -174,13 +174,46 @@ tests: text: - contains: "Design" + # --- VERIFICATION DOCUMENT --- + + - name: Pandoc_VerificationHtml + description: "Verification HTML was generated by Pandoc" + tags: [verification] + files: + - pattern: "docs/verification/generated/verification.html" + count: 1 + html: + - query: "//head/title" + count: 1 + text: + - contains: "Verification" + + - name: WeasyPrint_VerificationPdf + description: "Verification PDF was generated by WeasyPrint" + tags: [verification] + files: + - pattern: "docs/generated/FileAssert Software Verification Design.pdf" + count: 1 + pdf: + metadata: + - field: "Title" + contains: "Verification" + - field: "Author" + contains: "DEMA Consulting" + - field: "Subject" + contains: "Verification design document" + pages: + min: 3 + text: + - contains: "Verification" + # --- USER GUIDE --- - name: Pandoc_UserGuideHtml description: "User Guide HTML was generated by Pandoc" tags: [user-guide] files: - - pattern: "docs/user_guide/introduction.html" + - pattern: "docs/user_guide/generated/user_guide.html" count: 1 html: - query: "//head/title" @@ -192,7 +225,7 @@ tests: description: "User Guide PDF was generated by WeasyPrint" tags: [user-guide] files: - - pattern: "docs/FileAssert User Guide.pdf" + - pattern: "docs/generated/FileAssert User Guide.pdf" count: 1 pdf: metadata: @@ -214,7 +247,7 @@ tests: description: "Requirements HTML was generated by Pandoc" tags: [requirements] files: - - pattern: 
"docs/requirements_doc/requirements.html" + - pattern: "docs/requirements_doc/generated/requirements.html" count: 1 html: - query: "//head/title" @@ -226,7 +259,7 @@ tests: description: "Requirements PDF was generated by WeasyPrint" tags: [requirements] files: - - pattern: "docs/FileAssert Requirements.pdf" + - pattern: "docs/generated/FileAssert Requirements.pdf" count: 1 pdf: metadata: @@ -248,7 +281,7 @@ tests: description: "Trace Matrix HTML was generated by Pandoc" tags: [requirements] files: - - pattern: "docs/requirements_report/trace_matrix.html" + - pattern: "docs/requirements_report/generated/trace_matrix.html" count: 1 html: - query: "//head/title" @@ -260,7 +293,7 @@ tests: description: "Trace Matrix PDF was generated by WeasyPrint" tags: [requirements] files: - - pattern: "docs/FileAssert Trace Matrix.pdf" + - pattern: "docs/generated/FileAssert Trace Matrix.pdf" count: 1 pdf: metadata: diff --git a/.github/agents/developer.agent.md b/.github/agents/developer.agent.md index 35f5dda..a95c562 100644 --- a/.github/agents/developer.agent.md +++ b/.github/agents/developer.agent.md @@ -21,7 +21,7 @@ Perform software development tasks by determining and applying appropriate stand 5. **Formatting**: Run `pwsh ./fix.ps1` to silently apply all available auto-fixers (dotnet format, markdown, YAML) before committing 6. **Build and test** (code changes only): Run `pwsh ./build.ps1` and confirm it - passes — report FAILED if the build or any tests fail + passes - report FAILED if the build or any tests fail 7. 
**Generate completion report** per the AGENTS.md reporting requirements - save to `.agent-logs/{agent-name}-{subject}-{unique-id}.md` and return the summary to the caller diff --git a/.github/agents/formal-review.agent.md b/.github/agents/formal-review.agent.md index 88b0691..7dd8e84 100644 --- a/.github/agents/formal-review.agent.md +++ b/.github/agents/formal-review.agent.md @@ -20,6 +20,8 @@ Before reviewing, read these standards to inform review judgments: hierarchy and categorization review judgments - **`design-documentation.md`** - defines mandatory sections, structural conventions, and coverage expected at each level; informs all design documentation review judgments +- **`verification-documentation.md`** - defines mandatory sections, structural conventions, + and coverage expected at each level; informs all verification design review judgments For review sets that include source code or tests, also consult the relevant standards from the selection matrix in AGENTS.md. diff --git a/.github/agents/lint-fix.agent.md b/.github/agents/lint-fix.agent.md index 83ad8cb..549e751 100644 --- a/.github/agents/lint-fix.agent.md +++ b/.github/agents/lint-fix.agent.md @@ -36,7 +36,12 @@ submission, not during normal development. - **markdownlint MD013 (line length)**: Wrap long lines at natural break points, after commas, before conjunctions, or at sentence boundaries. Do not break - in the middle of a code span or URL. + in the middle of a code span or URL. **Pipe-tables that cannot be wrapped + without breaking structure** are a special case - convert them to a bullet + list if the data reads naturally that way, or rewrite as a + [grid table](https://pandoc.org/MANUAL.html#tables) if a tabular layout is + essential. Do not get stuck trying to squeeze a wide pipe-table into 120 + characters. - **markdownlint other rules**: Apply the specific fix indicated in the output (e.g., missing blank lines, heading levels, code fence languages). 
diff --git a/.github/agents/repo-consistency.agent.md b/.github/agents/repo-consistency.agent.md deleted file mode 100644 index 5dbe99f..0000000 --- a/.github/agents/repo-consistency.agent.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -name: repo-consistency -description: > - Ensures downstream repositories remain consistent with the TemplateDotNetTool - template patterns and best practices. -user-invocable: true ---- - -# Repo Consistency Agent - -Maintain consistency between downstream projects and the TemplateDotNetTool template, ensuring repositories -benefit from template evolution while respecting project-specific customizations. - -# Consistency Workflow (MANDATORY) - -**CRITICAL**: This agent MUST follow these steps systematically to ensure proper template consistency analysis: - -1. **Fetch Recent Template Changes**: Use GitHub search to fetch the 20 most recently merged PRs - (`is:pr is:merged sort:updated-desc`) from -2. **Analyze Template Evolution**: For each relevant PR, determine the intent and scope of changes - (what files were modified, what improvements were made) -3. **Assess Downstream Applicability**: Evaluate which template changes would benefit this repository - while respecting project-specific customizations -4. **Apply Appropriate Updates**: Implement applicable template improvements with proper translation for project context -5. **Validate Consistency**: Verify that applied changes maintain functionality and follow project patterns -6. 
**Generate completion report** per the AGENTS.md reporting requirements - save to - `.agent-logs/{agent-name}-{subject}-{unique-id}.md` and return the summary to the caller - -## Key Principles - -- **Evolutionary Consistency**: Template improvements should enhance downstream projects systematically -- **Intelligent Customization Respect**: Distinguish valid customizations from unintentional drift -- **Incremental Template Adoption**: Support phased adoption of template improvements based on project capacity - -# Don't Do These Things - -- **Never recommend changes without understanding project context** (some differences are intentional) -- **Never flag valid project-specific customizations** as consistency problems -- **Never apply template changes blindly** without assessing downstream project impact -- **Never ignore template evolution benefits** when they clearly improve downstream projects -- **Never recommend breaking changes** without migration guidance and impact assessment -- **Never skip validation** of preserved functionality after template alignment -- **Never assume all template patterns apply universally** (assess project-specific needs) - -# Report Template - -```markdown -# Repo Consistency Report - -**Result**: (SUCCEEDED|FAILED) - -## Consistency Analysis - -- **Template PRs Analyzed**: {Number and timeframe of PRs reviewed} -- **Template Changes Identified**: {Count and types of template improvements} -- **Applicable Updates**: {Changes determined suitable for this repository} -- **Project Customizations Preserved**: {Valid differences maintained} - -## Template Evolution Applied - -- **Files Modified**: {List of files updated for template consistency} -- **Improvements Adopted**: {Specific template enhancements implemented} -- **Configuration Updates**: {Tool configurations, workflows, or standards updated} - -## Consistency Status - -- **Template Alignment**: {Overall consistency rating with template} -- **Customization Respect**: {How 
project-specific needs were preserved} -- **Functionality Validation**: {Verification that changes don't break existing features} -- **Future Consistency**: {Recommendations for ongoing template alignment} - -## Issues Resolved - -- **Drift Corrections**: {Template drift issues addressed} -- **Enhancement Adoptions**: {Template improvements successfully integrated} -- **Validation Results**: {Testing and validation outcomes} -``` diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 0d0123c..82a413e 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -26,16 +26,11 @@ Before submitting this pull request, ensure you have completed the following: ### Build and Test -- [ ] Code builds successfully: `dotnet build --configuration Release` -- [ ] All unit tests pass: `dotnet test --configuration Release` -- [ ] Self-validation tests pass: - `dotnet run --project src/DemaConsulting.FileAssert --configuration Release --framework net10.0` - `--no-build -- --validate` +- [ ] Code builds successfully and all tests pass: `pwsh ./build.ps1` - [ ] Code produces zero warnings ### Code Quality -- [ ] Code formatting is correct: `dotnet format --verify-no-changes` - [ ] New code has appropriate XML documentation comments - [ ] Static analyzer warnings have been addressed @@ -43,7 +38,7 @@ Before submitting this pull request, ensure you have completed the following: Please run the following checks before submitting: -- [ ] **All linters pass**: `./lint.sh` (Unix/macOS) or `cmd /c lint.bat` / `./lint.bat` (Windows) +- [ ] **All linters pass**: `pwsh ./lint.ps1` ### Testing @@ -55,7 +50,7 @@ Please run the following checks before submitting: ### Documentation - [ ] Updated README.md (if applicable) -- [ ] Updated ARCHITECTURE.md (if applicable) +- [ ] Updated docs/ documentation (if applicable) - [ ] Added code examples for new features (if applicable) - [ ] Updated requirements.yaml (if applicable) diff --git 
a/.github/standards/coding-principles.md b/.github/standards/coding-principles.md index 213c031..9e67fbb 100644 --- a/.github/standards/coding-principles.md +++ b/.github/standards/coding-principles.md @@ -20,11 +20,35 @@ All code MUST follow literate programming principles: matches design intent without reading the full codebase - **Logical Separation**: Complex functions use block comments to separate and describe logical steps within the implementation -- **Public Documentation**: All public interfaces have comprehensive documentation - because consumers and auditors rely on interface contracts for integration - and compliance verification +- **Full Symbol Documentation**: ALL symbols have comprehensive documentation + because reviewers and auditors must verify every implementation detail, not + just the public interface - access-level specifics (public, protected, + private, internal, etc.) vary by language; see the language-specific standard - **Clarity Over Cleverness**: Code should be immediately understandable by team members +## API Documentation + +Good API documentation enables consumers, reviewers, and agents to use an +interface correctly without reading the implementation: + +- **Self-Contained**: Each member's documentation must be fully understandable + in isolation - consumers must not need to read the implementation to call it + correctly +- **Intent-Focused**: Explain WHY the member exists and WHAT problem it solves, + not just restate the name - this lets reviewers verify the implementation + matches design intent +- **Parameter and Return Contracts**: Document valid ranges, null handling, and + boundary cases - agents and consumers rely on these contracts to call the API + correctly +- **Error Conditions**: Document every exception or error code, the condition + that triggers it, and how the caller should respond - undocumented errors + cannot be handled correctly +- **Side Effects**: Document I/O, state mutation, resource allocation, or + 
network calls - hidden side effects cause integration bugs that are hard to + diagnose +- **Thread Safety**: State whether the API is safe for concurrent use - missing + this forces consumers to read the implementation or risk data races + ## Universal Code Architecture Principles ### Design Patterns diff --git a/.github/standards/csharp-language.md b/.github/standards/csharp-language.md index 707b0f9..6df39cd 100644 --- a/.github/standards/csharp-language.md +++ b/.github/standards/csharp-language.md @@ -4,37 +4,63 @@ description: Follow these standards when developing C# source code. globs: ["**/*.cs"] --- -# C# Language Development Standard - -## Required Standards +# Required Standards Read these standards first before applying this standard: - **`coding-principles.md`** - Universal coding principles and quality gates -# File Patterns - -- **Source Files**: `**/*.cs` +# API Documentation and Literate Coding Example -# Literate Coding Example +The example below demonstrates good XmlDoc API documentation combined with +literate coding comments. ```csharp -// Validate input parameters to prevent downstream errors -if (string.IsNullOrEmpty(input)) +/// +/// Converts a raw sensor reading into a validated measurement ready for downstream consumers. +/// +/// +/// Clamping is preferred over throwing on out-of-range values because sensor drift at +/// range boundaries is expected; clamping produces a usable result where rejection would +/// discard valid near-boundary readings. Stateless and thread-safe; the calibration +/// profile is read but never modified. +/// +/// Raw sensor value. Must be finite (NaN and infinities are rejected). +/// Calibration profile providing offset and range. Must not be null. +/// Corrected value clamped to [calibration.Minimum, calibration.Maximum]. +/// Thrown when is NaN or infinite. +/// Thrown when is null. 
+public double ProcessReading(double reading, CalibrationProfile calibration) { - throw new ArgumentException("Input cannot be null or empty", nameof(input)); -} - -// Transform input data using the configured processing pipeline -var processedData = ProcessingPipeline.Transform(input); + // Reject invalid inputs before any calculation - non-finite readings cannot be + // corrected, and a null calibration profile provides no offset or range to apply + if (!double.IsFinite(reading)) + throw new ArgumentException("Reading must be a finite number.", nameof(reading)); + ArgumentNullException.ThrowIfNull(calibration); -// Apply business rules and validation logic -var validatedResults = BusinessRuleEngine.ValidateAndProcess(processedData); + // Apply the calibration offset to convert raw counts to physical units + var corrected = reading + calibration.Offset; -// Return formatted results matching the expected output contract -return OutputFormatter.Format(validatedResults); + // Clamp to the operational range so consumers can rely on the documented contract + return Math.Clamp(corrected, calibration.Minimum, calibration.Maximum); +} ``` +Key qualities demonstrated above: + +- **``** is a brief one-liner explaining *what* the method does +- **``** sits directly after summary and carries the extended intent - + *why* it exists, design decisions, thread-safety, and side-effect disclosures +- **`` tags** state constraints (finite, non-null) so callers know what + is valid without reading the body +- **``** documents the boundary guarantee so consumers can rely on the + contract +- **`` tags** name every thrown exception and the condition that + triggers each one +- **Inline block comments** follow the Literate Coding principles from + `coding-principles.md`, separating logical steps so reviewers can verify each + step against design intent + # Code Formatting - **Format entire solution**: `dotnet format` diff --git a/.github/standards/csharp-testing.md 
b/.github/standards/csharp-testing.md index 1591eeb..181de02 100644 --- a/.github/standards/csharp-testing.md +++ b/.github/standards/csharp-testing.md @@ -4,115 +4,74 @@ description: Follow these standards when developing C# tests. globs: ["**/test/**/*.cs", "**/tests/**/*.cs", "**/*Tests.cs", "**/*Test.cs"] --- -# C# Testing Standards (MSTest) - -This document defines standards for C# test development using -MSTest within Continuous Compliance environments. - -## Required Standards +# Required Standards Read these standards first before applying this standard: - **`testing-principles.md`** - Universal testing principles and dependency boundaries - **`csharp-language.md`** - C# language development standards -# C# AAA Pattern Implementation +# Package Reference -```csharp -[TestMethod] -public void ServiceName_MethodName_Scenario_ExpectedBehavior() -{ - // Arrange: description of setup (omit if nothing to set up) +Every xUnit v3 test project requires the following package references for +`dotnet test` to discover and execute tests: - // Act: description of action (can combine with Assert when action occurs within assertion) +| Package | Purpose | +| ------- | ------- | +| `xunit.v3` | xUnit v3 framework (monolithic - includes assertions and fixtures) | +| `Microsoft.NET.Test.Sdk` | Required by the VSTest/`dotnet test` host for test discovery | +| `xunit.runner.visualstudio` | VSTest adapter that bridges xUnit v3 to `dotnet test` | - // Assert: description of verification -} -``` +Omitting `Microsoft.NET.Test.Sdk` or `xunit.runner.visualstudio` causes tests +to be silently undiscoverable by `dotnet test`. + +If tests require mocking of dependencies, add `NSubstitute` as a package +reference - it is recommended when mocking is needed but is not required for +every test project. -# Test Naming Standards +# Test Style -Use descriptive test names because test names appear in requirements traceability matrices and compliance reports. 
+Test names appear in requirements traceability matrices - use the hierarchical +naming pattern, and follow AAA with labeled comments: - **System tests**: `{SystemName}_{Functionality}_{Scenario}_{ExpectedBehavior}` - **Subsystem tests**: `{SubsystemName}_{Functionality}_{Scenario}_{ExpectedBehavior}` - **Unit tests**: `{ClassName}_{MethodUnderTest}_{Scenario}_{ExpectedBehavior}` -- **Descriptive Scenarios**: Clearly describe the input condition being tested -- **Expected Behavior**: State the expected outcome or exception - -## Examples - -- `UserValidator_ValidateEmail_ValidFormat_ReturnsTrue` -- `UserValidator_ValidateEmail_InvalidFormat_ThrowsArgumentException` -- `PaymentProcessor_ProcessPayment_InsufficientFunds_ReturnsFailureResult` - -# Mock Dependencies - -Mock external dependencies using NSubstitute (preferred) because tests must run in isolation to generate -reliable evidence. - -- **Isolate System Under Test**: Mock all external dependencies (databases, web services, file systems) -- **Verify Interactions**: Assert that expected method calls occurred with correct parameters -- **Predictable Behavior**: Set up mocks to return known values for consistent test results - -# MSTest V4 Anti-patterns - -Avoid these common MSTest V4 patterns because they produce poor error messages or cause tests to be silently ignored. 
- -# Avoid Assertions in Catch Blocks (MSTEST0058) - -Instead of wrapping code in try/catch and asserting in the catch block, use `Assert.ThrowsExactly()`: - -```csharp -var ex = Assert.ThrowsExactly(() => SomeWork()); -Assert.Contains("Some message", ex.Message); -``` - -# Avoid Assert.IsTrue/IsFalse for Equality Checks - -Use `Assert.AreEqual`/`Assert.AreNotEqual` instead, as they provide better failure messages: - -```csharp -// ❌ Bad: Assert.IsTrue(result == expected); -// ✅ Good: Assert.AreEqual(expected, result); -``` - -# Avoid Non-Public Test Classes and Methods - -Test classes and `[TestMethod]` methods must be `public` or they will be silently ignored: ```csharp -// ❌ Bad: internal class MyTests -// ✅ Good: public class MyTests -``` - -# Avoid Assert.IsTrue for Collection Count - -Use `Assert.HasCount` for count assertions: +/// +/// Validates that an invalid email format throws an ArgumentException. +/// +[Fact] +public void UserValidator_ValidateEmail_InvalidFormat_ThrowsArgumentException() +{ + // Arrange: create a validator with default configuration + var validator = new UserValidator(); -```csharp -// ❌ Bad: Assert.IsTrue(collection.Count == 3); -// ✅ Good: Assert.HasCount(3, collection); + // Act / Assert: email with no domain throws + Assert.Throws(() => validator.ValidateEmail("not-an-email")); +} ``` -# Avoid Assert.IsTrue for String Prefix Checks +# xUnit v3 Specifics -Use `Assert.StartsWith` instead, as it produces clearer failure messages: +These are non-obvious v3 behaviors that differ from v2 or common assumptions: -```csharp -// ❌ Bad: Assert.IsTrue(value.StartsWith("prefix")); -// ✅ Good: Assert.StartsWith("prefix", value); -``` +- **`IAsyncLifetime`**: Both `InitializeAsync` and `DisposeAsync` return `ValueTask` + in v3, not `Task` - using `Task` compiles but does not satisfy the v3 interface +- **`Assert.Multiple`**: Use to collect all assertion failures in a single test + rather than stopping at the first +- **`[Collection]` without 
`[CollectionDefinition]`**: Silently disables parallelism + without providing any shared fixture - always pair them or remove `[Collection]` # Quality Checks Before submitting C# tests, verify: - [ ] All tests follow AAA pattern with clear section comments -- [ ] Test names follow hierarchical patterns defined in Test Naming Standards section -- [ ] Each test verifies single, specific behavior (no shared state) +- [ ] Test names follow hierarchical naming pattern above +- [ ] Each test verifies single, specific behavior (no shared state between tests) - [ ] Both success and failure scenarios covered including edge cases -- [ ] External dependencies mocked with NSubstitute or equivalent +- [ ] External dependencies mocked with NSubstitute (when mocking is needed) - [ ] Tests linked to requirements with source filters where needed -- [ ] Test results generate TRX format for ReqStream compatibility -- [ ] MSTest V4 anti-patterns avoided (proper assertions, public visibility, etc.) +- [ ] Test results generated in TRX format for ReqStream compatibility (`dotnet test --logger trx`) diff --git a/.github/standards/design-documentation.md b/.github/standards/design-documentation.md index 30becb5..3b448f3 100644 --- a/.github/standards/design-documentation.md +++ b/.github/standards/design-documentation.md @@ -108,6 +108,13 @@ src/Project2Name/ └── HelperClass.cs - Helper functions ``` +### References Section (RECOMMENDED) + +If the design references external documents (standards, specifications), include +a `## References` section in `introduction.md`. This is the **only** place in the +design document collection where a References section should appear - do not add +one to any other design file. 
+ ### Companion Artifact Structure (RECOMMENDED) Include a brief note explaining that each software item has parallel artifacts @@ -122,6 +129,7 @@ parallel directory trees: - Requirements: `docs/reqstream/{system}/.../{item}.yaml` (kebab-case) - Design docs: `docs/design/{system}/.../{item}.md` (kebab-case) +- Verification design: `docs/verification/{system}/.../{item}.md` (kebab-case) - Source code: `src/{System}/.../{Item}.{ext}` (cased per language - see `software-items.md`) - Tests: `test/{System}.Tests/.../{Item}Tests.{ext}` (cased per language - see `software-items.md`) - Review-sets: defined in `.reviewmark.yaml` @@ -168,6 +176,9 @@ implementation specification for formal code review: - **Implementation Detail**: Provide sufficient detail for code review and implementation - **Architectural Clarity**: Clearly define component boundaries and interfaces - **Traceability**: Link to requirements where applicable using ReqStream patterns +- **Verbal Cross-References**: Reference other parts of the design by name (e.g., + "See *Parser Design* for more details") - do not use markdown hyperlinks, which + break in compiled PDFs # Mermaid Diagram Integration diff --git a/.github/standards/reqstream-usage.md b/.github/standards/reqstream-usage.md index ae5e565..58b08b4 100644 --- a/.github/standards/reqstream-usage.md +++ b/.github/standards/reqstream-usage.md @@ -104,16 +104,16 @@ dotnet reqstream --requirements requirements.yaml --lint # Generate requirements document for compliance record dotnet reqstream --requirements requirements.yaml \ - --report docs/requirements_doc/requirements.md + --report docs/requirements_doc/generated/requirements.md # Generate justifications document for compliance record dotnet reqstream --requirements requirements.yaml \ - --justifications docs/requirements_doc/justifications.md + --justifications docs/requirements_doc/generated/justifications.md # Generate trace matrix proving each requirement is covered by passing tests dotnet 
reqstream --requirements requirements.yaml \ --tests "artifacts/**/*.trx" \ - --matrix docs/requirements_report/trace_matrix.md + --matrix docs/requirements_report/generated/trace_matrix.md ``` # Quality Checks diff --git a/.github/standards/requirements-principles.md b/.github/standards/requirements-principles.md index 7d2d572..b6cf136 100644 --- a/.github/standards/requirements-principles.md +++ b/.github/standards/requirements-principles.md @@ -29,6 +29,10 @@ implementation code. - **Valid**: "The parser shall report the line number of the first syntax error." - **Not a requirement (design decision)**: "The parser shall use a `TokenStream` class." +A unit may use its own name freely - that is identity, not HOW. What is +forbidden is describing *internal construction*: class names, method signatures, +algorithms, or data structures. + # Requirements at Every Level (MANDATORY) Every identified subsystem and unit MUST have its own requirements file because diff --git a/.github/standards/reviewmark-usage.md b/.github/standards/reviewmark-usage.md index 5d6219e..2f778dc 100644 --- a/.github/standards/reviewmark-usage.md +++ b/.github/standards/reviewmark-usage.md @@ -20,7 +20,7 @@ review, organizes them into review-sets, and generates review plans and reports. - **Lint Configuration**: `dotnet reviewmark --lint` - **Elaborate Review-Set**: `dotnet reviewmark --elaborate {review-set}` -- **Generate Plan**: `dotnet reviewmark --plan docs/code_review_plan/plan.md --enforce` +- **Generate Plan**: `dotnet reviewmark --plan docs/code_review_plan/generated/plan.md --enforce` > **Note**: `--enforce` causes the plan to fail with a non-zero exit code if any repository > files are not covered by a review-set. Uncovered files indicate a gap in review-set @@ -31,7 +31,8 @@ review, organizes them into review-sets, and generates review plans and reports. 
Required repository items for ReviewMark operation: - `.reviewmark.yaml` - Configuration for review-sets, file-patterns, and review evidence-source. -- `docs/code_review_plan/` - Review planning artifacts +- `docs/code_review_plan/generated/` - Generated review plan (build output, do not edit) +- `docs/code_review_report/generated/` - Generated review report (build output, do not edit) # Review Definition Structure @@ -55,6 +56,7 @@ needs-review: - "README.md" # Root level README - "docs/user_guide/**/*.md" # User guide - "docs/design/**/*.md" # Design documentation + - "docs/verification/**/*.md" # Verification design documentation # Source of review evidence evidence-source: @@ -109,6 +111,8 @@ Reviews system architecture and operational validation: - System requirements: `docs/reqstream/{system-name}/{system-name}.yaml` - Design introduction: `docs/design/introduction.md` - System design: `docs/design/{system-name}/{system-name}.md` + - Verification introduction: `docs/verification/introduction.md` + - System verification design: `docs/verification/{system-name}/{system-name}.md` - System integration tests: `test/{SystemName}.Tests/{SystemName}Tests.{ext}` ## `{System}-Design` Review (one per system) @@ -147,6 +151,7 @@ Reviews subsystem architecture and interfaces: - **File Path Patterns**: - Requirements: `docs/reqstream/{system-name}/.../{subsystem-name}/{subsystem-name}.yaml` - Design: `docs/design/{system-name}/.../{subsystem-name}/{subsystem-name}.md` + - Verification design: `docs/verification/{system-name}/.../{subsystem-name}/{subsystem-name}.md` - Tests: `test/{SystemName}.Tests/.../{SubsystemName}/{SubsystemName}Tests.{ext}` ## `{System}-{Subsystem[-Child...]}-{Unit}` Review (one per unit) @@ -159,6 +164,7 @@ Reviews individual software unit implementation: - **File Path Patterns**: - Requirements: `docs/reqstream/{system-name}/.../{unit-name}.yaml` - Design: `docs/design/{system-name}/.../{unit-name}.md` + - Verification design: 
`docs/verification/{system-name}/.../{unit-name}.md` - Source: `src/{SystemName}/.../{UnitName}.{ext}` - Tests: `test/{SystemName}.Tests/.../{UnitName}Tests.{ext}` @@ -175,6 +181,9 @@ Before submitting ReviewMark configuration, verify: - [ ] System-level reviews follow hierarchical scope principle (exclude subsystem/unit details) - [ ] Subsystem reviews follow hierarchical scope principle (exclude unit source code) - [ ] Only unit reviews include actual source code files +- [ ] Architecture review-sets include system verification design alongside system design +- [ ] Subsystem review-sets include subsystem verification design +- [ ] Unit review-sets include unit verification design - [ ] Each review-set focuses on a single compliance question (single focus principle) - [ ] File patterns use correct glob syntax and match intended files - [ ] Review-set file counts remain manageable (context management principle) diff --git a/.github/standards/software-items.md b/.github/standards/software-items.md index bb67b1d..4e5c90e 100644 --- a/.github/standards/software-items.md +++ b/.github/standards/software-items.md @@ -84,11 +84,12 @@ Choose the appropriate category based on scope and testability: # Software Item Artifact Model -Each software item has four artifact types that together form a complete review +Each software item has five artifact types that together form a complete review unit - because reviewing any one artifact in isolation cannot determine whether the item is correct, well-designed, and proven to work: - **Requirements** - WHAT the item must do (drives all other artifacts; applies to all item types) - **Design** - HOW the item satisfies its requirements (in-house items only: system, subsystem, unit) +- **Verification Design** - HOW the requirements will be tested (applies to all item types) - **Source code** - The implementation of the design (in-house units only) - **Tests** - PROOF the item does WHAT it is required to do (applies to all item types) 
diff --git a/.github/standards/technical-documentation.md b/.github/standards/technical-documentation.md index 455b2fd..7ff5b5a 100644 --- a/.github/standards/technical-documentation.md +++ b/.github/standards/technical-documentation.md @@ -1,7 +1,7 @@ --- name: Technical Documentation description: Follow these standards when creating technical documentation. -globs: ["docs/**/*.md", "README.md"] +globs: ["docs/**/*.md", "README.md", "!docs/**/generated/**"] --- # Technical Documentation Standards @@ -23,63 +23,25 @@ for regulatory review: - **Review Integration**: Documentation follows ReviewMark patterns for formal review tracking -# Documentation Organization +# Pandoc Document Structure (MANDATORY) -Structure documentation under `docs/` following standard patterns for -consistency and tool compatibility: +Each document collection under `docs/` follows this layout: ```text -docs/ - build_notes.md # Generated by BuildMark - build_notes/ # Auto-generated build notes - versions.md # Generated by VersionMark - code_review_plan/ # Auto-generated review plans - plan.md # Generated by ReviewMark - code_review_report/ # Auto-generated review reports - report.md # Generated by ReviewMark - design/ # Design documentation - introduction.md # Design overview - {system-name}/ # System architecture folder - {system-name}.md # System architecture - {subsystem-name}/ # Subsystem folder; may nest recursively - {subsystem-name}.md # Subsystem-specific designs - {child-subsystem}/ # Child subsystem (same structure) - {unit-name}.md # Unit-specific designs - {unit-name}.md # Top-level unit design - reqstream/ # Requirements source files - {system-name}/ # System requirements folder - {system-name}.yaml # System requirements - platform-requirements.yaml # Platform requirements - {subsystem-name}/ # Subsystem folder; may nest recursively - {subsystem-name}.yaml # Subsystem requirements - {child-subsystem}/ # Child subsystem (same structure) - {unit-name}.yaml # Unit-specific 
requirements - {unit-name}.yaml # Top-level unit requirements - ots/ # OTS requirement files - {ots-name}.yaml # OTS requirements - requirements_doc/ # Auto-generated requirements reports - requirements.md # Generated by ReqStream - justifications.md # Generated by ReqStream - requirements_report/ # Auto-generated trace matrices - trace_matrix.md # Generated by ReqStream - user_guide/ # User-facing documentation - introduction.md # User guide overview - {section}.md # User guide sections +docs/{collection}/ + title.txt # MANDATORY - YAML document metadata (title, author, etc.) + definition.yaml # MANDATORY - Pandoc build definition (inputs, template, paths) + introduction.md # MANDATORY - document introduction (Purpose, Scope, References) + {section}.md # optional checked-in content sections (zero or more) + generated/ # BUILD OUTPUT - never read, edit, or lint these files + {report}.md # generated by CI tools (ReqStream, ReviewMark, SarifMark, etc.) + {collection}.html # generated by Pandoc ``` -# Pandoc Document Structure (MANDATORY) - -All document collections processed by Pandoc MUST include all four files below - -without `title.txt` and `definition.yaml` the pipeline cannot generate the document: - -- `title.txt` - YAML metadata (title, subtitle, author, description, lang, keywords) -- `definition.yaml` - Pandoc build definition (resource paths, input file list, template) -- `introduction.md` - document introduction -- `{sections}.md` - additional content sections - -When creating a new document collection, create `title.txt` and `definition.yaml` -alongside `introduction.md`. Use the existing files under `docs/` as templates - -they share a consistent structure across all collections. +Without `title.txt` and `definition.yaml` the pipeline cannot generate the document. 
+When creating a new document collection, create these three files together and use +the existing collections under `docs/` as templates - they share a consistent +structure across all collections. **`title.txt`** - YAML front matter with document metadata. Use the existing files under `docs/` as a pattern and keep fields consistent with the rest of @@ -106,8 +68,17 @@ Include regulatory or business drivers where applicable. Define what is covered and what is explicitly excluded from this documentation. Specify version, system boundaries, and applicability constraints. + +## References + +- [REF-1] Document Title, Author, Version, Date +- [REF-2] Standard Name (e.g., IEEE 12207, ISO 9001) ``` +The `Purpose`, `Scope`, and `References` sections are **unique to `introduction.md`** and must +**not** be replicated in other markdown files within the same document collection. Including them +elsewhere causes duplicate sections in the compiled PDF. + ## Document Ordering List documents in logical reading order in Pandoc configuration because @@ -135,6 +106,19 @@ References in design/technical documents must point to **external specifications - **INCLUDE**: Requirements documents, system specifications, program documents, standards (IEEE, ISO, etc.) - **NEVER INCLUDE**: Internal development standards (`.github/standards/` files) - these are agent guides +## Cross-References (Within-Document and Cross-Document) + +Do **not** use markdown hyperlinks to reference other sections or documents. Markdown anchor links +(`[text](#heading)`) and relative file links work in a browser but break when compiled to a PDF. + +Instead use **verbal references** - plain prose that identifies the target by name: + +> See *XYZ Design* for more details. +> +> Refer to the *System Requirements* document for the full specification. + +Verbal references are readable by both AI agents and humans in any rendering environment. 
+ # Markdown Format Requirements Markdown documentation in this repository must follow the formatting standards @@ -156,14 +140,13 @@ for consistency and professional presentation: # Auto-Generated Content (CRITICAL) -**NEVER modify auto-generated markdown files** because changes will be -overwritten and break compliance automation: +**NEVER read, lint, or modify files inside any `generated/` folder** - they are +build outputs that are overwritten on every CI run: -- **Read-Only Files**: Generated reports under `docs/requirements_doc/`, - `docs/requirements_report/`, `docs/code_review_plan/`, and - `docs/code_review_report/` are regenerated on every build -- **Source Modification**: Update source files (requirements YAML, code - comments) instead of generated output +- **Location**: All generated files live in `generated/` subfolders within their + respective `docs/` sections, or in `docs/generated/` for final release artifacts +- **Source Modification**: Update source files (requirements YAML, `.reviewmark.yaml`, + tool configuration) instead of generated output - **Tool Integration**: Generated content integrates with CI/CD pipelines and manual changes disrupt automation diff --git a/.github/standards/verification-documentation.md b/.github/standards/verification-documentation.md new file mode 100644 index 0000000..f6f407f --- /dev/null +++ b/.github/standards/verification-documentation.md @@ -0,0 +1,128 @@ +--- +name: Verification Documentation +description: Follow these standards when creating software verification design documentation. 
+globs: ["docs/verification/**/*.md"] +--- + +# Required Standards + +Read these standards first before applying this standard: + +- **`technical-documentation.md`** - General technical documentation standards +- **`software-items.md`** - Software categorization (System/Subsystem/Unit/OTS) + +# Core Principles + +Verification design is the bridge between requirements and tests - it documents HOW +requirements will be verified, enabling reviewers to confirm test completeness without +reading implementation code. + +# Required Structure and Documents + +Organize under `docs/verification/` mirroring the software item hierarchy: + +```text +docs/verification/ +├── introduction.md # Verification overview +├── {system-name}/ # System-level verification folder (one per system) +│ ├── {system-name}.md # System-level verification design +│ ├── {subsystem-name}/ # Subsystem (kebab-case); may nest recursively +│ │ ├── {subsystem-name}.md # Subsystem verification design +│ │ ├── {child-subsystem}/ # Child subsystem (same structure as parent) +│ │ └── {unit-name}.md # Unit-level verification design documents +│ └── {unit-name}.md # Top-level unit verification documents (if not in subsystem) +└── ots/ # OTS items (one verification file per OTS item) + └── {ots-name}.md # Verification evidence for each OTS item +``` + +## introduction.md (MANDATORY) + +Follow the standard `introduction.md` format from `technical-documentation.md`. Scope +covers all software items including OTS items (via self-validation if appropriate). 
+ +Include a Companion Artifact Structure note so agents and reviewers can navigate from any +artifact to all related files: + +```text +In-house items have parallel artifacts in: +- Requirements: `docs/reqstream/{system}/.../{item}.yaml` (kebab-case) +- Design: `docs/design/{system}/.../{item}.md` (kebab-case) +- Verification: `docs/verification/{system}/.../{item}.md` (kebab-case) +- Source: `src/{System}/.../{Item}.{ext}` (cased per language) +- Tests: `test/{System}.Tests/.../{Item}Tests.{ext}` (cased per language) + +OTS items have parallel artifacts in: +- Requirements: `docs/reqstream/ots/{ots-name}.yaml` (kebab-case) +- Verification: `docs/verification/ots/{ots-name}.md` (kebab-case) +- Tests: `test/{OtsName}.Tests/...` (cased per language, if required) + +Review-sets: defined in `.reviewmark.yaml` +``` + +If the verification design references external documents (standards, specifications), include +a `## References` section in `introduction.md` only - do not add one to any other verification file. 
+ +## System Verification Design (MANDATORY) + +For each system, create a kebab-case folder and `{system-name}.md` covering: + +- System verification strategy and overall test approach +- Test environments and configuration required +- External interface simulation and test-harness design +- End-to-end and integration test scenarios covering system requirements +- Acceptance criteria and pass/fail conditions at the system boundary +- Coverage mapping of system requirements to system-level test scenarios + +## Subsystem Verification Design (MANDATORY) + +For each subsystem, create a kebab-case folder and `{subsystem-name}.md` covering: + +- Subsystem verification strategy and integration test approach +- Dependencies that must be mocked or stubbed at the subsystem boundary +- Integration test scenarios covering subsystem requirements +- Coverage mapping of subsystem requirements to subsystem-level test scenarios + +## Unit Verification Design (MANDATORY) + +For each unit, create `{unit-name}.md` covering: + +- Verification approach for each unit requirement +- Named test scenarios including boundary conditions, error paths, and normal-operation cases +- Which dependencies are mocked and how they are configured +- Coverage mapping of every unit requirement to at least one named test scenario + +## OTS Verification Evidence (when OTS items are used) + +For each OTS item, create `docs/verification/ots/{ots-name}.md` covering: + +- The OTS item's required functionality (reference `docs/reqstream/ots/{ots-name}.yaml`) +- Verification of each requirement (using self-validation evidence if appropriate) +- Coverage mapping of OTS requirements to test scenarios + +# Writing Guidelines + +- **Test Coverage**: Map every requirement to at least one named test scenario so + reviewers can verify completeness without reading test code +- **Scenario Clarity**: Name each scenario clearly - "Valid input returns parsed result" not "Test 1" +- **Boundary Conditions**: Call out boundary 
values, error inputs, and edge cases explicitly +- **Isolation Strategy**: Describe what is mocked or stubbed and why at each level +- **Traceability**: Link to requirements where applicable using ReqStream patterns +- **Verbal Cross-References**: Reference other documents by name - do not use markdown + hyperlinks, which break in compiled PDFs + +Mermaid diagrams may supplement text descriptions where test flow benefits from visual +representation, but must not replace text content. + +# Quality Checks + +Before submitting verification documentation, verify: + +- [ ] Every requirement at each level is mapped to at least one named test scenario +- [ ] System verification documents cover end-to-end and integration scenarios +- [ ] Subsystem verification documents identify mocked boundaries and integration scenarios +- [ ] Unit verification documents identify individual scenarios including boundary and error paths +- [ ] Subsystem documentation folders use kebab-case names mirroring the source subsystem structure +- [ ] All documents follow technical documentation formatting standards +- [ ] Content is current with requirements and test implementation +- [ ] Every OTS item has `docs/verification/ots/{ots-name}.md` with requirement coverage +- [ ] Documents are integrated into ReviewMark review-sets for formal review diff --git a/.reviewmark.yaml b/.reviewmark.yaml index 8a022a1..43e5206 100644 --- a/.reviewmark.yaml +++ b/.reviewmark.yaml @@ -12,6 +12,7 @@ needs-review: - "requirements.yaml" - "docs/reqstream/**/*.yaml" - "docs/design/**/*.md" + - "docs/verification/**/*.md" - "!**/obj/**" - "!**/bin/**" @@ -42,6 +43,7 @@ reviews: - "docs/reqstream/file-assert/file-assert.yaml" # system requirements - "docs/design/introduction.md" # design introduction - "docs/design/file-assert/file-assert.md" # system design + - "docs/verification/file-assert/file-assert.md" # system verification - "test/**/IntegrationTests.cs" # integration tests - "test/**/AssemblyInfo.cs" # test 
infrastructure - "test/**/Runner.cs" # test infrastructure @@ -54,6 +56,8 @@ reviews: - "docs/reqstream/file-assert/platform-requirements.yaml" # platform requirements - "docs/design/introduction.md" # design introduction - "docs/design/file-assert/**/*.md" # all system design documents + - "docs/verification/introduction.md" # verification introduction + - "docs/verification/file-assert/**/*.md" # all verification documents # FileAssert-AllRequirements Review (one per system) - id: FileAssert-AllRequirements @@ -68,6 +72,7 @@ reviews: paths: - "docs/reqstream/file-assert/cli/cli.yaml" # subsystem requirements - "docs/design/file-assert/cli/cli.md" # subsystem design + - "docs/verification/file-assert/cli/cli.md" # subsystem verification - "test/**/Cli/CliTests.cs" # subsystem tests # FileAssert-Configuration Review (one per subsystem) @@ -76,6 +81,7 @@ reviews: paths: - "docs/reqstream/file-assert/configuration/configuration.yaml" # subsystem requirements - "docs/design/file-assert/configuration/configuration.md" # subsystem design + - "docs/verification/file-assert/configuration/configuration.md" # subsystem verification - "test/**/Configuration/ConfigurationTests.cs" # subsystem tests # FileAssert-Modeling Review (one per subsystem) @@ -84,6 +90,7 @@ reviews: paths: - "docs/reqstream/file-assert/modeling/modeling.yaml" # subsystem requirements - "docs/design/file-assert/modeling/modeling.md" # subsystem design + - "docs/verification/file-assert/modeling/modeling.md" # subsystem verification - "test/**/Modeling/ModelingTests.cs" # subsystem tests # FileAssert-Utilities Review (one per subsystem) @@ -92,6 +99,7 @@ reviews: paths: - "docs/reqstream/file-assert/utilities/utilities.yaml" # subsystem requirements - "docs/design/file-assert/utilities/utilities.md" # subsystem design + - "docs/verification/file-assert/utilities/utilities.md" # subsystem verification - "test/**/Utilities/UtilitiesTests.cs" # subsystem tests # FileAssert-SelfTest Review (one per 
subsystem) @@ -100,6 +108,7 @@ reviews: paths: - "docs/reqstream/file-assert/selftest/selftest.yaml" # subsystem requirements - "docs/design/file-assert/selftest/selftest.md" # subsystem design + - "docs/verification/file-assert/selftest/selftest.md" # subsystem verification - "test/**/SelfTest/SelfTestTests.cs" # subsystem tests # FileAssert-Cli-Context Review (one per unit) @@ -108,6 +117,7 @@ reviews: paths: - "docs/reqstream/file-assert/cli/context.yaml" # requirements - "docs/design/file-assert/cli/context.md" # design + - "docs/verification/file-assert/cli/context.md" # verification - "src/**/Cli/Context.cs" # implementation - "test/**/Cli/ContextTests.cs" # unit tests - "test/**/Cli/ContextNewPropertiesTests.cs" # unit tests @@ -118,6 +128,7 @@ reviews: paths: - "docs/reqstream/file-assert/program.yaml" # requirements - "docs/design/file-assert/program.md" # design + - "docs/verification/file-assert/program.md" # verification - "src/**/Program.cs" # implementation - "test/**/ProgramTests.cs" # unit tests @@ -127,6 +138,7 @@ reviews: paths: - "docs/reqstream/file-assert/selftest/validation.yaml" # requirements - "docs/design/file-assert/selftest/validation.md" # design + - "docs/verification/file-assert/selftest/validation.md" # verification - "src/**/SelfTest/Validation.cs" # implementation - "test/**/SelfTest/ValidationTests.cs" # unit tests @@ -136,6 +148,7 @@ reviews: paths: - "docs/reqstream/file-assert/utilities/path-helpers.yaml" # requirements - "docs/design/file-assert/utilities/path-helpers.md" # design + - "docs/verification/file-assert/utilities/path-helpers.md" # verification - "src/**/Utilities/PathHelpers.cs" # implementation - "test/**/Utilities/PathHelpersTests.cs" # unit tests @@ -145,6 +158,7 @@ reviews: paths: - "docs/reqstream/file-assert/modeling/file-assert-rule.yaml" # requirements - "docs/design/file-assert/modeling/file-assert-rule.md" # design + - "docs/verification/file-assert/modeling/file-assert-rule.md" # verification - 
"src/**/Modeling/FileAssertRule.cs" # implementation - "test/**/Modeling/FileAssertRuleTests.cs" # unit tests @@ -154,6 +168,7 @@ reviews: paths: - "docs/reqstream/file-assert/modeling/file-assert-file.yaml" # requirements - "docs/design/file-assert/modeling/file-assert-file.md" # design + - "docs/verification/file-assert/modeling/file-assert-file.md" # verification - "src/**/Modeling/FileAssertFile.cs" # implementation - "test/**/Modeling/FileAssertFileTests.cs" # unit tests @@ -163,6 +178,7 @@ reviews: paths: - "docs/reqstream/file-assert/modeling/file-assert-test.yaml" # requirements - "docs/design/file-assert/modeling/file-assert-test.md" # design + - "docs/verification/file-assert/modeling/file-assert-test.md" # verification - "src/**/Modeling/FileAssertTest.cs" # implementation - "test/**/Modeling/FileAssertTestTests.cs" # unit tests @@ -172,6 +188,7 @@ reviews: paths: - "docs/reqstream/file-assert/configuration/file-assert-config.yaml" # requirements - "docs/design/file-assert/configuration/file-assert-config.md" # design + - "docs/verification/file-assert/configuration/file-assert-config.md" # verification - "src/**/Configuration/FileAssertConfig.cs" # implementation - "test/**/Configuration/FileAssertConfigTests.cs" # unit tests @@ -181,6 +198,7 @@ reviews: paths: - "docs/reqstream/file-assert/configuration/file-assert-data.yaml" # requirements - "docs/design/file-assert/configuration/file-assert-data.md" # design + - "docs/verification/file-assert/configuration/file-assert-data.md" # verification - "src/**/Configuration/FileAssertData.cs" # implementation - "test/**/Configuration/FileAssertConfigTests.cs" # unit tests @@ -188,52 +206,128 @@ reviews: - id: FileAssert-Modeling-FileAssertTextAssert title: Review that FileAssert Modeling FileAssertTextAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-text-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-text-assert.md" # design - - 
"src/**/Modeling/FileAssertTextAssert.cs" # implementation - - "test/**/Modeling/FileAssertTextAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-text-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-text-assert.md" # design + - "docs/verification/file-assert/modeling/file-assert-text-assert.md" # verification + - "src/**/Modeling/FileAssertTextAssert.cs" # implementation + - "test/**/Modeling/FileAssertTextAssertTests.cs" # unit tests # FileAssert-Modeling-FileAssertPdfAssert Review (one per unit) - id: FileAssert-Modeling-FileAssertPdfAssert title: Review that FileAssert Modeling FileAssertPdfAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-pdf-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-pdf-assert.md" # design - - "src/**/Modeling/FileAssertPdfAssert.cs" # implementation - - "test/**/Modeling/FileAssertPdfAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-pdf-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-pdf-assert.md" # design + - "docs/verification/file-assert/modeling/file-assert-pdf-assert.md" # verification + - "src/**/Modeling/FileAssertPdfAssert.cs" # implementation + - "test/**/Modeling/FileAssertPdfAssertTests.cs" # unit tests # FileAssert-Modeling-FileAssertXmlAssert Review (one per unit) - id: FileAssert-Modeling-FileAssertXmlAssert title: Review that FileAssert Modeling FileAssertXmlAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-xml-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-xml-assert.md" # design - - "src/**/Modeling/FileAssertXmlAssert.cs" # implementation - - "test/**/Modeling/FileAssertXmlAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-xml-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-xml-assert.md" # 
design + - "docs/verification/file-assert/modeling/file-assert-xml-assert.md" # verification + - "src/**/Modeling/FileAssertXmlAssert.cs" # implementation + - "test/**/Modeling/FileAssertXmlAssertTests.cs" # unit tests # FileAssert-Modeling-FileAssertHtmlAssert Review (one per unit) - id: FileAssert-Modeling-FileAssertHtmlAssert title: Review that FileAssert Modeling FileAssertHtmlAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-html-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-html-assert.md" # design - - "src/**/Modeling/FileAssertHtmlAssert.cs" # implementation - - "test/**/Modeling/FileAssertHtmlAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-html-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-html-assert.md" # design + - "docs/verification/file-assert/modeling/file-assert-html-assert.md" # verification + - "src/**/Modeling/FileAssertHtmlAssert.cs" # implementation + - "test/**/Modeling/FileAssertHtmlAssertTests.cs" # unit tests # FileAssert-Modeling-FileAssertYamlAssert Review (one per unit) - id: FileAssert-Modeling-FileAssertYamlAssert title: Review that FileAssert Modeling FileAssertYamlAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-yaml-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-yaml-assert.md" # design - - "src/**/Modeling/FileAssertYamlAssert.cs" # implementation - - "test/**/Modeling/FileAssertYamlAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-yaml-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-yaml-assert.md" # design + - "docs/verification/file-assert/modeling/file-assert-yaml-assert.md" # verification + - "src/**/Modeling/FileAssertYamlAssert.cs" # implementation + - "test/**/Modeling/FileAssertYamlAssertTests.cs" # unit tests # 
FileAssert-Modeling-FileAssertJsonAssert Review (one per unit) - id: FileAssert-Modeling-FileAssertJsonAssert title: Review that FileAssert Modeling FileAssertJsonAssert Implementation is Correct paths: - - "docs/reqstream/file-assert/modeling/file-assert-json-assert.yaml" # requirements - - "docs/design/file-assert/modeling/file-assert-json-assert.md" # design - - "src/**/Modeling/FileAssertJsonAssert.cs" # implementation - - "test/**/Modeling/FileAssertJsonAssertTests.cs" # unit tests + - "docs/reqstream/file-assert/modeling/file-assert-json-assert.yaml" # requirements + - "docs/design/file-assert/modeling/file-assert-json-assert.md" # design + - "docs/verification/file-assert/modeling/file-assert-json-assert.md" # verification + - "src/**/Modeling/FileAssertJsonAssert.cs" # implementation + - "test/**/Modeling/FileAssertJsonAssertTests.cs" # unit tests + + # FileAssert-OTS-BuildMark Review + - id: FileAssert-OTS-BuildMark + title: Review FileAssert OTS BuildMark Requirements and Verification + paths: + - "docs/reqstream/ots/buildmark.yaml" # OTS requirements + - "docs/verification/ots/buildmark.md" # OTS verification + + # FileAssert-OTS-FileAssert Review + - id: FileAssert-OTS-FileAssert + title: Review FileAssert OTS FileAssert Requirements and Verification + paths: + - "docs/reqstream/ots/fileassert.yaml" # OTS requirements + - "docs/verification/ots/fileassert.md" # OTS verification + + # FileAssert-OTS-Pandoc Review + - id: FileAssert-OTS-Pandoc + title: Review FileAssert OTS Pandoc Requirements and Verification + paths: + - "docs/reqstream/ots/pandoc.yaml" # OTS requirements + - "docs/verification/ots/pandoc.md" # OTS verification + + # FileAssert-OTS-ReqStream Review + - id: FileAssert-OTS-ReqStream + title: Review FileAssert OTS ReqStream Requirements and Verification + paths: + - "docs/reqstream/ots/reqstream.yaml" # OTS requirements + - "docs/verification/ots/reqstream.md" # OTS verification + + # FileAssert-OTS-ReviewMark Review + - id: 
FileAssert-OTS-ReviewMark + title: Review FileAssert OTS ReviewMark Requirements and Verification + paths: + - "docs/reqstream/ots/reviewmark.yaml" # OTS requirements + - "docs/verification/ots/reviewmark.md" # OTS verification + + # FileAssert-OTS-SarifMark Review + - id: FileAssert-OTS-SarifMark + title: Review FileAssert OTS SarifMark Requirements and Verification + paths: + - "docs/reqstream/ots/sarifmark.yaml" # OTS requirements + - "docs/verification/ots/sarifmark.md" # OTS verification + + # FileAssert-OTS-SonarMark Review + - id: FileAssert-OTS-SonarMark + title: Review FileAssert OTS SonarMark Requirements and Verification + paths: + - "docs/reqstream/ots/sonarmark.yaml" # OTS requirements + - "docs/verification/ots/sonarmark.md" # OTS verification + + # FileAssert-OTS-VersionMark Review + - id: FileAssert-OTS-VersionMark + title: Review FileAssert OTS VersionMark Requirements and Verification + paths: + - "docs/reqstream/ots/versionmark.yaml" # OTS requirements + - "docs/verification/ots/versionmark.md" # OTS verification + + # FileAssert-OTS-WeasyPrint Review + - id: FileAssert-OTS-WeasyPrint + title: Review FileAssert OTS WeasyPrint Requirements and Verification + paths: + - "docs/reqstream/ots/weasyprint.yaml" # OTS requirements + - "docs/verification/ots/weasyprint.md" # OTS verification + + # FileAssert-OTS-xUnit Review + - id: FileAssert-OTS-xUnit + title: Review FileAssert OTS xUnit Requirements and Verification + paths: + - "docs/reqstream/ots/xunit.yaml" # OTS requirements + - "docs/verification/ots/xunit.md" # OTS verification diff --git a/AGENTS.md b/AGENTS.md index 2207f6b..f0f4b59 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,3 +1,10 @@ +# Project Overview + +- **name**: FileAssert +- **description**: A .NET command-line application for asserting file properties using YAML-defined test suites +- **languages**: C# +- **technologies**: .NET, YamlDotNet, HtmlAgilityPack, PdfPig, System.Xml.Linq, System.Text.Json, xUnit + # Project Structure 
```text @@ -10,7 +17,8 @@ │ ├── requirements_doc/ │ ├── requirements_report/ │ ├── reqstream/ -│ └── user_guide/ +│ ├── user_guide/ +│ └── verification/ ├── src/ │ └── DemaConsulting.FileAssert/ └── test/ @@ -45,16 +53,17 @@ before searching the filesystem. Before performing any work, agents must read and apply the relevant standards from `.github/standards/`. Use this matrix to determine which to load: -| Work involves... | Load these standards | -|----------------------|------------------------------------------------------------------------------| -| Any code | `coding-principles.md` | -| C# code | `coding-principles.md`, `csharp-language.md` | -| Any tests | `testing-principles.md` | -| C# tests | `testing-principles.md`, `csharp-testing.md` | -| Requirements | `requirements-principles.md`, `software-items.md`, `reqstream-usage.md` | -| Design docs | `software-items.md`, `design-documentation.md`, `technical-documentation.md` | -| Review configuration | `software-items.md`, `reviewmark-usage.md` | -| Any documentation | `technical-documentation.md` | +| Work involves... | Load these standards | +|----------------------|------------------------------------------------------------------------------------| +| Any code | `coding-principles.md` | +| C# code | `coding-principles.md`, `csharp-language.md` | +| Any tests | `testing-principles.md` | +| C# tests | `testing-principles.md`, `csharp-testing.md` | +| Requirements | `requirements-principles.md`, `software-items.md`, `reqstream-usage.md` | +| Design docs | `software-items.md`, `design-documentation.md`, `technical-documentation.md` | +| Verification docs | `software-items.md`, `verification-documentation.md`, `technical-documentation.md` | +| Review configuration | `software-items.md`, `reviewmark-usage.md` | +| Any documentation | `technical-documentation.md` | Load only the standards relevant to your specific task scope. 
@@ -69,26 +78,10 @@ Delegate to specialized agents only for specific scenarios: - **Formal feature implementation** (complex, multi-step) → Call the implementation agent - **Formal bug resolution** (complex debugging, systematic fixes) → Call the implementation agent - **Formal reviews** (compliance verification, detailed analysis) → Call the formal-review agent -- **Template consistency** (downstream repository alignment) → Call the repo-consistency agent - -## Available Specialized Agents - -- **lint-fix** - Pre-PR lint sweep agent that loops running `pwsh ./lint.ps1`, - fixing issues until the repository is lint-clean -- **developer** - General-purpose software development agent that applies appropriate - standards based on the work being performed -- **formal-review** - Agent for performing formal reviews using standardized review processes -- **implementation** - Orchestrator agent that manages quality implementations - through a formal state machine workflow -- **quality** - Quality assurance agent that grades developer work against project - standards and Continuous Compliance practices -- **repo-consistency** - Ensures downstream repositories remain consistent with - the TemplateDotNetTool template patterns and best practices # Agent Reporting (Specialized Agents Must Follow) -Specialized agents (lint-fix, developer, quality, implementation, -formal-review, repo-consistency) MUST generate a completion report: +Specialized agents MUST generate a completion report: 1. Save to `.agent-logs/{agent-name}-{subject}-{unique-id}.md` where `{subject}` is a kebab-case task summary (max 5 words) and @@ -124,6 +117,8 @@ reqstream, versionmark, and reviewmark. 
# Scope Discipline (ALL Agents Must Follow) +- **No generated file access**: Files inside any `generated/` folder are build + outputs — do not read, lint, or modify them - **Minimum necessary changes**: Only modify files directly required by the task - **No speculative refactoring**: Do not refactor code adjacent to the change unless the task explicitly requests it diff --git a/docs/design/file-assert/ots-dependencies.md b/docs/design/file-assert/ots-dependencies.md index 89cd579..78d7b02 100644 --- a/docs/design/file-assert/ots-dependencies.md +++ b/docs/design/file-assert/ots-dependencies.md @@ -80,7 +80,7 @@ VersionMark reads the version from a central configuration file and propagates i all artifacts produced by the build. This eliminates manual version updates and ensures consistency between the NuGet package version, assembly version, and release tag. -### MSTest +### xUnit | Attribute | Value | | :----------- | :----------------------------------------------------------------------------------- | @@ -88,7 +88,7 @@ consistency between the NuGet package version, assembly version, and release tag | Role | Provides the test runner, assertion library, and TRX result output used by all tests | | Verification | All tests must pass; TRX files are consumed by ReqStream for traceability | -MSTest (version 4) is the unit test framework for all C# tests in this repository. -It provides `[TestClass]`, `[TestMethod]`, and the assertion methods used throughout +xUnit (version 3) is the unit test framework for all C# tests in this repository. +It provides `[Fact]`, `[Collection]`, and the assertion methods used throughout the test suite. TRX output format is enabled so that ReqStream can parse test results and verify requirements coverage. 
diff --git a/docs/reqstream/ots/mstest.yaml b/docs/reqstream/ots/mstest.yaml deleted file mode 100644 index 6a35c8c..0000000 --- a/docs/reqstream/ots/mstest.yaml +++ /dev/null @@ -1,27 +0,0 @@ ---- -# MSTest OTS Software Requirements -# -# Requirements for the MSTest testing framework functionality. - -sections: - - title: OTS Software Requirements - sections: - - title: MSTest Requirements - requirements: - - id: FileAssert-OTS-MSTest - title: MSTest shall execute unit tests and report results. - justification: | - MSTest (MSTest.TestFramework and MSTest.TestAdapter) is the unit-testing framework used - by the project. It discovers and runs all test methods and writes TRX result files that - feed into coverage reporting and requirements traceability. Passing tests confirm the - framework is functioning correctly. - tags: [ots] - tests: - - Context_Create_NoArguments_ReturnsDefaultContext - - Context_Create_VersionFlag_SetsVersionTrue - - Context_Create_SilentFlag_SetsSilentTrue - - Context_Create_LogFlag_OpensLogFile - - Context_Create_UnknownArgument_ThrowsArgumentException - - PathHelpers_SafePathCombine_ValidPaths_CombinesCorrectly - - Program_Run_WithVersionFlag_DisplaysVersionOnly - - Validation_Run_WithSilentContext_PrintsSummary diff --git a/docs/reqstream/ots/pandoc.yaml b/docs/reqstream/ots/pandoc.yaml index 0071219..2517f4f 100644 --- a/docs/reqstream/ots/pandoc.yaml +++ b/docs/reqstream/ots/pandoc.yaml @@ -23,4 +23,5 @@ sections: - Pandoc_ReviewPlanHtml - Pandoc_ReviewReportHtml - Pandoc_DesignHtml + - Pandoc_VerificationHtml - Pandoc_UserGuideHtml diff --git a/docs/reqstream/ots/weasyprint.yaml b/docs/reqstream/ots/weasyprint.yaml index cc691b4..054bbf8 100644 --- a/docs/reqstream/ots/weasyprint.yaml +++ b/docs/reqstream/ots/weasyprint.yaml @@ -23,4 +23,5 @@ sections: - WeasyPrint_ReviewPlanPdf - WeasyPrint_ReviewReportPdf - WeasyPrint_DesignPdf + - WeasyPrint_VerificationPdf - WeasyPrint_UserGuidePdf diff --git a/docs/reqstream/ots/xunit.yaml 
b/docs/reqstream/ots/xunit.yaml new file mode 100644 index 0000000..1382b8f --- /dev/null +++ b/docs/reqstream/ots/xunit.yaml @@ -0,0 +1,43 @@ +--- +# xUnit OTS Software Requirements +# +# Requirements for the xUnit testing framework functionality. + +sections: + - title: OTS Software Requirements + sections: + - title: xUnit Requirements + requirements: + - id: FileAssert-OTS-xUnit-Execute + title: xUnit shall discover and execute all unit tests. + justification: | + xUnit (xunit.v3 and xunit.runner.visualstudio) is the unit-testing framework used + by the project. It discovers and runs all test methods annotated with [Fact] and + writes TRX result files that feed into coverage reporting and requirements + traceability. Passing tests confirm the framework is functioning correctly. + tags: [ots] + tests: + - Context_Create_NoArguments_ReturnsDefaultContext + - Context_Create_VersionFlag_SetsVersionTrue + - Context_Create_SilentFlag_SetsSilentTrue + - Context_Create_LogFlag_OpensLogFile + - Context_Create_UnknownArgument_ThrowsArgumentException + - PathHelpers_SafePathCombine_ValidPaths_CombinesCorrectly + - Program_Run_WithVersionFlag_DisplaysVersionOnly + - Validation_Run_WithSilentContext_PrintsSummary + - id: FileAssert-OTS-xUnit-Report + title: xUnit shall produce TRX result files for requirements traceability. + justification: | + xUnit with xunit.runner.visualstudio produces TRX output format files that are + consumed by ReqStream to verify requirements coverage. Each passing test provides + evidence for the requirements it covers, enabling continuous compliance traceability.
+ tags: [ots] + tests: + - Context_Create_NoArguments_ReturnsDefaultContext + - Context_Create_VersionFlag_SetsVersionTrue + - Context_Create_SilentFlag_SetsSilentTrue + - Context_Create_LogFlag_OpensLogFile + - Context_Create_UnknownArgument_ThrowsArgumentException + - PathHelpers_SafePathCombine_ValidPaths_CombinesCorrectly + - Program_Run_WithVersionFlag_DisplaysVersionOnly + - Validation_Run_WithSilentContext_PrintsSummary diff --git a/docs/verification/definition.yaml b/docs/verification/definition.yaml new file mode 100644 index 0000000..2f2e26e --- /dev/null +++ b/docs/verification/definition.yaml @@ -0,0 +1,49 @@ +--- +resource-path: + - docs/verification + - docs/verification/file-assert + - docs/verification/file-assert/cli + - docs/verification/file-assert/configuration + - docs/verification/file-assert/modeling + - docs/verification/file-assert/utilities + - docs/verification/file-assert/selftest + - docs/verification/ots + - docs/template + +input-files: + - docs/verification/title.txt + - docs/verification/introduction.md + - docs/verification/file-assert/file-assert.md + - docs/verification/file-assert/program.md + - docs/verification/file-assert/cli/cli.md + - docs/verification/file-assert/cli/context.md + - docs/verification/file-assert/configuration/configuration.md + - docs/verification/file-assert/configuration/file-assert-config.md + - docs/verification/file-assert/configuration/file-assert-data.md + - docs/verification/file-assert/modeling/modeling.md + - docs/verification/file-assert/modeling/file-assert-test.md + - docs/verification/file-assert/modeling/file-assert-file.md + - docs/verification/file-assert/modeling/file-assert-rule.md + - docs/verification/file-assert/modeling/file-assert-text-assert.md + - docs/verification/file-assert/modeling/file-assert-pdf-assert.md + - docs/verification/file-assert/modeling/file-assert-xml-assert.md + - docs/verification/file-assert/modeling/file-assert-html-assert.md + - 
docs/verification/file-assert/modeling/file-assert-yaml-assert.md + - docs/verification/file-assert/modeling/file-assert-json-assert.md + - docs/verification/file-assert/utilities/utilities.md + - docs/verification/file-assert/utilities/path-helpers.md + - docs/verification/file-assert/selftest/selftest.md + - docs/verification/file-assert/selftest/validation.md + - docs/verification/ots/buildmark.md + - docs/verification/ots/fileassert.md + - docs/verification/ots/pandoc.md + - docs/verification/ots/reqstream.md + - docs/verification/ots/reviewmark.md + - docs/verification/ots/sarifmark.md + - docs/verification/ots/sonarmark.md + - docs/verification/ots/versionmark.md + - docs/verification/ots/weasyprint.md + - docs/verification/ots/xunit.md +template: template.html +table-of-contents: true +number-sections: true diff --git a/docs/verification/file-assert/cli/cli.md b/docs/verification/file-assert/cli/cli.md new file mode 100644 index 0000000..77b6424 --- /dev/null +++ b/docs/verification/file-assert/cli/cli.md @@ -0,0 +1,74 @@ +# Cli Subsystem Verification + +This document describes the subsystem-level verification design for the `Cli` subsystem. It +defines the integration test approach, subsystem boundary, mocking strategy, and test scenarios +that together verify the `Cli` subsystem requirements. + +## Verification Approach + +The `Cli` subsystem boundary at `Program` is verified by integration tests defined in +`CliTests.cs`. Each test exercises `Context.Create` and `Program.Run` together, treating the pair +as the observable subsystem interface. Tests pass controlled argument arrays and assert on captured +console output, file system side-effects, and exit codes. + +## Dependencies and Mocking Strategy + +At the subsystem boundary, `Validation` (part of the `SelfTest` subsystem) is the only external +collaborator that `Program` calls. In scenarios that exercise the `--validate` path, `Validation` +executes its real logic rather than being stubbed. 
Scenarios that do not involve `--validate` do +not reach `Validation` at all. + +No mocking is applied at this level; all collaborators within and directly adjacent to the +subsystem use their real implementations. + +## Integration Test Scenarios + +The following integration test scenarios are defined in `CliTests.cs`. + +### Cli_CreateContext_ParsesSilentValidateAndLogFlags + +**Scenario**: Arguments containing `--silent`, `--validate`, and `--log <file>` flags are passed +through `Context.Create`. + +**Expected**: All three flags are correctly parsed; exit code is 0. + +### Cli_CreateContext_ParsesVersionHelpConfigResultsFlags + +**Scenario**: Arguments containing `--version`, `--help`, `--config <file>`, and +`--results <file>` flags are passed through `Context.Create`. + +**Expected**: All four flags are correctly parsed; exit code is 0. + +### Cli_CreateContext_WithFilters_ParsesPositionalArguments + +**Scenario**: Positional arguments (test filters) are passed through `Context.Create`. + +**Expected**: The filters list contains the expected values; exit code is 0. + +### Cli_CreateContext_UnknownArgument_ThrowsArgumentException + +**Scenario**: An unrecognized argument is passed through `Context.Create`. + +**Expected**: An `ArgumentException` is thrown. + +### Cli_WriteError_ChangesExitCodeToOne + +**Scenario**: `Context.WriteError` is called with an error message. + +**Expected**: `ExitCode` becomes 1 after the call. + +### Cli_OutputPipeline_WritesMessagesToLogFile + +**Scenario**: A context with both `--silent` and `--log <file>` flags is created; +`Context.WriteLine` is called with a message. + +**Expected**: The message appears in the log file; exit code is 0.
+ +## Requirements Coverage + +- **Argument parsing**: Cli_CreateContext_ParsesSilentValidateAndLogFlags, + Cli_CreateContext_ParsesVersionHelpConfigResultsFlags, + Cli_CreateContext_WithFilters_ParsesPositionalArguments +- **Unknown argument rejection**: Cli_CreateContext_UnknownArgument_ThrowsArgumentException +- **Error exit code**: Cli_WriteError_ChangesExitCodeToOne +- **Log file output**: Cli_OutputPipeline_WritesMessagesToLogFile diff --git a/docs/verification/file-assert/cli/context.md b/docs/verification/file-assert/cli/context.md new file mode 100644 index 0000000..0d5c3f6 --- /dev/null +++ b/docs/verification/file-assert/cli/context.md @@ -0,0 +1,331 @@ +# Context Verification + +This document describes the unit-level verification design for the `Context` unit. It defines the +test scenarios, dependency usage, and requirement coverage for `Cli/Context.cs`. + +## Verification Approach + +`Context` is verified with unit tests defined in `ContextTests.cs` and +`ContextNewPropertiesTests.cs`. Because `Context` depends only on .NET base class library types +(`Console`, `StreamWriter`, `Path`), no mocking or test doubles are required. Tests call +`Context.Create` with controlled argument arrays, inspect the resulting properties and exit codes, +and verify output written to captured streams. + +## Dependencies + +`Context` has no dependencies on other tool units. All dependencies are real .NET BCL types; +no mocking is needed at this level. + +## Test Scenarios + +### Context_Create_NoArguments_ReturnsDefaultContext + +**Scenario**: `Context.Create` is called with an empty argument array. + +**Expected**: All boolean flags are false; `ResultsFile` is null; exit code is 0. + +**Requirement coverage**: Default context creation requirement. + +### Context_Create_VersionFlag_SetsVersionTrue + +**Scenario**: `Context.Create` is called with `["--version"]`. + +**Expected**: `Version` property is true. + +**Requirement coverage**: Version flag parsing requirement. 
+ +### Context_Create_ShortVersionFlag_SetsVersionTrue + +**Scenario**: `Context.Create` is called with `["-v"]`. + +**Expected**: `Version` property is true. + +**Requirement coverage**: Short version flag parsing requirement. + +### Context_Create_HelpFlag_SetsHelpTrue + +**Scenario**: `Context.Create` is called with `["--help"]`. + +**Expected**: `Help` property is true. + +**Requirement coverage**: Help flag (long form) parsing requirement. + +### Context_Create_ShortHelpFlag_H_SetsHelpTrue + +**Scenario**: `Context.Create` is called with `["-h"]`. + +**Expected**: `Help` property is true. + +**Requirement coverage**: Help flag (-h) parsing requirement. + +### Context_Create_ShortHelpFlag_Question_SetsHelpTrue + +**Scenario**: `Context.Create` is called with `["-?"]`. + +**Expected**: `Help` property is true. + +**Requirement coverage**: Help flag (-?) parsing requirement. + +### Context_Create_SilentFlag_SetsSilentTrue + +**Scenario**: `Context.Create` is called with `["--silent"]`. + +**Expected**: `Silent` property is true. + +**Requirement coverage**: Silent flag parsing requirement. + +### Context_Create_ValidateFlag_SetsValidateTrue + +**Scenario**: `Context.Create` is called with `["--validate"]`. + +**Expected**: `Validate` property is true. + +**Requirement coverage**: Validate flag parsing requirement. + +### Context_Create_ResultsFlag_SetsResultsFile + +**Scenario**: `Context.Create` is called with `["--results", "output.trx"]`. + +**Expected**: `ResultsFile` property equals `"output.trx"`. + +**Requirement coverage**: Results file path parsing requirement. + +### Context_Create_ResultAliasFlag_SetsResultsFile + +**Scenario**: `Context.Create` is called with `["--result", "output.trx"]` (legacy alias). + +**Expected**: `ResultsFile` property equals `"output.trx"`, identical to the `--results` flag. + +**Requirement coverage**: Results alias flag parsing requirement. 
+ +### Context_Create_LogFlag_OpensLogFile + +**Scenario**: `Context.Create` is called with `["--log", ".log"]`; `WriteLine` is then called +with a test message. + +**Expected**: The log file is created; the test message is written to it. + +**Requirement coverage**: Log file opening and writing requirement. + +### Context_Create_UnknownArgument_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with an unrecognized argument (e.g., `["--unknown"]`). + +**Expected**: An `ArgumentException` is thrown containing the text "Unsupported argument". + +**Requirement coverage**: Unknown argument rejection requirement. + +### Context_Create_LogFlag_WithoutValue_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--log"]` (value missing). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Log flag missing-value validation requirement. + +### Context_Create_ResultsFlag_WithoutValue_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--results"]` (value missing). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Results flag missing-value validation requirement. + +### Context_WriteLine_NotSilent_WritesToConsole + +**Scenario**: A non-silent `Context` is created and `WriteLine` is called with a test message. + +**Expected**: The test message appears on standard output. + +**Requirement coverage**: Normal output writing requirement. + +### Context_WriteLine_Silent_DoesNotWriteToConsole + +**Scenario**: A silent `Context` (created with `["--silent"]`) calls `WriteLine`. + +**Expected**: Standard output receives nothing. + +**Requirement coverage**: Silent mode suppression requirement. + +### Context_WriteError_Silent_DoesNotWriteToConsole + +**Scenario**: A silent `Context` calls `WriteError`. + +**Expected**: Standard error receives nothing. + +**Requirement coverage**: Silent mode error suppression requirement. 
+ +### Context_WriteError_SetsErrorExitCode + +**Scenario**: A `Context` calls `WriteError`. + +**Expected**: `ExitCode` is 1 after the call. + +**Requirement coverage**: Error exit code setting requirement. + +### Context_WriteError_NotSilent_WritesToConsole + +**Scenario**: A non-silent `Context` calls `WriteError` with a test message. + +**Expected**: The test message appears on standard error. + +**Requirement coverage**: Error output writing requirement. + +### Context_WriteError_WritesToLogFile + +**Scenario**: A `Context` created with `["--silent", "--log", ".log"]` calls `WriteError` +with a test message. + +**Expected**: The test message appears in the log file. + +**Requirement coverage**: Error log writing requirement. + +### Context_ErrorCount_IncrementsOnEachWriteError + +**Scenario**: `WriteError` is called multiple times on the same `Context`. + +**Expected**: `ErrorCount` increments by one for each call. + +**Requirement coverage**: Error count tracking requirement. + +### Context_Create_DepthFlag_SetsDepth + +**Scenario**: `Context.Create` is called with `["--depth", "3"]`. + +**Expected**: `HeadingDepth` property equals 3. + +**Requirement coverage**: Depth flag parsing requirement. + +### Context_Create_NoArguments_DepthDefaultsToOne + +**Scenario**: `Context.Create` is called with an empty argument array. + +**Expected**: `HeadingDepth` property equals 1 (the default). + +**Requirement coverage**: Default heading depth requirement. + +### Context_Create_DepthFlag_WithoutValue_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--depth"]` (value missing). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Depth flag missing-value validation requirement. + +### Context_Create_DepthFlag_NonNumeric_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--depth", "abc"]`. + +**Expected**: An `ArgumentException` is thrown. 
+ +**Requirement coverage**: Depth flag non-integer validation requirement. + +### Context_Create_DepthFlag_Zero_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--depth", "0"]` (below minimum of 1). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Depth flag minimum-value validation requirement. + +### Context_Create_DepthFlag_AboveSix_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--depth", "7"]` (above maximum of 6). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Depth flag maximum-value validation requirement. + +### Context_Create_NoArguments_ConfigFileHasDefaultValue + +**Scenario**: `Context.Create` is called with an empty argument array. + +**Expected**: `ConfigFile` property has the default value. + +**Requirement coverage**: Default config file path requirement. + +### Context_Create_NoArguments_FiltersIsEmpty + +**Scenario**: `Context.Create` is called with an empty argument array. + +**Expected**: `Filters` collection is empty. + +**Requirement coverage**: Default filters requirement. + +### Context_Create_ConfigFlag_SetsConfigFile + +**Scenario**: `Context.Create` is called with `["--config", "my.yaml"]`. + +**Expected**: `ConfigFile` property equals `"my.yaml"`. + +**Requirement coverage**: Config file flag parsing requirement. + +### Context_Create_PositionalArguments_AddedToFilters + +**Scenario**: `Context.Create` is called with positional arguments (e.g., `["TestA", "TestB"]`). + +**Expected**: `Filters` contains `["TestA", "TestB"]`. + +**Requirement coverage**: Test filter parsing requirement. + +### Context_Create_MixedArguments_ParsesCorrectly + +**Scenario**: `Context.Create` is called with a mix of flags and positional arguments. + +**Expected**: All flags and positional arguments are correctly parsed. + +**Requirement coverage**: Mixed argument parsing requirement. 
+ +### Context_Create_UnknownFlagWithDash_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with an unrecognized flag starting with `--`. + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Unknown flag rejection requirement. + +### Context_Create_ConfigFlag_WithoutValue_ThrowsArgumentException + +**Scenario**: `Context.Create` is called with `["--config"]` (value missing). + +**Expected**: An `ArgumentException` is thrown. + +**Requirement coverage**: Config flag missing-value validation requirement. + +## Requirements Coverage + +| Requirement | Test Scenario | +|--------------------------------|-----------------------------------------------------------------| +| Default context creation | Context_Create_NoArguments_ReturnsDefaultContext | +| --version flag parsing | Context_Create_VersionFlag_SetsVersionTrue | +| -v flag parsing | Context_Create_ShortVersionFlag_SetsVersionTrue | +| --help flag parsing | Context_Create_HelpFlag_SetsHelpTrue | +| -h flag parsing | Context_Create_ShortHelpFlag_H_SetsHelpTrue | +| -? 
flag parsing | Context_Create_ShortHelpFlag_Question_SetsHelpTrue | +| --silent flag parsing | Context_Create_SilentFlag_SetsSilentTrue | +| --validate flag parsing | Context_Create_ValidateFlag_SetsValidateTrue | +| --results flag parsing | Context_Create_ResultsFlag_SetsResultsFile | +| --result alias parsing | Context_Create_ResultAliasFlag_SetsResultsFile | +| --log flag and file writing | Context_Create_LogFlag_OpensLogFile | +| Unknown argument rejection | Context_Create_UnknownArgument_ThrowsArgumentException | +| --log missing value | Context_Create_LogFlag_WithoutValue_ThrowsArgumentException | +| --results missing value | Context_Create_ResultsFlag_WithoutValue_ThrowsArgumentException | +| Normal output writing | Context_WriteLine_NotSilent_WritesToConsole | +| Silent mode output suppression | Context_WriteLine_Silent_DoesNotWriteToConsole | +| Silent mode error suppression | Context_WriteError_Silent_DoesNotWriteToConsole | +| Error exit code | Context_WriteError_SetsErrorExitCode | +| Error output to stderr | Context_WriteError_NotSilent_WritesToConsole | +| Error writing to log file | Context_WriteError_WritesToLogFile | +| Error count tracking | Context_ErrorCount_IncrementsOnEachWriteError | +| --depth flag parsing | Context_Create_DepthFlag_SetsDepth | +| Default heading depth | Context_Create_NoArguments_DepthDefaultsToOne | +| --depth missing value | Context_Create_DepthFlag_WithoutValue_ThrowsArgumentException | +| --depth non-integer value | Context_Create_DepthFlag_NonNumeric_ThrowsArgumentException | +| --depth zero value (min 1) | Context_Create_DepthFlag_Zero_ThrowsArgumentException | +| --depth exceeds maximum (max 6)| Context_Create_DepthFlag_AboveSix_ThrowsArgumentException | +| --config flag parsing | Context_Create_ConfigFlag_SetsConfigFile | +| Default config file path | Context_Create_NoArguments_ConfigFileHasDefaultValue | +| Test filter parsing | Context_Create_PositionalArguments_AddedToFilters | +| Mixed argument parsing | 
Context_Create_MixedArguments_ParsesCorrectly | +| Unknown flag rejection | Context_Create_UnknownFlagWithDash_ThrowsArgumentException | +| --config missing value | Context_Create_ConfigFlag_WithoutValue_ThrowsArgumentException | diff --git a/docs/verification/file-assert/configuration/configuration.md b/docs/verification/file-assert/configuration/configuration.md new file mode 100644 index 0000000..2b35b5b --- /dev/null +++ b/docs/verification/file-assert/configuration/configuration.md @@ -0,0 +1,49 @@ +# Configuration Subsystem Verification + +This document describes the subsystem-level verification design for the `Configuration` subsystem. +It defines the integration test approach, subsystem boundary, mocking strategy, and test scenarios +that together verify the `Configuration` subsystem requirements. + +## Verification Approach + +The `Configuration` subsystem is verified by integration tests defined in `ConfigurationTests.cs`. +Each test exercises the full loading and execution pipeline — reading a YAML file, constructing +the test hierarchy, and running the resulting configuration — with a real `Context`. + +## Dependencies and Mocking Strategy + +All collaborators at the subsystem boundary (`Context`, `FileAssertConfig`, `PathHelpers`) use +their real implementations. Temporary directories are used for configuration files and test +artifacts so that tests remain isolated and leave no permanent file-system side-effects. + +## Integration Test Scenarios + +The following integration test scenarios are defined in `ConfigurationTests.cs`. + +### Configuration_LoadYaml_BuildsCompleteTestHierarchy + +**Scenario**: A YAML configuration file with nested test, file, and rule entries is loaded using +`FileAssertConfig.ReadFromFile`. + +**Expected**: The complete object hierarchy (tests → files → rules) is correctly constructed with +all properties populated. 
+ +### Configuration_RunWithFilter_ExecutesOnlyMatchingTests + +**Scenario**: A configuration with two tests is loaded. Only one file exists; a filter naming one +test is passed to `FileAssertConfig.Run`. + +**Expected**: Only the named test runs; exit code is 0. + +### Configuration_RunWithTagFilter_ExecutesOnlyMatchingTests + +**Scenario**: A configuration with two tests with different tags is loaded. Only one file exists; +a filter naming one tag is passed to `FileAssertConfig.Run`. + +**Expected**: Only the test matching the tag runs; exit code is 0. + +## Requirements Coverage + +- **YAML loading and hierarchy construction**: Configuration_LoadYaml_BuildsCompleteTestHierarchy +- **Test name filtering**: Configuration_RunWithFilter_ExecutesOnlyMatchingTests +- **Tag filtering**: Configuration_RunWithTagFilter_ExecutesOnlyMatchingTests diff --git a/docs/verification/file-assert/configuration/file-assert-config.md b/docs/verification/file-assert/configuration/file-assert-config.md new file mode 100644 index 0000000..8f81247 --- /dev/null +++ b/docs/verification/file-assert/configuration/file-assert-config.md @@ -0,0 +1,107 @@ +# FileAssertConfig Verification + +This document describes the unit-level verification design for the `FileAssertConfig` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Configuration/FileAssertConfig.cs`. + +## Verification Approach + +`FileAssertConfig` is verified with unit tests defined in `FileAssertConfigTests.cs`. Tests supply +YAML configuration files in temporary directories and assert on the resulting object state, exit +codes, and results files. + +## Dependencies + +| Dependency | Usage in Tests | +|----------------|--------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | +| `PathHelpers` | Used internally by `FileAssertConfig`; not mocked. 
| + +## Test Scenarios + +### FileAssertConfig_ReadFromFile_ValidFile_ReturnsConfig + +**Scenario**: `FileAssertConfig.ReadFromFile` is called with a valid YAML file path. + +**Expected**: A non-null `FileAssertConfig` instance is returned with the correct properties. + +**Requirement coverage**: Configuration file reading requirement. + +### FileAssertConfig_ReadFromFile_FileNotFound_ThrowsFileNotFoundException + +**Scenario**: `FileAssertConfig.ReadFromFile` is called with a path that does not exist. + +**Expected**: A `FileNotFoundException` is thrown. + +**Boundary / error path**: Missing configuration file error path. + +### FileAssertConfig_ReadFromFile_NullPath_ThrowsArgumentNullException + +**Scenario**: `FileAssertConfig.ReadFromFile` is called with `null` as the path. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null guard on the path parameter. + +### FileAssertConfig_Run_WithNoFilter_RunsAllTests + +**Scenario**: `FileAssertConfig.Run` is called with an empty filter list. + +**Expected**: All tests in the configuration are executed; exit code reflects pass or fail. + +**Requirement coverage**: Run-all-tests requirement. + +### FileAssertConfig_Run_WithMatchingFilter_RunsMatchingTest + +**Scenario**: `FileAssertConfig.Run` is called with a filter that matches one test name. + +**Expected**: Only the matching test runs; exit code reflects the result of that test. + +**Requirement coverage**: Test name filtering requirement. + +### FileAssertConfig_Run_WithNonMatchingFilter_SkipsTests + +**Scenario**: `FileAssertConfig.Run` is called with a filter that matches no tests. + +**Expected**: No tests run; exit code is 0. + +**Requirement coverage**: Non-matching filter skips all tests requirement. + +### FileAssertConfig_Run_WithResultsFile_WritesTrxWithPassedOutcome + +**Scenario**: `FileAssertConfig.Run` is called with a context whose `ResultsFile` points to a +temporary `.trx` path, and all assertions pass. 
+ +**Expected**: A TRX file is created; it contains a passing result entry. + +**Requirement coverage**: TRX results output requirement. + +### FileAssertConfig_Run_WithResultsFile_WritesJUnitWithFailedOutcome + +**Scenario**: `FileAssertConfig.Run` is called with a context whose `ResultsFile` points to a +temporary `.xml` path, and at least one assertion fails. + +**Expected**: A JUnit XML file is created; it contains a failing result entry. + +**Requirement coverage**: JUnit results output requirement. + +### FileAssertConfig_ReadFromFile_PdfAssertConfig_ParsesCorrectly + +**Scenario**: `FileAssertConfig.ReadFromFile` is called with a YAML file that includes PDF +assertion configuration (pages, metadata, text rules). + +**Expected**: The PDF assertion config is correctly deserialized with all fields populated. + +**Requirement coverage**: PDF assertion configuration parsing requirement. + +## Requirements Coverage + +- **Configuration file reading**: FileAssertConfig_ReadFromFile_ValidFile_ReturnsConfig +- **Missing file error path**: FileAssertConfig_ReadFromFile_FileNotFound_ThrowsFileNotFoundException +- **Null path guard**: FileAssertConfig_ReadFromFile_NullPath_ThrowsArgumentNullException +- **Run all tests**: FileAssertConfig_Run_WithNoFilter_RunsAllTests +- **Name filter**: FileAssertConfig_Run_WithMatchingFilter_RunsMatchingTest +- **Non-matching filter**: FileAssertConfig_Run_WithNonMatchingFilter_SkipsTests +- **TRX results output**: FileAssertConfig_Run_WithResultsFile_WritesTrxWithPassedOutcome +- **JUnit results output**: FileAssertConfig_Run_WithResultsFile_WritesJUnitWithFailedOutcome +- **PDF config parsing**: FileAssertConfig_ReadFromFile_PdfAssertConfig_ParsesCorrectly diff --git a/docs/verification/file-assert/configuration/file-assert-data.md b/docs/verification/file-assert/configuration/file-assert-data.md new file mode 100644 index 0000000..90bfe39 --- /dev/null +++ b/docs/verification/file-assert/configuration/file-assert-data.md @@ -0,0 
+1,26 @@ +# FileAssertData Verification + +This document describes the unit-level verification design for the `FileAssertData` unit. It +defines the verification approach for `Configuration/FileAssertData.cs`. + +## Verification Approach + +`FileAssertData` consists of data-transfer objects used exclusively for YAML deserialization. They +carry no logic and are exercised indirectly through `FileAssertConfig.ReadFromFile` in +`FileAssertConfigTests.cs`. No dedicated test file exists; all coverage is inherited from the +`FileAssertConfig` tests. + +## Dependencies + +`FileAssertData` depends only on YamlDotNet deserialization annotations. No mocking is needed. + +## Coverage + +`FileAssertData` objects are verified indirectly by every `FileAssertConfig_ReadFromFile_*` test +scenario in `FileAssertConfigTests.cs` that supplies YAML content. Correct population of all +fields confirms the data-transfer objects are correctly annotated and deserialized. + +## Requirements Coverage + +All `FileAssertData` requirements are satisfied indirectly by the `FileAssertConfig` test +scenarios. See [FileAssertConfig Verification](file-assert-config.md) for details. diff --git a/docs/verification/file-assert/file-assert.md b/docs/verification/file-assert/file-assert.md new file mode 100644 index 0000000..4ec79ae --- /dev/null +++ b/docs/verification/file-assert/file-assert.md @@ -0,0 +1,255 @@ +# System Verification + +This document describes the system-level verification design for FileAssert. It defines the overall +verification strategy, test environments, interface simulation approach, and end-to-end integration +test scenarios that together demonstrate the system meets its requirements. + +## Verification Strategy + +System-level verification uses end-to-end integration tests that invoke the tool as a real process +via the `Runner.Run` helper in `IntegrationTests.cs`. 
Each test exercises the full stack — argument +parsing, dispatch, execution, and output — and validates both exit code and console output. + +This approach ensures that system requirements are verified at the system boundary without assuming +any internal implementation detail. The tests treat the tool as a black box and assert on +observable outputs only. + +**Note**: `Runner.Run` merges stdout and stderr into a single combined output string. Per-stream +assertions (e.g., "standard error is empty") are therefore not possible at the integration test +level; all assertions are made against the combined output. + +## Test Environments + +Integration tests are executed across the following environments to satisfy multi-runtime and +multi-platform requirements: + +| Runtime | Platform | +|------------|----------| +| .NET 8.0 | Windows | +| .NET 8.0 | Linux | +| .NET 8.0 | macOS | +| .NET 9.0 | Windows | +| .NET 9.0 | Linux | +| .NET 9.0 | macOS | +| .NET 10.0 | Windows | +| .NET 10.0 | Linux | +| .NET 10.0 | macOS | + +All integration test scenarios are expected to produce identical results on all supported runtime +and platform combinations. + +## External Interface Simulation + +At the system level, no interfaces are mocked. All external interfaces are exercised with real +implementations: + +- **Standard output / standard error** — Captured by `Runner.Run` and returned as a combined + string for assertion. +- **File system** — Temporary files and directories are created and cleaned up within each test. +- **Process exit code** — Returned by `Runner.Run` and asserted directly. + +## Integration Test Scenarios + +The following integration test scenarios are defined in `IntegrationTests.cs`. + +### IntegrationTest_VersionFlag_OutputsVersion + +**Scenario**: The `--version` flag is passed as the sole argument. + +**Expected**: Exit code 0; combined output contains a semantic version string. 
+ +### IntegrationTest_HelpFlag_OutputsUsageInformation + +**Scenario**: The `--help` flag is passed as the sole argument. + +**Expected**: Exit code 0; combined output contains the text "Usage". + +### IntegrationTest_ValidateFlag_RunsValidation + +**Scenario**: The `--validate` flag is passed as the sole argument. + +**Expected**: Exit code 0; combined output contains "Total Tests:". + +### IntegrationTest_ValidateWithResults_GeneratesTrxFile + +**Scenario**: The `--validate` flag is combined with `--results <path>.trx`. + +**Expected**: Exit code 0; a TRX file is created at the specified path containing a `<TestRun>` element. + +### IntegrationTest_ValidateWithResults_GeneratesJUnitFile + +**Scenario**: The `--validate` flag is combined with `--results <path>.xml`. + +**Expected**: Exit code 0; a JUnit XML file is created at the specified path containing a +`<testsuites>` element. + +### IntegrationTest_SilentFlag_SuppressesOutput + +**Scenario**: The `--silent` flag is passed together with a valid configuration file. + +**Expected**: Exit code 0; combined output is empty. + +### IntegrationTest_LogFlag_WritesOutputToFile + +**Scenario**: The `--log <path>` flag is passed. + +**Expected**: Exit code 0; the log file is created and contains output. + +### IntegrationTest_UnknownArgument_ReturnsError + +**Scenario**: An unrecognized argument is passed. + +**Expected**: Exit code non-zero; combined output contains an error message. + +### IntegrationTest_TestFiltering_OnlyRunsMatchingTests + +**Scenario**: A configuration file with two tests is supplied; only one test name is passed as +a positional filter argument. + +**Expected**: Exit code 0; only the named test is executed. + +### IntegrationTest_ValidConfig_PassingAssertions_ReturnsZero + +**Scenario**: A valid configuration file is supplied where all file assertions pass. + +**Expected**: Exit code 0. + +### IntegrationTest_ValidConfig_FailingAssertions_ReturnsNonZero + +**Scenario**: A configuration file is supplied where at least one assertion fails. + +**Expected**: Exit code non-zero. + +### IntegrationTest_PassingAssertions_WritesTrxWithPassedResults + +**Scenario**: All assertions pass and `--results <path>.trx` is specified. + +**Expected**: Exit code 0; TRX file contains passing test results. + +### IntegrationTest_FailingAssertions_WritesJUnitWithFailedResults + +**Scenario**: At least one assertion fails and `--results <path>.xml` is specified.
+ +**Expected**: Exit code non-zero; JUnit file contains failing test results. + +### IntegrationTest_MinCountConstraint_TooFewFiles_ReturnsNonZero + +**Scenario**: A `min` constraint is configured but fewer than the required number of files exist. + +**Expected**: Exit code non-zero. + +### IntegrationTest_MaxCountConstraint_TooManyFiles_ReturnsNonZero + +**Scenario**: A `max` constraint is configured but more files exist than allowed. + +**Expected**: Exit code non-zero. + +### IntegrationTest_RegexRule_MatchingContent_ReturnsZero + +**Scenario**: A `matches` regex rule is configured and the file content matches the pattern. + +**Expected**: Exit code 0. + +### IntegrationTest_RegexRule_NonMatchingContent_ReturnsNonZero + +**Scenario**: A `matches` regex rule is configured but the file content does not match. + +**Expected**: Exit code non-zero. + +### IntegrationTest_ExactCountConstraint_WrongCount_ReturnsNonZero + +**Scenario**: An exact `count` constraint is configured but the actual file count differs. + +**Expected**: Exit code non-zero. + +### IntegrationTest_FileSizeConstraints_TooSmall_ReturnsNonZero + +**Scenario**: A minimum file size constraint is configured but the file is too small. + +**Expected**: Exit code non-zero. + +### IntegrationTest_FileSizeConstraints_TooLarge_ReturnsNonZero + +**Scenario**: A maximum file size constraint is configured but the file is too large. + +**Expected**: Exit code non-zero. + +### IntegrationTest_DoesNotContainRule_ForbiddenTextPresent_ReturnsNonZero + +**Scenario**: A `does-not-contain` rule is configured and the forbidden text is present. + +**Expected**: Exit code non-zero. + +### IntegrationTest_DoesNotContainRegexRule_ForbiddenPatternMatches_ReturnsNonZero + +**Scenario**: A `does-not-match` regex rule is configured and the forbidden pattern matches. + +**Expected**: Exit code non-zero. 
+ +### IntegrationTest_XmlAssert_PassingQuery_ReturnsZero + +**Scenario**: An XML XPath assertion is configured and the query matches the expected result. + +**Expected**: Exit code 0. + +### IntegrationTest_XmlAssert_InvalidFile_ReturnsNonZero + +**Scenario**: An XML assertion is configured but the target file is not valid XML. + +**Expected**: Exit code non-zero. + +### IntegrationTest_HtmlAssert_PassingQuery_ReturnsZero + +**Scenario**: An HTML XPath assertion is configured and the query matches the expected result. + +**Expected**: Exit code 0. + +### IntegrationTest_YamlAssert_PassingQuery_ReturnsZero + +**Scenario**: A YAML path assertion is configured and the query matches the expected result. + +**Expected**: Exit code 0. + +### IntegrationTest_JsonAssert_PassingQuery_ReturnsZero + +**Scenario**: A JSON path assertion is configured and the query matches the expected result. + +**Expected**: Exit code 0. + +### IntegrationTest_PdfAssert_InvalidFile_ReturnsNonZero + +**Scenario**: A PDF assertion is configured but the target file is not a valid PDF. + +**Expected**: Exit code non-zero. 
+ +## Requirements Coverage + +- **Version display**: IntegrationTest_VersionFlag_OutputsVersion +- **Help display**: IntegrationTest_HelpFlag_OutputsUsageInformation +- **Self-validation**: IntegrationTest_ValidateFlag_RunsValidation, + IntegrationTest_ValidateWithResults_GeneratesTrxFile, + IntegrationTest_ValidateWithResults_GeneratesJUnitFile +- **Silent mode**: IntegrationTest_SilentFlag_SuppressesOutput +- **Log file output**: IntegrationTest_LogFlag_WritesOutputToFile +- **Invalid argument rejection**: IntegrationTest_UnknownArgument_ReturnsError +- **Test filtering**: IntegrationTest_TestFiltering_OnlyRunsMatchingTests +- **File assertions**: IntegrationTest_ValidConfig_PassingAssertions_ReturnsZero, + IntegrationTest_ValidConfig_FailingAssertions_ReturnsNonZero +- **Results output**: IntegrationTest_PassingAssertions_WritesTrxWithPassedResults, + IntegrationTest_FailingAssertions_WritesJUnitWithFailedResults +- **Count/size constraints**: IntegrationTest_MinCountConstraint_TooFewFiles_ReturnsNonZero, + IntegrationTest_MaxCountConstraint_TooManyFiles_ReturnsNonZero, + IntegrationTest_ExactCountConstraint_WrongCount_ReturnsNonZero, + IntegrationTest_FileSizeConstraints_TooSmall_ReturnsNonZero, + IntegrationTest_FileSizeConstraints_TooLarge_ReturnsNonZero +- **Text rules**: IntegrationTest_RegexRule_MatchingContent_ReturnsZero, + IntegrationTest_RegexRule_NonMatchingContent_ReturnsNonZero, + IntegrationTest_DoesNotContainRule_ForbiddenTextPresent_ReturnsNonZero, + IntegrationTest_DoesNotContainRegexRule_ForbiddenPatternMatches_ReturnsNonZero +- **Structured file assertions**: IntegrationTest_XmlAssert_PassingQuery_ReturnsZero, + IntegrationTest_XmlAssert_InvalidFile_ReturnsNonZero, + IntegrationTest_HtmlAssert_PassingQuery_ReturnsZero, + IntegrationTest_YamlAssert_PassingQuery_ReturnsZero, + IntegrationTest_JsonAssert_PassingQuery_ReturnsZero, + IntegrationTest_PdfAssert_InvalidFile_ReturnsNonZero diff --git 
a/docs/verification/file-assert/modeling/file-assert-file.md b/docs/verification/file-assert/modeling/file-assert-file.md new file mode 100644 index 0000000..a9f3d6d --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-file.md @@ -0,0 +1,141 @@ +# FileAssertFile Verification + +This document describes the unit-level verification design for the `FileAssertFile` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertFile.cs`. + +## Verification Approach + +`FileAssertFile` is verified with unit tests defined in `FileAssertFileTests.cs`. Tests create +temporary directories with controlled file sets and assert on constraint evaluation and error +reporting behavior. + +## Dependencies + +| Dependency | Usage in Tests | +|--------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertFile_Create_ValidData_CreatesFile + +**Scenario**: `FileAssertFile.Create` is called with valid data containing a pattern. + +**Expected**: A non-null `FileAssertFile` instance is returned. + +**Requirement coverage**: File entry creation requirement. + +### FileAssertFile_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertFile.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertFile_Create_NullPattern_ThrowsInvalidOperationException + +**Scenario**: `FileAssertFile.Create` is called with data whose `Pattern` is `null`. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Null pattern validation. + +### FileAssertFile_Create_BlankPattern_ThrowsInvalidOperationException + +**Scenario**: `FileAssertFile.Create` is called with data whose `Pattern` is blank. + +**Expected**: An `InvalidOperationException` is thrown. 
+ +**Boundary / error path**: Blank pattern validation. + +### FileAssertFile_Run_NoMatchingFiles_NoConstraints_NoError + +**Scenario**: `FileAssertFile.Run` is called with a pattern that matches no files and no count +constraints are specified. + +**Expected**: No errors are written to the context; exit code is 0. + +### FileAssertFile_Run_WithMatchingFiles_NoConstraints_NoError + +**Scenario**: `FileAssertFile.Run` is called with a pattern that matches one or more files and no +constraints are specified. + +**Expected**: No errors are written to the context; exit code is 0. + +### FileAssertFile_Run_TooFewFiles_WritesError + +**Scenario**: `FileAssertFile.Run` is called with a `min` constraint but fewer than the required +files match the pattern. + +**Expected**: An error is written to the context; exit code is non-zero. + +### FileAssertFile_Run_TooManyFiles_WritesError + +**Scenario**: `FileAssertFile.Run` is called with a `max` constraint but more files than allowed +match the pattern. + +**Expected**: An error is written to the context; exit code is non-zero. + +### FileAssertFile_Run_WithContentRule_ContentContainsValue_NoError + +**Scenario**: `FileAssertFile.Run` is called with a `contains` text rule; the matching file +contains the expected text. + +**Expected**: No errors are written to the context; exit code is 0. + +### FileAssertFile_Run_WithContentRule_ContentMissingValue_WritesError + +**Scenario**: `FileAssertFile.Run` is called with a `contains` text rule; the matching file does +not contain the expected text. + +**Expected**: An error is written to the context; exit code is non-zero. + +### FileAssertFile_Run_WrongCount_WritesError + +**Scenario**: `FileAssertFile.Run` is called with an exact `count` constraint but the actual file +count differs. + +**Expected**: An error is written to the context; exit code is non-zero. 
+ +### FileAssertFile_Run_TooSmall_WritesError + +**Scenario**: `FileAssertFile.Run` is called with a minimum file size constraint; the matching +file is smaller than the minimum. + +**Expected**: An error is written to the context; exit code is non-zero. + +### FileAssertFile_Run_TooLarge_WritesError + +**Scenario**: `FileAssertFile.Run` is called with a maximum file size constraint; the matching +file is larger than the maximum. + +**Expected**: An error is written to the context; exit code is non-zero. + +### FileAssertFile_Run_MultipleFiles_MultipleViolateSizeConstraints_WritesErrorForEachViolation + +**Scenario**: Multiple files match the pattern; more than one violates the size constraints. + +**Expected**: A separate error is written for each violation; exit code is non-zero. + +### FileAssertFile_Run_MultipleFiles_MultipleFailContentRule_WritesErrorForEachViolation + +**Scenario**: Multiple files match the pattern; more than one fails a content rule. + +**Expected**: A separate error is written for each violation; exit code is non-zero. 
+ +## Requirements Coverage + +- **File entry creation**: FileAssertFile_Create_ValidData_CreatesFile +- **Null/blank pattern guards**: FileAssertFile_Create_NullData_ThrowsArgumentNullException, + FileAssertFile_Create_NullPattern_ThrowsInvalidOperationException, + FileAssertFile_Create_BlankPattern_ThrowsInvalidOperationException +- **Count constraints**: FileAssertFile_Run_TooFewFiles_WritesError, + FileAssertFile_Run_TooManyFiles_WritesError, FileAssertFile_Run_WrongCount_WritesError +- **Size constraints**: FileAssertFile_Run_TooSmall_WritesError, + FileAssertFile_Run_TooLarge_WritesError, + FileAssertFile_Run_MultipleFiles_MultipleViolateSizeConstraints_WritesErrorForEachViolation +- **Content rules**: FileAssertFile_Run_WithContentRule_ContentContainsValue_NoError, + FileAssertFile_Run_WithContentRule_ContentMissingValue_WritesError, + FileAssertFile_Run_MultipleFiles_MultipleFailContentRule_WritesErrorForEachViolation diff --git a/docs/verification/file-assert/modeling/file-assert-html-assert.md b/docs/verification/file-assert/modeling/file-assert-html-assert.md new file mode 100644 index 0000000..bcbca5d --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-html-assert.md @@ -0,0 +1,128 @@ +# FileAssertHtmlAssert Verification + +This document describes the unit-level verification design for the `FileAssertHtmlAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertHtmlAssert.cs`. + +## Verification Approach + +`FileAssertHtmlAssert` is verified with unit tests defined in `FileAssertHtmlAssertTests.cs`. Tests +create temporary HTML files with controlled content and assert on XPath query results, count +constraints, and text matching. + +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. 
| + +## Test Scenarios + +### FileAssertHtmlAssert_Create_ValidData_CreatesHtmlAssert + +**Scenario**: `FileAssertHtmlAssert.Create` is called with valid data. + +**Expected**: A non-null `FileAssertHtmlAssert` instance is returned. + +**Requirement coverage**: HTML assert creation requirement. + +### FileAssertHtmlAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertHtmlAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertHtmlAssert_Run_ExactCount_Matches_NoError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with an exact count assertion and the XPath +query returns exactly the expected number of elements. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Exact count match requirement. + +### FileAssertHtmlAssert_Run_ExactCount_Mismatch_WritesError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with an exact count assertion and the XPath +query returns a different number of elements. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Exact count mismatch requirement. + +### FileAssertHtmlAssert_Run_MinMaxCount_WithinBounds_NoError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with min/max count constraints and the XPath +query result count is within bounds. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Min/max count constraint pass requirement. + +### FileAssertHtmlAssert_Run_NonExistentFile_WritesError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with a path that does not exist. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Missing file error path. 
+ +### FileAssertHtmlAssert_Run_InvalidXPathQuery_WritesError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with a malformed XPath query string. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid XPath query error path. + +### FileAssertHtmlAssert_Run_XPathExactTextMatch_Matches_NoError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with an exact-text assertion and the XPath +result matches exactly. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: XPath exact text match pass requirement. + +### FileAssertHtmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with an exact-text assertion but the XPath +result does not match. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: XPath exact text match fail requirement. + +### FileAssertHtmlAssert_Run_XPathContainsText_Matches_NoError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with a `contains` text assertion and the XPath +result contains the expected value. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: XPath contains text pass requirement. + +### FileAssertHtmlAssert_Run_XPathContainsText_NoMatch_WritesError + +**Scenario**: `FileAssertHtmlAssert.Run` is called with a `contains` text assertion but the XPath +result does not contain the expected value. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: XPath contains text fail requirement. 
+ +## Requirements Coverage + +- **HTML assert creation**: FileAssertHtmlAssert_Create_ValidData_CreatesHtmlAssert +- **Null guard**: FileAssertHtmlAssert_Create_NullData_ThrowsArgumentNullException +- **Missing file**: FileAssertHtmlAssert_Run_NonExistentFile_WritesError +- **Invalid query**: FileAssertHtmlAssert_Run_InvalidXPathQuery_WritesError +- **Count constraints**: FileAssertHtmlAssert_Run_ExactCount_Matches_NoError, + FileAssertHtmlAssert_Run_ExactCount_Mismatch_WritesError, + FileAssertHtmlAssert_Run_MinMaxCount_WithinBounds_NoError +- **Text assertions**: FileAssertHtmlAssert_Run_XPathExactTextMatch_Matches_NoError, + FileAssertHtmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError, + FileAssertHtmlAssert_Run_XPathContainsText_Matches_NoError, + FileAssertHtmlAssert_Run_XPathContainsText_NoMatch_WritesError diff --git a/docs/verification/file-assert/modeling/file-assert-json-assert.md b/docs/verification/file-assert/modeling/file-assert-json-assert.md new file mode 100644 index 0000000..9b4d5ed --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-json-assert.md @@ -0,0 +1,145 @@ +# FileAssertJsonAssert Verification + +This document describes the unit-level verification design for the `FileAssertJsonAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertJsonAssert.cs`. + +## Verification Approach + +`FileAssertJsonAssert` is verified with unit tests defined in `FileAssertJsonAssertTests.cs`. Tests +create temporary JSON files with controlled content and assert on path query results and count +constraints. + +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertJsonAssert_Create_ValidData_CreatesJsonAssert + +**Scenario**: `FileAssertJsonAssert.Create` is called with valid data. 
+ +**Expected**: A non-null `FileAssertJsonAssert` instance is returned. + +**Requirement coverage**: JSON assert creation requirement. + +### FileAssertJsonAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertJsonAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertJsonAssert_Create_EmptyQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertJsonAssert.Create` is called with data whose query is empty. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Empty query validation. + +### FileAssertJsonAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertJsonAssert.Create` is called with a query that ends with a dot. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertJsonAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertJsonAssert.Create` is called with a query that starts with a dot. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertJsonAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertJsonAssert.Create` is called with a query containing consecutive dots. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertJsonAssert_Run_InvalidFile_WritesError + +**Scenario**: `FileAssertJsonAssert.Run` is called with a path that is not valid JSON. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid JSON file error path. 
+ +### FileAssertJsonAssert_Run_ArrayCount_Matches_NoError + +**Scenario**: `FileAssertJsonAssert.Run` is called with an exact count assertion and the path +query returns a JSON array with exactly the expected number of elements. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Array count match requirement. + +### FileAssertJsonAssert_Run_ArrayCount_Mismatch_WritesError + +**Scenario**: `FileAssertJsonAssert.Run` is called with an exact count assertion and the path +query returns a different number of elements. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Array count mismatch requirement. + +### FileAssertJsonAssert_Run_MinMaxCount_WithinBounds_NoError + +**Scenario**: `FileAssertJsonAssert.Run` is called with min/max count constraints and the result +count is within bounds. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Min/max count constraint pass requirement. + +### FileAssertJsonAssert_Run_ScalarValue_CountsAsOne_NoError + +**Scenario**: `FileAssertJsonAssert.Run` is called on a path that resolves to a scalar JSON value; +a count of 1 is asserted. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Scalar value counts as one requirement. + +### FileAssertJsonAssert_Run_MinCount_BelowMinimum_WritesError + +**Scenario**: `FileAssertJsonAssert.Run` is called with a minimum count constraint that is not +satisfied. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Minimum count constraint requirement. + +### FileAssertJsonAssert_Run_MaxCount_ExceedsMaximum_WritesError + +**Scenario**: `FileAssertJsonAssert.Run` is called with a maximum count constraint that is +exceeded. + +**Expected**: An error is written to the context; exit code is non-zero. 
+ +**Requirement coverage**: Maximum count constraint requirement. + +## Requirements Coverage + +- **JSON assert creation**: FileAssertJsonAssert_Create_ValidData_CreatesJsonAssert +- **Null guard**: FileAssertJsonAssert_Create_NullData_ThrowsArgumentNullException +- **Query validation**: FileAssertJsonAssert_Create_EmptyQuery_ThrowsInvalidOperationException, + FileAssertJsonAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException, + FileAssertJsonAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException, + FileAssertJsonAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException +- **Invalid file**: FileAssertJsonAssert_Run_InvalidFile_WritesError +- **Count constraints**: FileAssertJsonAssert_Run_ArrayCount_Matches_NoError, + FileAssertJsonAssert_Run_ArrayCount_Mismatch_WritesError, + FileAssertJsonAssert_Run_MinMaxCount_WithinBounds_NoError, + FileAssertJsonAssert_Run_ScalarValue_CountsAsOne_NoError, + FileAssertJsonAssert_Run_MinCount_BelowMinimum_WritesError, + FileAssertJsonAssert_Run_MaxCount_ExceedsMaximum_WritesError diff --git a/docs/verification/file-assert/modeling/file-assert-pdf-assert.md b/docs/verification/file-assert/modeling/file-assert-pdf-assert.md new file mode 100644 index 0000000..e356aa2 --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-pdf-assert.md @@ -0,0 +1,157 @@ +# FileAssertPdfAssert Verification + +This document describes the unit-level verification design for the `FileAssertPdfAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertPdfAssert.cs`. + +## Verification Approach + +`FileAssertPdfAssert` is verified with unit tests defined in `FileAssertPdfAssertTests.cs`. Tests +use PDF files in test fixtures and assert on page-count constraints, metadata field assertions, +and text content assertions. 
+ +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertPdfAssert_Create_ValidData_CreatesPdfAssert + +**Scenario**: `FileAssertPdfAssert.Create` is called with valid data. + +**Expected**: A non-null `FileAssertPdfAssert` instance is returned. + +**Requirement coverage**: PDF assert creation requirement. + +### FileAssertPdfAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertPdfAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertPdfAssert_Run_InvalidFile_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a path that is not a valid PDF file. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid PDF file error path. + +### FileAssertPdfAssert_Run_ValidPdf_PageCountSatisfied_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called on a valid PDF that meets the page count +constraints. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Page count constraint pass requirement. + +### FileAssertPdfAssert_Run_ValidPdf_TooFewPages_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called on a valid PDF with fewer pages than the minimum. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Minimum page count constraint requirement. + +### FileAssertPdfAssert_Run_ValidPdf_TooManyPages_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called on a valid PDF with more pages than the maximum. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Maximum page count constraint requirement. 
+ +### FileAssertPdfAssert_Run_MetadataContainsRule_FieldMissing_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a metadata `contains` rule on a field that +does not exist in the PDF metadata. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Metadata field missing error requirement. + +### FileAssertPdfAssert_Run_MetadataContainsRule_TitleMatches_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a metadata `contains` rule on the Title +field, and the Title contains the expected value. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Metadata title match requirement. + +### FileAssertPdfAssert_Run_MetadataContainsRule_AuthorField_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a metadata `contains` rule on the Author +field, and the Author contains the expected value. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Metadata author match requirement. + +### FileAssertPdfAssert_Run_MetadataMatchesRule_Matches_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a metadata `matches` regex rule that +matches the field value. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Metadata regex match pass requirement. + +### FileAssertPdfAssert_Run_MetadataMatchesRule_NoMatch_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a metadata `matches` regex rule that does +not match the field value. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Metadata regex match fail requirement. + +### FileAssertPdfAssert_Run_TextContainsRule_ContentPresent_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a text `contains` rule and the PDF text +contains the expected value. 
+ +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: PDF text content assertion pass requirement. + +### FileAssertPdfAssert_Run_TextRule_ContentMissing_WritesError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a text rule and the PDF text does not +satisfy the rule. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: PDF text content assertion fail requirement. + +### FileAssertPdfAssert_Run_TextMatchesRule_PatternMatches_NoError + +**Scenario**: `FileAssertPdfAssert.Run` is called with a text `matches` regex rule and the PDF +text matches the pattern. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: PDF text regex match requirement. + +## Requirements Coverage + +- **PDF assert creation**: FileAssertPdfAssert_Create_ValidData_CreatesPdfAssert +- **Null guard**: FileAssertPdfAssert_Create_NullData_ThrowsArgumentNullException +- **Invalid file**: FileAssertPdfAssert_Run_InvalidFile_WritesError +- **Page count constraints**: FileAssertPdfAssert_Run_ValidPdf_PageCountSatisfied_NoError, + FileAssertPdfAssert_Run_ValidPdf_TooFewPages_WritesError, + FileAssertPdfAssert_Run_ValidPdf_TooManyPages_WritesError +- **Metadata assertions**: FileAssertPdfAssert_Run_MetadataContainsRule_FieldMissing_WritesError, + FileAssertPdfAssert_Run_MetadataContainsRule_TitleMatches_NoError, + FileAssertPdfAssert_Run_MetadataContainsRule_AuthorField_NoError, + FileAssertPdfAssert_Run_MetadataMatchesRule_Matches_NoError, + FileAssertPdfAssert_Run_MetadataMatchesRule_NoMatch_WritesError +- **Text assertions**: FileAssertPdfAssert_Run_TextContainsRule_ContentPresent_NoError, + FileAssertPdfAssert_Run_TextRule_ContentMissing_WritesError, + FileAssertPdfAssert_Run_TextMatchesRule_PatternMatches_NoError diff --git a/docs/verification/file-assert/modeling/file-assert-rule.md b/docs/verification/file-assert/modeling/file-assert-rule.md 
new file mode 100644 index 0000000..8cf04eb --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-rule.md @@ -0,0 +1,148 @@ +# FileAssertRule Verification + +This document describes the unit-level verification design for the `FileAssertRule` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertRule.cs`. + +## Verification Approach + +`FileAssertRule` is verified with unit tests defined in `FileAssertRuleTests.cs`. Tests supply +controlled rule data objects and string content, asserting on rule type creation, application +results, and error reporting. + +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertRule_Create_WithContains_ReturnsContainsRule + +**Scenario**: `FileAssertRule.Create` is called with data specifying a `contains` rule. + +**Expected**: A `ContainsRule` instance is returned. + +**Requirement coverage**: Contains rule creation requirement. + +### FileAssertRule_Create_WithMatches_ReturnsMatchesRule + +**Scenario**: `FileAssertRule.Create` is called with data specifying a `matches` regex rule. + +**Expected**: A `MatchesRule` instance is returned. + +**Requirement coverage**: Regex matches rule creation requirement. + +### FileAssertRule_Create_WithDoesNotContain_ReturnsDoesNotContainRule + +**Scenario**: `FileAssertRule.Create` is called with data specifying a `does-not-contain` rule. + +**Expected**: A `DoesNotContainRule` instance is returned. + +**Requirement coverage**: Does-not-contain rule creation requirement. + +### FileAssertRule_Create_WithDoesNotContainRegex_ReturnsDoesNotMatchRule + +**Scenario**: `FileAssertRule.Create` is called with data specifying a `does-not-match` regex rule. + +**Expected**: A `DoesNotMatchRule` instance is returned. 
+ +**Requirement coverage**: Does-not-match rule creation requirement. + +### FileAssertRule_Create_WithNoType_ThrowsInvalidOperationException + +**Scenario**: `FileAssertRule.Create` is called with data that specifies no rule type. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Unknown rule type validation. + +### FileAssertRule_Create_WithNullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertRule.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertContainsRule_Apply_ContentContainsValue_NoError + +**Scenario**: A `ContainsRule` is applied to content that contains the expected value. + +**Expected**: No error is written to the context; exit code is 0. + +**Requirement coverage**: Contains rule pass requirement. + +### FileAssertContainsRule_Apply_ContentMissingValue_WritesError + +**Scenario**: A `ContainsRule` is applied to content that does not contain the expected value. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Contains rule fail requirement. + +### FileAssertMatchesRule_Apply_ContentMatchesPattern_NoError + +**Scenario**: A `MatchesRule` is applied to content that matches the regex pattern. + +**Expected**: No error is written to the context; exit code is 0. + +**Requirement coverage**: Regex match pass requirement. + +### FileAssertMatchesRule_Apply_ContentDoesNotMatchPattern_WritesError + +**Scenario**: A `MatchesRule` is applied to content that does not match the regex pattern. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Regex match fail requirement. + +### FileAssertDoesNotContainRule_Apply_ContentContainsValue_WritesError + +**Scenario**: A `DoesNotContainRule` is applied to content that contains the forbidden value. 
+ +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Does-not-contain fail requirement. + +### FileAssertDoesNotContainRule_Apply_ContentMissingValue_NoError + +**Scenario**: A `DoesNotContainRule` is applied to content that does not contain the forbidden value. + +**Expected**: No error is written to the context; exit code is 0. + +**Requirement coverage**: Does-not-contain pass requirement. + +### FileAssertDoesNotMatchRule_Apply_ContentMatchesPattern_WritesError + +**Scenario**: A `DoesNotMatchRule` is applied to content that matches the forbidden regex pattern. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Does-not-match fail requirement. + +### FileAssertDoesNotMatchRule_Apply_ContentDoesNotMatchPattern_NoError + +**Scenario**: A `DoesNotMatchRule` is applied to content that does not match the forbidden pattern. + +**Expected**: No error is written to the context; exit code is 0. + +**Requirement coverage**: Does-not-match pass requirement. 
+ +## Requirements Coverage + +- **Contains rule**: FileAssertRule_Create_WithContains_ReturnsContainsRule, + FileAssertContainsRule_Apply_ContentContainsValue_NoError, + FileAssertContainsRule_Apply_ContentMissingValue_WritesError +- **Regex matches rule**: FileAssertRule_Create_WithMatches_ReturnsMatchesRule, + FileAssertMatchesRule_Apply_ContentMatchesPattern_NoError, + FileAssertMatchesRule_Apply_ContentDoesNotMatchPattern_WritesError +- **Does-not-contain rule**: FileAssertRule_Create_WithDoesNotContain_ReturnsDoesNotContainRule, + FileAssertDoesNotContainRule_Apply_ContentContainsValue_WritesError, + FileAssertDoesNotContainRule_Apply_ContentMissingValue_NoError +- **Does-not-match rule**: FileAssertRule_Create_WithDoesNotContainRegex_ReturnsDoesNotMatchRule, + FileAssertDoesNotMatchRule_Apply_ContentMatchesPattern_WritesError, + FileAssertDoesNotMatchRule_Apply_ContentDoesNotMatchPattern_NoError +- **Unknown type guard**: FileAssertRule_Create_WithNoType_ThrowsInvalidOperationException +- **Null data guard**: FileAssertRule_Create_WithNullData_ThrowsArgumentNullException diff --git a/docs/verification/file-assert/modeling/file-assert-test.md b/docs/verification/file-assert/modeling/file-assert-test.md new file mode 100644 index 0000000..7d06cc4 --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-test.md @@ -0,0 +1,144 @@ +# FileAssertTest Verification + +This document describes the unit-level verification design for the `FileAssertTest` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertTest.cs`. + +## Verification Approach + +`FileAssertTest` is verified with unit tests defined in `FileAssertTestTests.cs`. Tests supply +controlled `FileAssertTestData` instances and assert on filter matching behavior, creation +validation, and execution delegation. 
+ +## Dependencies + +| Dependency | Usage in Tests | +|-------------------|----------------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | +| `FileAssertFile` | Used directly (not mocked) — called through `FileAssertTest.Run`. | + +## Test Scenarios + +### FileAssertTest_Create_ValidData_CreatesTest + +**Scenario**: `FileAssertTest.Create` is called with valid data containing a name and files list. + +**Expected**: A non-null `FileAssertTest` instance is returned with correct properties. + +**Requirement coverage**: Test creation requirement. + +### FileAssertTest_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertTest.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null guard on data. + +### FileAssertTest_Create_NullName_ThrowsInvalidOperationException + +**Scenario**: `FileAssertTest.Create` is called with data whose `Name` property is `null`. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Null name validation. + +### FileAssertTest_Create_WhitespaceName_ThrowsInvalidOperationException + +**Scenario**: `FileAssertTest.Create` is called with data whose `Name` property is whitespace. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Whitespace name validation. + +### FileAssertTest_MatchesFilter_EmptyFilters_ReturnsTrue + +**Scenario**: `FileAssertTest.MatchesFilter` is called with an empty filter list. + +**Expected**: Returns `true` (no filter means run all tests). + +**Requirement coverage**: Empty filter match requirement. + +### FileAssertTest_MatchesFilter_MatchingName_ReturnsTrue + +**Scenario**: `FileAssertTest.MatchesFilter` is called with a filter list containing the test name. + +**Expected**: Returns `true`. + +**Requirement coverage**: Name-based filter match requirement. 
+ +### FileAssertTest_MatchesFilter_MatchingTag_ReturnsTrue + +**Scenario**: `FileAssertTest.MatchesFilter` is called with a filter list containing one of the +test's tags. + +**Expected**: Returns `true`. + +**Requirement coverage**: Tag-based filter match requirement. + +### FileAssertTest_MatchesFilter_NonMatchingFilter_ReturnsFalse + +**Scenario**: `FileAssertTest.MatchesFilter` is called with a filter list containing neither the +test name nor any of its tags. + +**Expected**: Returns `false`. + +**Requirement coverage**: Non-matching filter requirement. + +### FileAssertTest_MatchesFilter_CaseInsensitiveName_ReturnsTrue + +**Scenario**: `FileAssertTest.MatchesFilter` is called with a filter that differs only in case +from the test name. + +**Expected**: Returns `true`. + +**Requirement coverage**: Case-insensitive name matching requirement. + +### FileAssertTest_MatchesFilter_CaseInsensitiveTag_ReturnsTrue + +**Scenario**: `FileAssertTest.MatchesFilter` is called with a filter that differs only in case +from a test tag. + +**Expected**: Returns `true`. + +**Requirement coverage**: Case-insensitive tag matching requirement. + +### FileAssertTest_Run_RunsAllFiles + +**Scenario**: `FileAssertTest.Run` is called on a test with multiple file entries. + +**Expected**: All file entries are evaluated. + +**Requirement coverage**: Run-all-files requirement. + +### FileAssertTest_Run_NullContext_ThrowsArgumentNullException + +**Scenario**: `FileAssertTest.Run` is called with a `null` context. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null context guard. + +### FileAssertTest_Run_NullBasePath_ThrowsArgumentNullException + +**Scenario**: `FileAssertTest.Run` is called with a `null` base path. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null base path guard. 
+ +## Requirements Coverage + +- **Test creation**: FileAssertTest_Create_ValidData_CreatesTest +- **Null data guard**: FileAssertTest_Create_NullData_ThrowsArgumentNullException +- **Null/whitespace name guard**: FileAssertTest_Create_NullName_ThrowsInvalidOperationException, + FileAssertTest_Create_WhitespaceName_ThrowsInvalidOperationException +- **Empty filter match**: FileAssertTest_MatchesFilter_EmptyFilters_ReturnsTrue +- **Name filter**: FileAssertTest_MatchesFilter_MatchingName_ReturnsTrue, + FileAssertTest_MatchesFilter_CaseInsensitiveName_ReturnsTrue +- **Tag filter**: FileAssertTest_MatchesFilter_MatchingTag_ReturnsTrue, + FileAssertTest_MatchesFilter_CaseInsensitiveTag_ReturnsTrue +- **Non-matching filter**: FileAssertTest_MatchesFilter_NonMatchingFilter_ReturnsFalse +- **Run all files**: FileAssertTest_Run_RunsAllFiles +- **Null context guard**: FileAssertTest_Run_NullContext_ThrowsArgumentNullException +- **Null base path guard**: FileAssertTest_Run_NullBasePath_ThrowsArgumentNullException diff --git a/docs/verification/file-assert/modeling/file-assert-text-assert.md b/docs/verification/file-assert/modeling/file-assert-text-assert.md new file mode 100644 index 0000000..f88442d --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-text-assert.md @@ -0,0 +1,77 @@ +# FileAssertTextAssert Verification + +This document describes the unit-level verification design for the `FileAssertTextAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertTextAssert.cs`. + +## Verification Approach + +`FileAssertTextAssert` is verified with unit tests defined in `FileAssertTextAssertTests.cs`. Tests +create temporary files with controlled content and assert on rule evaluation and error reporting. 
+ +## Dependencies + +| Dependency | Usage in Tests | +|------------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | +| `FileAssertRule` | Used directly (not mocked). | + +## Test Scenarios + +### FileAssertTextAssert_Create_ValidData_CreatesTextAssert + +**Scenario**: `FileAssertTextAssert.Create` is called with valid data containing at least one rule. + +**Expected**: A non-null `FileAssertTextAssert` instance is returned. + +**Requirement coverage**: Text assert creation requirement. + +### FileAssertTextAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertTextAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertTextAssert_Run_FileContainsText_NoError + +**Scenario**: `FileAssertTextAssert.Run` is called on a file whose content satisfies all rules. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Text assertion pass requirement. + +### FileAssertTextAssert_Run_FileMissingText_WritesError + +**Scenario**: `FileAssertTextAssert.Run` is called on a file whose content does not satisfy a +`contains` rule. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Text assertion fail requirement. + +### FileAssertTextAssert_Run_NonExistentFile_WritesError + +**Scenario**: `FileAssertTextAssert.Run` is called with a path that does not exist. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Missing file error path. + +### FileAssertTextAssert_Run_MultipleRulesMultipleViolations_WritesMultipleErrors + +**Scenario**: `FileAssertTextAssert.Run` is called on a file that violates multiple rules. + +**Expected**: A separate error is written for each violation; exit code is non-zero. 
+ +**Requirement coverage**: Multiple-rule violation reporting requirement. + +## Requirements Coverage + +- **Text assert creation**: FileAssertTextAssert_Create_ValidData_CreatesTextAssert +- **Null guard**: FileAssertTextAssert_Create_NullData_ThrowsArgumentNullException +- **Pass**: FileAssertTextAssert_Run_FileContainsText_NoError +- **Fail**: FileAssertTextAssert_Run_FileMissingText_WritesError +- **Missing file**: FileAssertTextAssert_Run_NonExistentFile_WritesError +- **Multiple violations**: FileAssertTextAssert_Run_MultipleRulesMultipleViolations_WritesMultipleErrors diff --git a/docs/verification/file-assert/modeling/file-assert-xml-assert.md b/docs/verification/file-assert/modeling/file-assert-xml-assert.md new file mode 100644 index 0000000..47093d5 --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-xml-assert.md @@ -0,0 +1,128 @@ +# FileAssertXmlAssert Verification + +This document describes the unit-level verification design for the `FileAssertXmlAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertXmlAssert.cs`. + +## Verification Approach + +`FileAssertXmlAssert` is verified with unit tests defined in `FileAssertXmlAssertTests.cs`. Tests +create temporary XML files with controlled content and assert on XPath query results, count +constraints, and text matching. + +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertXmlAssert_Create_ValidData_CreatesXmlAssert + +**Scenario**: `FileAssertXmlAssert.Create` is called with valid data. + +**Expected**: A non-null `FileAssertXmlAssert` instance is returned. + +**Requirement coverage**: XML assert creation requirement. 
+ +### FileAssertXmlAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertXmlAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertXmlAssert_Run_InvalidFile_WritesError + +**Scenario**: `FileAssertXmlAssert.Run` is called with a path that is not valid XML. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid XML file error path. + +### FileAssertXmlAssert_Run_ExactCount_Matches_NoError + +**Scenario**: `FileAssertXmlAssert.Run` is called with an exact count assertion and the XPath +query returns exactly the expected number of nodes. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Exact count match requirement. + +### FileAssertXmlAssert_Run_ExactCount_Mismatch_WritesError + +**Scenario**: `FileAssertXmlAssert.Run` is called with an exact count assertion and the XPath +query returns a different number of nodes. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Exact count mismatch requirement. + +### FileAssertXmlAssert_Run_MinMaxCount_WithinBounds_NoError + +**Scenario**: `FileAssertXmlAssert.Run` is called with min/max count constraints and the XPath +query result count is within bounds. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Min/max count constraint pass requirement. + +### FileAssertXmlAssert_Run_InvalidXPathQuery_WritesError + +**Scenario**: `FileAssertXmlAssert.Run` is called with a malformed XPath query string. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid XPath query error path. 
+ +### FileAssertXmlAssert_Run_XPathExactTextMatch_Matches_NoError + +**Scenario**: `FileAssertXmlAssert.Run` is called with an exact-text assertion and the first +XPath result node matches exactly. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: XPath exact text match pass requirement. + +### FileAssertXmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError + +**Scenario**: `FileAssertXmlAssert.Run` is called with an exact-text assertion but the XPath +result does not match. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: XPath exact text match fail requirement. + +### FileAssertXmlAssert_Run_XPathContainsText_Matches_NoError + +**Scenario**: `FileAssertXmlAssert.Run` is called with a `contains` text assertion and the XPath +result contains the expected value. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: XPath contains text pass requirement. + +### FileAssertXmlAssert_Run_XPathContainsText_NoMatch_WritesError + +**Scenario**: `FileAssertXmlAssert.Run` is called with a `contains` text assertion but the XPath +result does not contain the expected value. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: XPath contains text fail requirement. 
+ +## Requirements Coverage + +- **XML assert creation**: FileAssertXmlAssert_Create_ValidData_CreatesXmlAssert +- **Null guard**: FileAssertXmlAssert_Create_NullData_ThrowsArgumentNullException +- **Invalid file**: FileAssertXmlAssert_Run_InvalidFile_WritesError +- **Invalid query**: FileAssertXmlAssert_Run_InvalidXPathQuery_WritesError +- **Count constraints**: FileAssertXmlAssert_Run_ExactCount_Matches_NoError, + FileAssertXmlAssert_Run_ExactCount_Mismatch_WritesError, + FileAssertXmlAssert_Run_MinMaxCount_WithinBounds_NoError +- **Text assertions**: FileAssertXmlAssert_Run_XPathExactTextMatch_Matches_NoError, + FileAssertXmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError, + FileAssertXmlAssert_Run_XPathContainsText_Matches_NoError, + FileAssertXmlAssert_Run_XPathContainsText_NoMatch_WritesError diff --git a/docs/verification/file-assert/modeling/file-assert-yaml-assert.md b/docs/verification/file-assert/modeling/file-assert-yaml-assert.md new file mode 100644 index 0000000..a9d6450 --- /dev/null +++ b/docs/verification/file-assert/modeling/file-assert-yaml-assert.md @@ -0,0 +1,145 @@ +# FileAssertYamlAssert Verification + +This document describes the unit-level verification design for the `FileAssertYamlAssert` unit. It +defines the test scenarios, dependency usage, and requirement coverage for +`Modeling/FileAssertYamlAssert.cs`. + +## Verification Approach + +`FileAssertYamlAssert` is verified with unit tests defined in `FileAssertYamlAssertTests.cs`. Tests +create temporary YAML files with controlled content and assert on path query results and count +constraints. + +## Dependencies + +| Dependency | Usage in Tests | +|------------|-------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created with controlled flags. | + +## Test Scenarios + +### FileAssertYamlAssert_Create_ValidData_CreatesYamlAssert + +**Scenario**: `FileAssertYamlAssert.Create` is called with valid data. 
+ +**Expected**: A non-null `FileAssertYamlAssert` instance is returned. + +**Requirement coverage**: YAML assert creation requirement. + +### FileAssertYamlAssert_Create_NullData_ThrowsArgumentNullException + +**Scenario**: `FileAssertYamlAssert.Create` is called with `null` data. + +**Expected**: An `ArgumentNullException` is thrown. + +**Boundary / error path**: Null data guard. + +### FileAssertYamlAssert_Create_EmptyQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertYamlAssert.Create` is called with data whose query is empty. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Empty query validation. + +### FileAssertYamlAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertYamlAssert.Create` is called with a query that ends with a dot. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertYamlAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertYamlAssert.Create` is called with a query that starts with a dot. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertYamlAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException + +**Scenario**: `FileAssertYamlAssert.Create` is called with a query containing consecutive dots. + +**Expected**: An `InvalidOperationException` is thrown. + +**Boundary / error path**: Malformed query validation. + +### FileAssertYamlAssert_Run_InvalidFile_WritesError + +**Scenario**: `FileAssertYamlAssert.Run` is called with a path that is not valid YAML. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Boundary / error path**: Invalid YAML file error path. 
+ +### FileAssertYamlAssert_Run_SequenceCount_Matches_NoError + +**Scenario**: `FileAssertYamlAssert.Run` is called with an exact count assertion and the path +query returns a sequence with exactly the expected number of items. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Sequence count match requirement. + +### FileAssertYamlAssert_Run_SequenceCount_Mismatch_WritesError + +**Scenario**: `FileAssertYamlAssert.Run` is called with an exact count assertion and the path +query returns a different count. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Sequence count mismatch requirement. + +### FileAssertYamlAssert_Run_MinMaxCount_WithinBounds_NoError + +**Scenario**: `FileAssertYamlAssert.Run` is called with min/max count constraints and the result +count is within bounds. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Min/max count constraint pass requirement. + +### FileAssertYamlAssert_Run_ScalarValue_CountsAsOne_NoError + +**Scenario**: `FileAssertYamlAssert.Run` is called on a path that resolves to a scalar value; +a count of 1 is asserted. + +**Expected**: No errors are written to the context; exit code is 0. + +**Requirement coverage**: Scalar value counts as one requirement. + +### FileAssertYamlAssert_Run_MinCount_BelowMinimum_WritesError + +**Scenario**: `FileAssertYamlAssert.Run` is called with a minimum count constraint that is not +satisfied. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Minimum count constraint requirement. + +### FileAssertYamlAssert_Run_MaxCount_ExceedsMaximum_WritesError + +**Scenario**: `FileAssertYamlAssert.Run` is called with a maximum count constraint that is +exceeded. + +**Expected**: An error is written to the context; exit code is non-zero. + +**Requirement coverage**: Maximum count constraint requirement. 
+ +## Requirements Coverage + +- **YAML assert creation**: FileAssertYamlAssert_Create_ValidData_CreatesYamlAssert +- **Null guard**: FileAssertYamlAssert_Create_NullData_ThrowsArgumentNullException +- **Query validation**: FileAssertYamlAssert_Create_EmptyQuery_ThrowsInvalidOperationException, + FileAssertYamlAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException, + FileAssertYamlAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException, + FileAssertYamlAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException +- **Invalid file**: FileAssertYamlAssert_Run_InvalidFile_WritesError +- **Count constraints**: FileAssertYamlAssert_Run_SequenceCount_Matches_NoError, + FileAssertYamlAssert_Run_SequenceCount_Mismatch_WritesError, + FileAssertYamlAssert_Run_MinMaxCount_WithinBounds_NoError, + FileAssertYamlAssert_Run_ScalarValue_CountsAsOne_NoError, + FileAssertYamlAssert_Run_MinCount_BelowMinimum_WritesError, + FileAssertYamlAssert_Run_MaxCount_ExceedsMaximum_WritesError diff --git a/docs/verification/file-assert/modeling/modeling.md b/docs/verification/file-assert/modeling/modeling.md new file mode 100644 index 0000000..83852e3 --- /dev/null +++ b/docs/verification/file-assert/modeling/modeling.md @@ -0,0 +1,55 @@ +# Modeling Subsystem Verification + +This document describes the subsystem-level verification design for the `Modeling` subsystem. It +defines the integration test approach, subsystem boundary, mocking strategy, and test scenarios +that together verify the `Modeling` subsystem requirements. + +## Verification Approach + +The `Modeling` subsystem is verified by integration tests defined in `ModelingTests.cs`. Each test +exercises the assertion execution pipeline — creating a `FileAssertTest`, resolving file patterns, +evaluating constraints, and reporting results through a real `Context`. + +## Dependencies and Mocking Strategy + +All collaborators at the subsystem boundary use their real implementations. 
Temporary directories +are used for test files so that tests remain isolated. + +## Integration Test Scenarios + +The following integration test scenarios are defined in `ModelingTests.cs`. + +### Modeling_ExecuteChain_PassesWhenAllConstraintsMet + +**Scenario**: A `FileAssertTest` is created with a configuration where all file pattern, count, +and content constraints are satisfied by the test files in a temporary directory. + +**Expected**: No errors are written to the context; exit code is 0. + +### Modeling_ExecuteChain_ReportsFailuresThroughContext + +**Scenario**: A `FileAssertTest` is created with a configuration where at least one constraint +is not satisfied. + +**Expected**: Errors are written to the context; exit code is non-zero. + +### Modeling_FileTypeParsing_InvalidXml_ReportsParseError + +**Scenario**: A `FileAssertFile` with an XML assertion is configured to evaluate a file that is +not valid XML. + +**Expected**: An error is written to the context; exit code is non-zero. + +### Modeling_QueryAssertions_XmlQueryMeetsCount_NoError + +**Scenario**: A `FileAssertFile` with an XML XPath assertion is configured and a valid XML file +satisfying the query and count constraints is provided. + +**Expected**: No errors are written to the context; exit code is 0. + +## Requirements Coverage + +- **Constraint evaluation**: Modeling_ExecuteChain_PassesWhenAllConstraintsMet, + Modeling_ExecuteChain_ReportsFailuresThroughContext +- **XML parsing error reporting**: Modeling_FileTypeParsing_InvalidXml_ReportsParseError +- **XML query assertion**: Modeling_QueryAssertions_XmlQueryMeetsCount_NoError diff --git a/docs/verification/file-assert/program.md b/docs/verification/file-assert/program.md new file mode 100644 index 0000000..f75198a --- /dev/null +++ b/docs/verification/file-assert/program.md @@ -0,0 +1,71 @@ +# Program Verification + +This document describes the unit-level verification design for the `Program` unit. 
It defines the +test scenarios, dependency usage, and requirement coverage for `Program.cs`. + +## Verification Approach + +`Program` is verified with unit tests defined in `ProgramTests.cs`. Because `Program` directly +instantiates `Context` from real arguments and calls `Validation.Run` when needed, no mocking is +required. The tests pass controlled argument arrays and assert on captured console output and exit +codes. + +## Dependencies + +| Dependency | Usage in Tests | +|--------------|--------------------------------------------------------------------------| +| `Context` | Used directly (not mocked) — created from the argument array under test. | +| `Validation` | Used directly (not mocked) — called when the validate flag is set. | + +No test doubles are introduced at the `Program` level; all collaborators execute their real logic. + +## Test Scenarios + +### Program_Run_WithVersionFlag_DisplaysVersionOnly + +**Scenario**: `Program.Run` is called with a context created from `["--version"]`. + +**Expected**: Standard output contains the version string; the word "Copyright" does not appear; +exit code is 0. + +**Requirement coverage**: Version display requirement. + +### Program_Run_WithHelpFlag_DisplaysUsageInformation + +**Scenario**: `Program.Run` is called with a context created from `["--help"]`. + +**Expected**: Standard output contains "Usage:"; exit code is 0. + +**Requirement coverage**: Help display requirement. + +### Program_Run_WithValidateFlag_RunsValidation + +**Scenario**: `Program.Run` is called with a context created from `["--validate"]`. + +**Expected**: Standard output contains "Total Tests:"; exit code is 0. + +**Requirement coverage**: Self-validation requirement. + +### Program_Run_NoArguments_DisplaysDefaultBehavior + +**Scenario**: `Program.Run` is called with a context created from an empty argument array. + +**Expected**: Standard output contains the tool name and copyright notice; exit code is 0. 
+ +**Requirement coverage**: Default behavior requirement. + +### Program_Version_ReturnsNonEmptyString + +**Scenario**: The `Program.Version` static property is read. + +**Expected**: The returned string is non-empty and non-null. + +**Requirement coverage**: Version string availability requirement. + +## Requirements Coverage + +- **Version display**: Program_Run_WithVersionFlag_DisplaysVersionOnly, + Program_Version_ReturnsNonEmptyString +- **Help display**: Program_Run_WithHelpFlag_DisplaysUsageInformation +- **Self-validation**: Program_Run_WithValidateFlag_RunsValidation +- **Default behavior**: Program_Run_NoArguments_DisplaysDefaultBehavior diff --git a/docs/verification/file-assert/selftest/selftest.md b/docs/verification/file-assert/selftest/selftest.md new file mode 100644 index 0000000..044bf6c --- /dev/null +++ b/docs/verification/file-assert/selftest/selftest.md @@ -0,0 +1,48 @@ +# SelfTest Subsystem Verification + +This document describes the subsystem-level verification design for the `SelfTest` subsystem. It +defines the integration test approach, subsystem boundary, mocking strategy, and test scenarios +that together verify the `SelfTest` subsystem requirements. + +## Verification Approach + +The `SelfTest` subsystem is verified by integration tests defined in `SelfTestTests.cs`. Each +test exercises the `Validation.Run` method with a real `Context` to confirm that the subsystem +produces correct output and result files across the supported result-format options. + +## Dependencies and Mocking Strategy + +At the subsystem boundary, `Context` (from the `Cli` subsystem) and `PathHelpers` (from the +`Utilities` subsystem) are used with their real implementations. No mocking is applied. Temporary +directories are used for result file output so that tests remain isolated. + +## Integration Test Scenarios + +The following integration test scenarios are defined in `SelfTestTests.cs`. 
+ +### SelfTest_Run_ExecutesBuiltInTestsAndProducesSummary + +**Scenario**: `Validation.Run` is called with a real context. + +**Expected**: Validation completes without error; exit code is 0; output contains a summary +including "Total Tests:". + +### SelfTest_Run_WhenInvoked_PrintsSystemInfoHeader + +**Scenario**: `Validation.Run` is called with a real context. + +**Expected**: Output contains a system information header. + +### SelfTest_Run_WithResultsFile_WritesTrxResultsFile + +**Scenario**: `Validation.Run` is called with a context whose `ResultsFile` points to a temporary +`.trx` path. + +**Expected**: A TRX file is created at the specified path; the file contains a ` +/// Defines the Sequential test collection. +/// Tests in this collection are disabled from running in parallel to +/// prevent conflicts when sharing Console state. +/// +[CollectionDefinition("Sequential", DisableParallelization = true)] +public sealed class SequentialCollection { } diff --git a/test/DemaConsulting.FileAssert.Tests/Cli/CliTests.cs b/test/DemaConsulting.FileAssert.Tests/Cli/CliTests.cs index 921be2f..b8ce265 100644 --- a/test/DemaConsulting.FileAssert.Tests/Cli/CliTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Cli/CliTests.cs @@ -25,13 +25,13 @@ namespace DemaConsulting.FileAssert.Tests.Cli; /// /// Subsystem tests for the Cli subsystem. /// -[TestClass] +[Collection("Sequential")] public class CliTests { /// /// Verifies that the Cli subsystem correctly parses the --silent, --validate, and --log flags. 
/// - [TestMethod] + [Fact] public void Cli_CreateContext_ParsesSilentValidateAndLogFlags() { // Arrange @@ -49,12 +49,12 @@ public void Cli_CreateContext_ParsesSilentValidateAndLogFlags() ])) { // Assert - all flags are reflected in the context properties - Assert.IsTrue(context.Silent); - Assert.IsTrue(context.Validate); - Assert.IsFalse(context.Version); - Assert.IsFalse(context.Help); - Assert.AreEqual(".fileassert.yaml", context.ConfigFile); - Assert.AreEqual(0, context.ExitCode); + Assert.True(context.Silent); + Assert.True(context.Validate); + Assert.False(context.Version); + Assert.False(context.Help); + Assert.Equal(".fileassert.yaml", context.ConfigFile); + Assert.Equal(0, context.ExitCode); } } finally @@ -66,7 +66,7 @@ public void Cli_CreateContext_ParsesSilentValidateAndLogFlags() /// /// Verifies that the Cli subsystem correctly parses --version, --help, --config, and --results flags. /// - [TestMethod] + [Fact] public void Cli_CreateContext_ParsesVersionHelpConfigResultsFlags() { // Arrange @@ -86,10 +86,10 @@ public void Cli_CreateContext_ParsesVersionHelpConfigResultsFlags() ]); // Assert - all flags are reflected in the context properties - Assert.IsTrue(context.Version); - Assert.IsTrue(context.Help); - Assert.AreEqual(configPath, context.ConfigFile); - Assert.AreEqual(resultsPath, context.ResultsFile); + Assert.True(context.Version); + Assert.True(context.Help); + Assert.Equal(configPath, context.ConfigFile); + Assert.Equal(resultsPath, context.ResultsFile); } finally { @@ -100,50 +100,50 @@ public void Cli_CreateContext_ParsesVersionHelpConfigResultsFlags() /// /// Verifies that the Cli subsystem captures positional arguments as test name/tag filters. 
/// - [TestMethod] + [Fact] public void Cli_CreateContext_WithFilters_ParsesPositionalArguments() { // Arrange & Act using var context = Context.Create(["--silent", "smoke", "regression"]); // Assert - positional arguments are captured in the Filters collection - Assert.HasCount(2, context.Filters); - Assert.AreEqual("smoke", context.Filters[0]); - Assert.AreEqual("regression", context.Filters[1]); + Assert.Equal(2, context.Filters.Count); + Assert.Equal("smoke", context.Filters[0]); + Assert.Equal("regression", context.Filters[1]); } /// /// Verifies that the Cli subsystem throws ArgumentException for unknown flags. /// - [TestMethod] + [Fact] public void Cli_CreateContext_UnknownArgument_ThrowsArgumentException() { // Arrange & Act & Assert - Assert.ThrowsExactly(() => Context.Create(["--unknown-flag"])); + Assert.Throws(() => Context.Create(["--unknown-flag"])); } /// /// Verifies that WriteError changes the context exit code from 0 to 1. /// - [TestMethod] + [Fact] public void Cli_WriteError_ChangesExitCodeToOne() { // Arrange using var context = Context.Create(["--silent"]); - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); // Act context.WriteError("something went wrong"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that the Cli subsystem routes both informational and error messages /// through the log file when a log path is specified. 
/// - [TestMethod] + [Fact] public void Cli_OutputPipeline_WritesMessagesToLogFile() { // Arrange diff --git a/test/DemaConsulting.FileAssert.Tests/Cli/ContextNewPropertiesTests.cs b/test/DemaConsulting.FileAssert.Tests/Cli/ContextNewPropertiesTests.cs index 1d4f235..38aa754 100644 --- a/test/DemaConsulting.FileAssert.Tests/Cli/ContextNewPropertiesTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Cli/ContextNewPropertiesTests.cs @@ -25,98 +25,98 @@ namespace DemaConsulting.FileAssert.Tests.Cli; /// /// Unit tests for the new ConfigFile, Filters, and --config features of . /// -[TestClass] +[Collection("Sequential")] public class ContextNewPropertiesTests { /// /// Verifies that ConfigFile defaults to .fileassert.yaml when no arguments are provided. /// - [TestMethod] + [Fact] public void Context_Create_NoArguments_ConfigFileHasDefaultValue() { // Act using var context = Context.Create([]); // Assert - Assert.AreEqual(".fileassert.yaml", context.ConfigFile); + Assert.Equal(".fileassert.yaml", context.ConfigFile); } /// /// Verifies that Filters is empty when no positional arguments are provided. /// - [TestMethod] + [Fact] public void Context_Create_NoArguments_FiltersIsEmpty() { // Act using var context = Context.Create([]); // Assert - Assert.HasCount(0, context.Filters); + Assert.Empty(context.Filters); } /// /// Verifies that --config sets the ConfigFile property. /// - [TestMethod] + [Fact] public void Context_Create_ConfigFlag_SetsConfigFile() { // Act using var context = Context.Create(["--config", "my-tests.yaml"]); // Assert - Assert.AreEqual("my-tests.yaml", context.ConfigFile); + Assert.Equal("my-tests.yaml", context.ConfigFile); } /// /// Verifies that positional arguments are collected into the Filters list. 
/// - [TestMethod] + [Fact] public void Context_Create_PositionalArguments_AddedToFilters() { // Act using var context = Context.Create(["smoke", "regression"]); // Assert - Assert.HasCount(2, context.Filters); - Assert.AreEqual("smoke", context.Filters[0]); - Assert.AreEqual("regression", context.Filters[1]); + Assert.Equal(2, context.Filters.Count); + Assert.Equal("smoke", context.Filters[0]); + Assert.Equal("regression", context.Filters[1]); } /// /// Verifies that positional arguments may be mixed with flag arguments. /// - [TestMethod] + [Fact] public void Context_Create_MixedArguments_ParsesCorrectly() { // Act using var context = Context.Create(["--silent", "my-filter", "--config", "cfg.yaml"]); // Assert - Assert.IsTrue(context.Silent); - Assert.AreEqual("cfg.yaml", context.ConfigFile); - Assert.HasCount(1, context.Filters); - Assert.AreEqual("my-filter", context.Filters[0]); + Assert.True(context.Silent); + Assert.Equal("cfg.yaml", context.ConfigFile); + Assert.Single(context.Filters); + Assert.Equal("my-filter", context.Filters[0]); } /// /// Verifies that an unknown flag (starting with -) still throws . /// - [TestMethod] + [Fact] public void Context_Create_UnknownFlagWithDash_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--bogus-flag"])); + var exception = Assert.Throws(() => Context.Create(["--bogus-flag"])); Assert.Contains("Unsupported argument", exception.Message); } /// /// Verifies that --config without a value throws . 
/// - [TestMethod] + [Fact] public void Context_Create_ConfigFlag_WithoutValue_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--config"])); + var exception = Assert.Throws(() => Context.Create(["--config"])); Assert.Contains("--config", exception.Message); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Cli/ContextTests.cs b/test/DemaConsulting.FileAssert.Tests/Cli/ContextTests.cs index 74b4eb2..78a674e 100644 --- a/test/DemaConsulting.FileAssert.Tests/Cli/ContextTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Cli/ContextTests.cs @@ -25,162 +25,162 @@ namespace DemaConsulting.FileAssert.Tests.Cli; /// /// Unit tests for the Context class. /// -[TestClass] +[Collection("Sequential")] public class ContextTests { /// /// Test creating a context with no arguments. /// - [TestMethod] + [Fact] public void Context_Create_NoArguments_ReturnsDefaultContext() { // Act using var context = Context.Create([]); // Assert - Assert.IsFalse(context.Version); - Assert.IsFalse(context.Help); - Assert.IsFalse(context.Silent); - Assert.IsFalse(context.Validate); - Assert.IsNull(context.ResultsFile); - Assert.AreEqual(0, context.ExitCode); + Assert.False(context.Version); + Assert.False(context.Help); + Assert.False(context.Silent); + Assert.False(context.Validate); + Assert.Null(context.ResultsFile); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the version flag. /// - [TestMethod] + [Fact] public void Context_Create_VersionFlag_SetsVersionTrue() { // Act using var context = Context.Create(["--version"]); // Assert - Assert.IsTrue(context.Version); - Assert.IsFalse(context.Help); - Assert.AreEqual(0, context.ExitCode); + Assert.True(context.Version); + Assert.False(context.Help); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the short version flag. 
/// - [TestMethod] + [Fact] public void Context_Create_ShortVersionFlag_SetsVersionTrue() { // Act using var context = Context.Create(["-v"]); // Assert - Assert.IsTrue(context.Version); - Assert.IsFalse(context.Help); - Assert.AreEqual(0, context.ExitCode); + Assert.True(context.Version); + Assert.False(context.Help); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the help flag. /// - [TestMethod] + [Fact] public void Context_Create_HelpFlag_SetsHelpTrue() { // Act using var context = Context.Create(["--help"]); // Assert - Assert.IsFalse(context.Version); - Assert.IsTrue(context.Help); - Assert.AreEqual(0, context.ExitCode); + Assert.False(context.Version); + Assert.True(context.Help); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the short help flag -h. /// - [TestMethod] + [Fact] public void Context_Create_ShortHelpFlag_H_SetsHelpTrue() { // Act using var context = Context.Create(["-h"]); // Assert - Assert.IsFalse(context.Version); - Assert.IsTrue(context.Help); - Assert.AreEqual(0, context.ExitCode); + Assert.False(context.Version); + Assert.True(context.Help); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the short help flag -?. /// - [TestMethod] + [Fact] public void Context_Create_ShortHelpFlag_Question_SetsHelpTrue() { // Act using var context = Context.Create(["-?"]); // Assert - Assert.IsFalse(context.Version); - Assert.IsTrue(context.Help); - Assert.AreEqual(0, context.ExitCode); + Assert.False(context.Version); + Assert.True(context.Help); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the silent flag. 
/// - [TestMethod] + [Fact] public void Context_Create_SilentFlag_SetsSilentTrue() { // Act using var context = Context.Create(["--silent"]); // Assert - Assert.IsTrue(context.Silent); - Assert.AreEqual(0, context.ExitCode); + Assert.True(context.Silent); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the validate flag. /// - [TestMethod] + [Fact] public void Context_Create_ValidateFlag_SetsValidateTrue() { // Act using var context = Context.Create(["--validate"]); // Assert - Assert.IsTrue(context.Validate); - Assert.AreEqual(0, context.ExitCode); + Assert.True(context.Validate); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the results flag. /// - [TestMethod] + [Fact] public void Context_Create_ResultsFlag_SetsResultsFile() { // Act using var context = Context.Create(["--results", "test.trx"]); // Assert - Assert.AreEqual("test.trx", context.ResultsFile); - Assert.AreEqual(0, context.ExitCode); + Assert.Equal("test.trx", context.ResultsFile); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the result alias flag. /// - [TestMethod] + [Fact] public void Context_Create_ResultAliasFlag_SetsResultsFile() { // Act using var context = Context.Create(["--result", "test.trx"]); // Assert - Assert.AreEqual("test.trx", context.ResultsFile); - Assert.AreEqual(0, context.ExitCode); + Assert.Equal("test.trx", context.ResultsFile); + Assert.Equal(0, context.ExitCode); } /// /// Test creating a context with the log flag. 
/// - [TestMethod] + [Fact] public void Context_Create_LogFlag_OpensLogFile() { // Arrange @@ -191,12 +191,12 @@ public void Context_Create_LogFlag_OpensLogFile() using (var context = Context.Create(["--log", logFile])) { context.WriteLine("Test message"); - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } // Assert // Verify log file was written - Assert.IsTrue(File.Exists(logFile)); + Assert.True(File.Exists(logFile)); var logContent = File.ReadAllText(logFile); Assert.Contains("Test message", logContent); } @@ -212,40 +212,40 @@ public void Context_Create_LogFlag_OpensLogFile() /// /// Test creating a context with an unknown argument throws exception. /// - [TestMethod] + [Fact] public void Context_Create_UnknownArgument_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--unknown"])); + var exception = Assert.Throws(() => Context.Create(["--unknown"])); Assert.Contains("Unsupported argument", exception.Message); } /// /// Test creating a context with --log flag but no value throws exception. /// - [TestMethod] + [Fact] public void Context_Create_LogFlag_WithoutValue_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--log"])); + var exception = Assert.Throws(() => Context.Create(["--log"])); Assert.Contains("--log", exception.Message); } /// /// Test creating a context with --results flag but no value throws exception. /// - [TestMethod] + [Fact] public void Context_Create_ResultsFlag_WithoutValue_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--results"])); + var exception = Assert.Throws(() => Context.Create(["--results"])); Assert.Contains("--results", exception.Message); } /// /// Test WriteLine writes to console output when not silent. 
/// - [TestMethod] + [Fact] public void Context_WriteLine_NotSilent_WritesToConsole() { // Arrange @@ -272,7 +272,7 @@ public void Context_WriteLine_NotSilent_WritesToConsole() /// /// Test WriteLine does not write to console when silent. /// - [TestMethod] + [Fact] public void Context_WriteLine_Silent_DoesNotWriteToConsole() { // Arrange @@ -299,7 +299,7 @@ public void Context_WriteLine_Silent_DoesNotWriteToConsole() /// /// Test WriteError does not write to console when silent. /// - [TestMethod] + [Fact] public void Context_WriteError_Silent_DoesNotWriteToConsole() { // Arrange @@ -326,7 +326,7 @@ public void Context_WriteError_Silent_DoesNotWriteToConsole() /// /// Test WriteError sets exit code to 1. /// - [TestMethod] + [Fact] public void Context_WriteError_SetsErrorExitCode() { // Arrange @@ -341,7 +341,7 @@ public void Context_WriteError_SetsErrorExitCode() context.WriteError("Test error message"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -352,7 +352,7 @@ public void Context_WriteError_SetsErrorExitCode() /// /// Test WriteError writes message to console when not silent. /// - [TestMethod] + [Fact] public void Context_WriteError_NotSilent_WritesToConsole() { // Arrange @@ -379,7 +379,7 @@ public void Context_WriteError_NotSilent_WritesToConsole() /// /// Test WriteError writes message to log file when logging is enabled. 
/// - [TestMethod] + [Fact] public void Context_WriteError_WritesToLogFile() { // Arrange @@ -390,11 +390,11 @@ public void Context_WriteError_WritesToLogFile() using (var context = Context.Create(["--silent", "--log", logFile])) { context.WriteError("Test error in log"); - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } // Assert - log file should contain the error message - Assert.IsTrue(File.Exists(logFile)); + Assert.True(File.Exists(logFile)); var logContent = File.ReadAllText(logFile); Assert.Contains("Test error in log", logContent); } @@ -410,7 +410,7 @@ public void Context_WriteError_WritesToLogFile() /// /// Test that ErrorCount starts at zero and increments with each WriteError call. /// - [TestMethod] + [Fact] public void Context_ErrorCount_IncrementsOnEachWriteError() { // Arrange @@ -422,16 +422,16 @@ public void Context_ErrorCount_IncrementsOnEachWriteError() using var context = Context.Create(["--silent"]); // Assert initial state - Assert.AreEqual(0, context.ErrorCount); + Assert.Equal(0, context.ErrorCount); // Act - report two errors context.WriteError("First error"); - Assert.AreEqual(1, context.ErrorCount); + Assert.Equal(1, context.ErrorCount); context.WriteError("Second error"); // Assert - Assert.AreEqual(2, context.ErrorCount); + Assert.Equal(2, context.ErrorCount); } finally { @@ -442,70 +442,70 @@ public void Context_ErrorCount_IncrementsOnEachWriteError() /// /// Test creating a context with --depth flag sets the Depth property. /// - [TestMethod] + [Fact] public void Context_Create_DepthFlag_SetsDepth() { // Act using var context = Context.Create(["--depth", "3"]); // Assert - Assert.AreEqual(3, context.Depth); + Assert.Equal(3, context.Depth); } /// /// Test creating a context with no arguments defaults Depth to 1. 
/// - [TestMethod] + [Fact] public void Context_Create_NoArguments_DepthDefaultsToOne() { // Act using var context = Context.Create([]); // Assert - Assert.AreEqual(1, context.Depth); + Assert.Equal(1, context.Depth); } /// /// Test creating a context with --depth flag but no value throws exception. /// - [TestMethod] + [Fact] public void Context_Create_DepthFlag_WithoutValue_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--depth"])); + var exception = Assert.Throws(() => Context.Create(["--depth"])); Assert.Contains("--depth", exception.Message); } /// /// Test creating a context with --depth flag and non-numeric value throws exception. /// - [TestMethod] + [Fact] public void Context_Create_DepthFlag_NonNumeric_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--depth", "abc"])); + var exception = Assert.Throws(() => Context.Create(["--depth", "abc"])); Assert.Contains("--depth", exception.Message); } /// /// Test creating a context with --depth flag and zero value throws exception. /// - [TestMethod] + [Fact] public void Context_Create_DepthFlag_Zero_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--depth", "0"])); + var exception = Assert.Throws(() => Context.Create(["--depth", "0"])); Assert.Contains("--depth", exception.Message); } /// /// Test creating a context with --depth flag and value above 6 throws exception. 
/// - [TestMethod] + [Fact] public void Context_Create_DepthFlag_AboveSix_ThrowsArgumentException() { // Act & Assert - var exception = Assert.ThrowsExactly(() => Context.Create(["--depth", "7"])); + var exception = Assert.Throws(() => Context.Create(["--depth", "7"])); Assert.Contains("--depth", exception.Message); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Configuration/ConfigurationTests.cs b/test/DemaConsulting.FileAssert.Tests/Configuration/ConfigurationTests.cs index 03a86a1..39b1d48 100644 --- a/test/DemaConsulting.FileAssert.Tests/Configuration/ConfigurationTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Configuration/ConfigurationTests.cs @@ -26,14 +26,14 @@ namespace DemaConsulting.FileAssert.Tests.Configuration; /// /// Subsystem tests for the Configuration subsystem. /// -[TestClass] +[Collection("Sequential")] public class ConfigurationTests { /// /// Verifies that the Configuration subsystem loads a YAML file and builds the /// complete test hierarchy (tests → files → rules) correctly. 
/// - [TestMethod] + [Fact] public void Configuration_LoadYaml_BuildsCompleteTestHierarchy() { // Arrange - write a YAML configuration with nested test, file, and rule entries @@ -57,16 +57,16 @@ public void Configuration_LoadYaml_BuildsCompleteTestHierarchy() var config = FileAssertConfig.ReadFromFile(configPath); // Assert - the full hierarchy is correctly constructed - Assert.HasCount(1, config.Tests); + Assert.Single(config.Tests); var test = config.Tests[0]; - Assert.AreEqual("License Check", test.Name); - Assert.HasCount(1, test.Tags); - Assert.AreEqual("license", test.Tags[0]); - Assert.HasCount(1, test.Files); + Assert.Equal("License Check", test.Name); + Assert.Single(test.Tags); + Assert.Equal("license", test.Tags[0]); + Assert.Single(test.Files); var file = test.Files[0]; - Assert.AreEqual("**/*.txt", file.Pattern); - Assert.AreEqual(1, file.Min); - Assert.HasCount(1, file.TextAssert!.Rules); + Assert.Equal("**/*.txt", file.Pattern); + Assert.Equal(1, file.Min); + Assert.Single(file.TextAssert!.Rules); } finally { @@ -78,7 +78,7 @@ public void Configuration_LoadYaml_BuildsCompleteTestHierarchy() /// Verifies that the Configuration subsystem executes only tests that match /// the provided filters when running a configuration with multiple tests. /// - [TestMethod] + [Fact] public void Configuration_RunWithFilter_ExecutesOnlyMatchingTests() { // Arrange - two tests in config; only one file exists so only that test should pass @@ -108,7 +108,7 @@ public void Configuration_RunWithFilter_ExecutesOnlyMatchingTests() config.Run(context, ["Alpha"]); // Assert - no errors because only Alpha ran (and alpha.txt exists) - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -120,7 +120,7 @@ public void Configuration_RunWithFilter_ExecutesOnlyMatchingTests() /// Verifies that the Configuration subsystem executes only tests whose tag matches /// the provided filter when running a configuration with multiple tests. 
/// - [TestMethod] + [Fact] public void Configuration_RunWithTagFilter_ExecutesOnlyMatchingTests() { // Arrange - two tests with different tags; only one file exists so only that test passes @@ -154,7 +154,7 @@ public void Configuration_RunWithTagFilter_ExecutesOnlyMatchingTests() config.Run(context, ["smoke"]); // Assert - no errors because only Alpha ran (matching the smoke tag) and alpha.txt exists - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Configuration/FileAssertConfigTests.cs b/test/DemaConsulting.FileAssert.Tests/Configuration/FileAssertConfigTests.cs index 3fcb8a0..b052218 100644 --- a/test/DemaConsulting.FileAssert.Tests/Configuration/FileAssertConfigTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Configuration/FileAssertConfigTests.cs @@ -27,7 +27,7 @@ namespace DemaConsulting.FileAssert.Tests.Configuration; /// /// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public class FileAssertConfigTests { /// @@ -46,7 +46,7 @@ public class FileAssertConfigTests /// /// Verifies that ReadFromFile successfully parses a valid YAML configuration file. /// - [TestMethod] + [Fact] public void FileAssertConfig_ReadFromFile_ValidFile_ReturnsConfig() { // Arrange - write a minimal config to a temp file @@ -65,8 +65,8 @@ public void FileAssertConfig_ReadFromFile_ValidFile_ReturnsConfig() var config = FileAssertConfig.ReadFromFile(configPath); // Assert - Assert.HasCount(1, config.Tests); - Assert.AreEqual("Sample Test", config.Tests[0].Name); + Assert.Single(config.Tests); + Assert.Equal("Sample Test", config.Tests[0].Name); } finally { @@ -77,30 +77,30 @@ public void FileAssertConfig_ReadFromFile_ValidFile_ReturnsConfig() /// /// Verifies that ReadFromFile throws for a missing file. 
/// - [TestMethod] + [Fact] public void FileAssertConfig_ReadFromFile_FileNotFound_ThrowsFileNotFoundException() { // Arrange - construct a path that does not exist var missingPath = Path.Combine(Path.GetTempPath(), $"nonexistent_{Guid.NewGuid()}.yaml"); // Act & Assert - Assert.ThrowsExactly(() => FileAssertConfig.ReadFromFile(missingPath)); + Assert.Throws(() => FileAssertConfig.ReadFromFile(missingPath)); } /// /// Verifies that ReadFromFile throws when path is null. /// - [TestMethod] + [Fact] public void FileAssertConfig_ReadFromFile_NullPath_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertConfig.ReadFromFile(null!)); + Assert.Throws(() => FileAssertConfig.ReadFromFile(null!)); } /// /// Verifies that Run with no filter executes all tests. /// - [TestMethod] + [Fact] public void FileAssertConfig_Run_WithNoFilter_RunsAllTests() { // Arrange - create temp directory with files that satisfy both test patterns @@ -120,7 +120,7 @@ public void FileAssertConfig_Run_WithNoFilter_RunsAllTests() config.Run(context, []); // Assert - both tests ran and found their files with no errors - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -131,7 +131,7 @@ public void FileAssertConfig_Run_WithNoFilter_RunsAllTests() /// /// Verifies that Run with a matching filter only executes the matching test. 
/// - [TestMethod] + [Fact] public void FileAssertConfig_Run_WithMatchingFilter_RunsMatchingTest() { // Arrange - only create alpha.txt; beta.txt missing would cause an error if Test Beta ran @@ -160,7 +160,7 @@ public void FileAssertConfig_Run_WithMatchingFilter_RunsMatchingTest() config.Run(context, ["Test Alpha"]); // Assert - no error because Test Beta was skipped - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -171,7 +171,7 @@ public void FileAssertConfig_Run_WithMatchingFilter_RunsMatchingTest() /// /// Verifies that Run with a non-matching filter skips all tests. /// - [TestMethod] + [Fact] public void FileAssertConfig_Run_WithNonMatchingFilter_SkipsTests() { // Arrange - both patterns would fail if executed (files are absent with min=1) @@ -198,7 +198,7 @@ public void FileAssertConfig_Run_WithNonMatchingFilter_SkipsTests() config.Run(context, ["No Match"]); // Assert - no error because all tests were skipped by the filter - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -209,7 +209,7 @@ public void FileAssertConfig_Run_WithNonMatchingFilter_SkipsTests() /// /// Verifies that Run writes a TRX results file with Passed outcome when all tests pass. 
/// - [TestMethod] + [Fact] public void FileAssertConfig_Run_WithResultsFile_WritesTrxWithPassedOutcome() { // Arrange @@ -236,8 +236,8 @@ public void FileAssertConfig_Run_WithResultsFile_WritesTrxWithPassedOutcome() config.Run(context, []); // Assert - TRX file contains the test name with Passed outcome - Assert.AreEqual(0, context.ExitCode); - Assert.IsTrue(File.Exists(trxFile)); + Assert.Equal(0, context.ExitCode); + Assert.True(File.Exists(trxFile)); var trxContent = File.ReadAllText(trxFile); Assert.Contains("LicenseCheck", trxContent); Assert.Contains("outcome=\"Passed\"", trxContent); @@ -255,7 +255,7 @@ public void FileAssertConfig_Run_WithResultsFile_WritesTrxWithPassedOutcome() /// /// Verifies that Run writes a JUnit XML results file with failure entries when tests fail. /// - [TestMethod] + [Fact] public void FileAssertConfig_Run_WithResultsFile_WritesJUnitWithFailedOutcome() { // Arrange @@ -283,8 +283,8 @@ public void FileAssertConfig_Run_WithResultsFile_WritesJUnitWithFailedOutcome() config.Run(context, []); // Assert - JUnit file contains the test name with a failure entry - Assert.AreNotEqual(0, context.ExitCode); - Assert.IsTrue(File.Exists(xmlFile)); + Assert.NotEqual(0, context.ExitCode); + Assert.True(File.Exists(xmlFile)); var xmlContent = File.ReadAllText(xmlFile); Assert.Contains("LicenseCheck", xmlContent); Assert.Contains("failures=\"1\"", xmlContent); @@ -302,7 +302,7 @@ public void FileAssertConfig_Run_WithResultsFile_WritesJUnitWithFailedOutcome() /// /// Verifies that ReadFromFile correctly parses a YAML configuration containing a PDF assertion block. 
/// - [TestMethod] + [Fact] public void FileAssertConfig_ReadFromFile_PdfAssertConfig_ParsesCorrectly() { // Arrange - write a config with a pdf: block containing metadata and pages @@ -330,13 +330,13 @@ public void FileAssertConfig_ReadFromFile_PdfAssertConfig_ParsesCorrectly() var config = FileAssertConfig.ReadFromFile(configPath); // Assert - one test was parsed with one file assertion and populated PDF settings - Assert.HasCount(1, config.Tests); - Assert.AreEqual("PDF Check", config.Tests[0].Name); - Assert.HasCount(1, config.Tests[0].Files); + Assert.Single(config.Tests); + Assert.Equal("PDF Check", config.Tests[0].Name); + Assert.Single(config.Tests[0].Files); var fileAssertion = config.Tests[0].Files[0]; - Assert.AreEqual("report.pdf", fileAssertion.Pattern); - Assert.IsNotNull(fileAssertion.PdfAssert); + Assert.Equal("report.pdf", fileAssertion.Pattern); + Assert.NotNull(fileAssertion.PdfAssert); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/DemaConsulting.FileAssert.Tests.csproj b/test/DemaConsulting.FileAssert.Tests/DemaConsulting.FileAssert.Tests.csproj index d69b48e..b1c09aa 100644 --- a/test/DemaConsulting.FileAssert.Tests/DemaConsulting.FileAssert.Tests.csproj +++ b/test/DemaConsulting.FileAssert.Tests/DemaConsulting.FileAssert.Tests.csproj @@ -7,6 +7,7 @@ enable enable + Exe false true true @@ -21,6 +22,7 @@ + @@ -34,9 +36,12 @@ runtime; build; native; contentfiles; analyzers; buildtransitive - - + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + diff --git a/test/DemaConsulting.FileAssert.Tests/IntegrationTests.cs b/test/DemaConsulting.FileAssert.Tests/IntegrationTests.cs index 6631140..b94076b 100644 --- a/test/DemaConsulting.FileAssert.Tests/IntegrationTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/IntegrationTests.cs @@ -27,32 +27,28 @@ namespace DemaConsulting.FileAssert.Tests; /// /// Integration tests that run the FileAssert application through dotnet. 
/// -[TestClass] +[Collection("Sequential")] public partial class IntegrationTests { [GeneratedRegex(@"\d+\.\d+\.\d+")] private static partial Regex SemanticVersionRegex(); - private string _dllPath = string.Empty; + private readonly string _dllPath; /// /// Initialize test by locating the FileAssert DLL. /// - [TestInitialize] - public void TestInitialize() + public IntegrationTests() { - // The DLL should be in the same directory as the test assembly - // because the test project references the main project var baseDir = AppContext.BaseDirectory; _dllPath = PathHelpers.SafePathCombine(baseDir, "DemaConsulting.FileAssert.dll"); - - Assert.IsTrue(File.Exists(_dllPath), $"Could not find FileAssert DLL at {_dllPath}"); + Assert.True(File.Exists(_dllPath), $"Could not find FileAssert DLL at {_dllPath}"); } /// /// Test that version flag outputs version information. /// - [TestMethod] + [Fact] public void IntegrationTest_VersionFlag_OutputsVersion() { // Act @@ -63,14 +59,14 @@ public void IntegrationTest_VersionFlag_OutputsVersion() "--version"); // Assert - Assert.AreEqual(0, exitCode); - Assert.IsTrue(SemanticVersionRegex().IsMatch(output), $"Output did not contain a semantic version: {output}"); + Assert.Equal(0, exitCode); + Assert.True(SemanticVersionRegex().IsMatch(output), $"Output did not contain a semantic version: {output}"); } /// /// Test that help flag outputs usage information. /// - [TestMethod] + [Fact] public void IntegrationTest_HelpFlag_OutputsUsageInformation() { // Act @@ -81,7 +77,7 @@ public void IntegrationTest_HelpFlag_OutputsUsageInformation() "--help"); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); Assert.Contains("Usage:", output); Assert.Contains("Options:", output); Assert.Contains("--version", output); @@ -90,7 +86,7 @@ public void IntegrationTest_HelpFlag_OutputsUsageInformation() /// /// Test that validate flag runs self-validation. 
/// - [TestMethod] + [Fact] public void IntegrationTest_ValidateFlag_RunsValidation() { // Act @@ -101,7 +97,7 @@ public void IntegrationTest_ValidateFlag_RunsValidation() "--validate"); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); Assert.Contains("Total Tests:", output); Assert.Contains("Passed:", output); } @@ -109,7 +105,7 @@ public void IntegrationTest_ValidateFlag_RunsValidation() /// /// Test that validate with results flag generates TRX file. /// - [TestMethod] + [Fact] public void IntegrationTest_ValidateWithResults_GeneratesTrxFile() { // Arrange @@ -127,8 +123,8 @@ public void IntegrationTest_ValidateWithResults_GeneratesTrxFile() resultsFile); // Assert - Assert.AreEqual(0, exitCode); - Assert.IsTrue(File.Exists(resultsFile), "Results file was not created"); + Assert.Equal(0, exitCode); + Assert.True(File.Exists(resultsFile), "Results file was not created"); var trxContent = File.ReadAllText(resultsFile); Assert.Contains(" /// Test that silent flag suppresses output. /// - [TestMethod] + [Fact] public void IntegrationTest_SilentFlag_SuppressesOutput() { // Act @@ -157,14 +153,14 @@ public void IntegrationTest_SilentFlag_SuppressesOutput() "--silent"); // Assert - Assert.AreEqual(0, exitCode); - Assert.AreEqual(string.Empty, output); + Assert.Equal(0, exitCode); + Assert.Equal(string.Empty, output); } /// /// Test that log flag writes output to file. 
/// - [TestMethod] + [Fact] public void IntegrationTest_LogFlag_WritesOutputToFile() { // Arrange @@ -181,8 +177,8 @@ public void IntegrationTest_LogFlag_WritesOutputToFile() logFile); // Assert - Assert.AreEqual(0, exitCode); - Assert.IsTrue(File.Exists(logFile), "Log file was not created"); + Assert.Equal(0, exitCode); + Assert.True(File.Exists(logFile), "Log file was not created"); var logContent = File.ReadAllText(logFile); Assert.Contains("FileAssert version", logContent); @@ -199,7 +195,7 @@ public void IntegrationTest_LogFlag_WritesOutputToFile() /// /// Test that validate with results flag generates JUnit XML file. /// - [TestMethod] + [Fact] public void IntegrationTest_ValidateWithResults_GeneratesJUnitFile() { // Arrange @@ -217,8 +213,8 @@ public void IntegrationTest_ValidateWithResults_GeneratesJUnitFile() resultsFile); // Assert - Assert.AreEqual(0, exitCode); - Assert.IsTrue(File.Exists(resultsFile), "Results file was not created"); + Assert.Equal(0, exitCode); + Assert.True(File.Exists(resultsFile), "Results file was not created"); var xmlContent = File.ReadAllText(resultsFile); Assert.Contains(" /// Test that unknown argument returns error. /// - [TestMethod] + [Fact] public void IntegrationTest_UnknownArgument_ReturnsError() { // Act @@ -246,14 +242,14 @@ public void IntegrationTest_UnknownArgument_ReturnsError() "--unknown"); // Assert - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); Assert.Contains("Error", output); } /// /// Test that positional name/tag filter arguments cause only matching tests to run. 
/// - [TestMethod] + [Fact] public void IntegrationTest_TestFiltering_OnlyRunsMatchingTests() { // Arrange @@ -290,7 +286,7 @@ public void IntegrationTest_TestFiltering_OnlyRunsMatchingTests() "smoke"); // Assert - exit code 0 because the failing regression test was skipped by the filter - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -301,7 +297,7 @@ public void IntegrationTest_TestFiltering_OnlyRunsMatchingTests() /// /// Test that a valid configuration file causes the tool to run assertions and succeed. /// - [TestMethod] + [Fact] public void IntegrationTest_ValidConfig_PassingAssertions_ReturnsZero() { // Arrange @@ -332,7 +328,7 @@ public void IntegrationTest_ValidConfig_PassingAssertions_ReturnsZero() configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -343,7 +339,7 @@ public void IntegrationTest_ValidConfig_PassingAssertions_ReturnsZero() /// /// Test that a configuration file with a failing assertion causes the tool to return non-zero. /// - [TestMethod] + [Fact] public void IntegrationTest_ValidConfig_FailingAssertions_ReturnsNonZero() { // Arrange @@ -374,7 +370,7 @@ public void IntegrationTest_ValidConfig_FailingAssertions_ReturnsNonZero() configPath); // Assert - non-zero exit code indicates assertion failure - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -385,7 +381,7 @@ public void IntegrationTest_ValidConfig_FailingAssertions_ReturnsNonZero() /// /// Test that passing file assertions write a TRX results file with Passed outcomes. 
/// - [TestMethod] + [Fact] public void IntegrationTest_PassingAssertions_WritesTrxWithPassedResults() { // Arrange @@ -418,8 +414,8 @@ public void IntegrationTest_PassingAssertions_WritesTrxWithPassedResults() resultsFile); // Assert - exit code 0 and TRX file contains LicenseCheck with Passed outcome - Assert.AreEqual(0, exitCode); - Assert.IsTrue(File.Exists(resultsFile), "Results file was not created"); + Assert.Equal(0, exitCode); + Assert.True(File.Exists(resultsFile), "Results file was not created"); var trxContent = File.ReadAllText(resultsFile); Assert.Contains("LicenseCheck", trxContent); Assert.Contains("outcome=\"Passed\"", trxContent); @@ -437,7 +433,7 @@ public void IntegrationTest_PassingAssertions_WritesTrxWithPassedResults() /// /// Test that failing file assertions write a JUnit results file with failure entries. /// - [TestMethod] + [Fact] public void IntegrationTest_FailingAssertions_WritesJUnitWithFailedResults() { // Arrange @@ -470,8 +466,8 @@ public void IntegrationTest_FailingAssertions_WritesJUnitWithFailedResults() resultsFile); // Assert - non-zero exit code and JUnit file contains LicenseCheck with a failure entry - Assert.AreNotEqual(0, exitCode); - Assert.IsTrue(File.Exists(resultsFile), "Results file was not created"); + Assert.NotEqual(0, exitCode); + Assert.True(File.Exists(resultsFile), "Results file was not created"); var xmlContent = File.ReadAllText(resultsFile); Assert.Contains("LicenseCheck", xmlContent); Assert.Contains("failures=\"1\"", xmlContent); @@ -489,7 +485,7 @@ public void IntegrationTest_FailingAssertions_WritesJUnitWithFailedResults() /// /// Test that a minimum file count constraint returns a non-zero exit code when too few files are found. 
/// - [TestMethod] + [Fact] public void IntegrationTest_MinCountConstraint_TooFewFiles_ReturnsNonZero() { // Arrange @@ -516,7 +512,7 @@ public void IntegrationTest_MinCountConstraint_TooFewFiles_ReturnsNonZero() configPath); // Assert - non-zero exit code because the min count constraint was not met - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -527,7 +523,7 @@ public void IntegrationTest_MinCountConstraint_TooFewFiles_ReturnsNonZero() /// /// Test that a maximum file count constraint returns a non-zero exit code when exceeded. /// - [TestMethod] + [Fact] public void IntegrationTest_MaxCountConstraint_TooManyFiles_ReturnsNonZero() { // Arrange @@ -557,7 +553,7 @@ public void IntegrationTest_MaxCountConstraint_TooManyFiles_ReturnsNonZero() configPath); // Assert - non-zero exit code because the max count constraint was exceeded - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -568,7 +564,7 @@ public void IntegrationTest_MaxCountConstraint_TooManyFiles_ReturnsNonZero() /// /// Test that a regex rule returns a zero exit code when file content matches the pattern. /// - [TestMethod] + [Fact] public void IntegrationTest_RegexRule_MatchingContent_ReturnsZero() { // Arrange @@ -597,7 +593,7 @@ public void IntegrationTest_RegexRule_MatchingContent_ReturnsZero() configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -608,7 +604,7 @@ public void IntegrationTest_RegexRule_MatchingContent_ReturnsZero() /// /// Test that a regex rule returns a non-zero exit code when file content does not match the pattern. 
/// - [TestMethod] + [Fact] public void IntegrationTest_RegexRule_NonMatchingContent_ReturnsNonZero() { // Arrange @@ -638,7 +634,7 @@ public void IntegrationTest_RegexRule_NonMatchingContent_ReturnsNonZero() configPath); // Assert - non-zero because the file does not match the version pattern - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -649,7 +645,7 @@ public void IntegrationTest_RegexRule_NonMatchingContent_ReturnsNonZero() /// /// Test that an exact count constraint returns a non-zero exit code when file count is wrong. /// - [TestMethod] + [Fact] public void IntegrationTest_ExactCountConstraint_WrongCount_ReturnsNonZero() { // Arrange @@ -679,7 +675,7 @@ public void IntegrationTest_ExactCountConstraint_WrongCount_ReturnsNonZero() configPath); // Assert - non-zero exit code because the exact count constraint was not met - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -690,7 +686,7 @@ public void IntegrationTest_ExactCountConstraint_WrongCount_ReturnsNonZero() /// /// Test that a min-size constraint returns a non-zero exit code when file is too small. /// - [TestMethod] + [Fact] public void IntegrationTest_FileSizeConstraints_TooSmall_ReturnsNonZero() { // Arrange @@ -719,7 +715,7 @@ public void IntegrationTest_FileSizeConstraints_TooSmall_ReturnsNonZero() configPath); // Assert - non-zero exit code because the file is smaller than the minimum size - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -730,7 +726,7 @@ public void IntegrationTest_FileSizeConstraints_TooSmall_ReturnsNonZero() /// /// Test that a max-size constraint returns a non-zero exit code when file is too large. 
/// - [TestMethod] + [Fact] public void IntegrationTest_FileSizeConstraints_TooLarge_ReturnsNonZero() { // Arrange @@ -759,7 +755,7 @@ public void IntegrationTest_FileSizeConstraints_TooLarge_ReturnsNonZero() configPath); // Assert - non-zero exit code because the file exceeds the maximum size - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -770,7 +766,7 @@ public void IntegrationTest_FileSizeConstraints_TooLarge_ReturnsNonZero() /// /// Test that a does-not-contain rule returns a non-zero exit code when forbidden text is present. /// - [TestMethod] + [Fact] public void IntegrationTest_DoesNotContainRule_ForbiddenTextPresent_ReturnsNonZero() { // Arrange @@ -800,7 +796,7 @@ public void IntegrationTest_DoesNotContainRule_ForbiddenTextPresent_ReturnsNonZe configPath); // Assert - non-zero exit code because the forbidden text is present - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -811,7 +807,7 @@ public void IntegrationTest_DoesNotContainRule_ForbiddenTextPresent_ReturnsNonZe /// /// Test that a does-not-contain-regex rule returns a non-zero exit code when the forbidden pattern matches. /// - [TestMethod] + [Fact] public void IntegrationTest_DoesNotContainRegexRule_ForbiddenPatternMatches_ReturnsNonZero() { // Arrange @@ -841,7 +837,7 @@ public void IntegrationTest_DoesNotContainRegexRule_ForbiddenPatternMatches_Retu configPath); // Assert - non-zero exit code because the forbidden pattern matched - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -852,7 +848,7 @@ public void IntegrationTest_DoesNotContainRegexRule_ForbiddenPatternMatches_Retu /// /// Test that an XML assert with a passing query returns a zero exit code. 
/// - [TestMethod] + [Fact] public void IntegrationTest_XmlAssert_PassingQuery_ReturnsZero() { // Arrange @@ -880,7 +876,7 @@ public void IntegrationTest_XmlAssert_PassingQuery_ReturnsZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -891,7 +887,7 @@ public void IntegrationTest_XmlAssert_PassingQuery_ReturnsZero() /// /// Test that an XML assert with an invalid XML file returns a non-zero exit code. /// - [TestMethod] + [Fact] public void IntegrationTest_XmlAssert_InvalidFile_ReturnsNonZero() { // Arrange @@ -915,7 +911,7 @@ public void IntegrationTest_XmlAssert_InvalidFile_ReturnsNonZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { @@ -926,7 +922,7 @@ public void IntegrationTest_XmlAssert_InvalidFile_ReturnsNonZero() /// /// Test that an HTML assert with a passing query returns a zero exit code. /// - [TestMethod] + [Fact] public void IntegrationTest_HtmlAssert_PassingQuery_ReturnsZero() { // Arrange @@ -955,7 +951,7 @@ public void IntegrationTest_HtmlAssert_PassingQuery_ReturnsZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -966,7 +962,7 @@ public void IntegrationTest_HtmlAssert_PassingQuery_ReturnsZero() /// /// Test that a YAML assert with a passing dot-notation query returns a zero exit code. 
/// - [TestMethod] + [Fact] public void IntegrationTest_YamlAssert_PassingQuery_ReturnsZero() { // Arrange @@ -994,7 +990,7 @@ public void IntegrationTest_YamlAssert_PassingQuery_ReturnsZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -1005,7 +1001,7 @@ public void IntegrationTest_YamlAssert_PassingQuery_ReturnsZero() /// /// Test that a JSON assert with a passing dot-notation query returns a zero exit code. /// - [TestMethod] + [Fact] public void IntegrationTest_JsonAssert_PassingQuery_ReturnsZero() { // Arrange @@ -1035,7 +1031,7 @@ public void IntegrationTest_JsonAssert_PassingQuery_ReturnsZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); } finally { @@ -1046,7 +1042,7 @@ public void IntegrationTest_JsonAssert_PassingQuery_ReturnsZero() /// /// Test that a PDF assert with an invalid file returns a non-zero exit code. /// - [TestMethod] + [Fact] public void IntegrationTest_PdfAssert_InvalidFile_ReturnsNonZero() { // Arrange @@ -1070,7 +1066,7 @@ public void IntegrationTest_PdfAssert_InvalidFile_ReturnsNonZero() var exitCode = Runner.Run(out var _, "dotnet", _dllPath, "--silent", "--config", configPath); // Assert - Assert.AreNotEqual(0, exitCode); + Assert.NotEqual(0, exitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertFileTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertFileTests.cs index 62c5d66..e90cc07 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertFileTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertFileTests.cs @@ -27,13 +27,13 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. 
/// -[TestClass] +[Collection("Sequential")] public class FileAssertFileTests { /// /// Verifies that Create succeeds given valid data. /// - [TestMethod] + [Fact] public void FileAssertFile_Create_ValidData_CreatesFile() { // Arrange @@ -43,54 +43,54 @@ public void FileAssertFile_Create_ValidData_CreatesFile() var file = FileAssertFile.Create(data); // Assert - Assert.AreEqual("**/*.txt", file.Pattern); - Assert.AreEqual(1, file.Min); - Assert.AreEqual(10, file.Max); - Assert.IsNull(file.TextAssert); + Assert.Equal("**/*.txt", file.Pattern); + Assert.Equal(1, file.Min); + Assert.Equal(10, file.Max); + Assert.Null(file.TextAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertFile_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertFile.Create(null!)); + Assert.Throws(() => FileAssertFile.Create(null!)); } /// /// Verifies that Create throws when Pattern is null. /// - [TestMethod] + [Fact] public void FileAssertFile_Create_NullPattern_ThrowsInvalidOperationException() { // Arrange var data = new FileAssertFileData { Pattern = null }; // Act & Assert - var exception = Assert.ThrowsExactly(() => FileAssertFile.Create(data)); + var exception = Assert.Throws(() => FileAssertFile.Create(data)); Assert.Contains("pattern", exception.Message); } /// /// Verifies that Create throws when Pattern is blank. /// - [TestMethod] + [Fact] public void FileAssertFile_Create_BlankPattern_ThrowsInvalidOperationException() { // Arrange var data = new FileAssertFileData { Pattern = " " }; // Act & Assert - var exception = Assert.ThrowsExactly(() => FileAssertFile.Create(data)); + var exception = Assert.Throws(() => FileAssertFile.Create(data)); Assert.Contains("pattern", exception.Message); } /// /// Verifies that Run produces no error when there are no matching files and no constraints. 
/// - [TestMethod] + [Fact] public void FileAssertFile_Run_NoMatchingFiles_NoConstraints_NoError() { // Arrange - use an empty temp directory so the pattern matches nothing @@ -105,7 +105,7 @@ public void FileAssertFile_Run_NoMatchingFiles_NoConstraints_NoError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -116,7 +116,7 @@ public void FileAssertFile_Run_NoMatchingFiles_NoConstraints_NoError() /// /// Verifies that Run produces no error when files are found and no constraints are set. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_WithMatchingFiles_NoConstraints_NoError() { // Arrange - create a temp file for the pattern to match @@ -132,7 +132,7 @@ public void FileAssertFile_Run_WithMatchingFiles_NoConstraints_NoError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -143,7 +143,7 @@ public void FileAssertFile_Run_WithMatchingFiles_NoConstraints_NoError() /// /// Verifies that Run reports an error when fewer files are found than the minimum. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_TooFewFiles_WritesError() { // Arrange - empty directory so zero files match, but min requires at least 1 @@ -158,7 +158,7 @@ public void FileAssertFile_Run_TooFewFiles_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -169,7 +169,7 @@ public void FileAssertFile_Run_TooFewFiles_WritesError() /// /// Verifies that Run reports an error when more files are found than the maximum. 
/// - [TestMethod] + [Fact] public void FileAssertFile_Run_TooManyFiles_WritesError() { // Arrange - create two files but constrain max to 1 @@ -186,7 +186,7 @@ public void FileAssertFile_Run_TooManyFiles_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -197,7 +197,7 @@ public void FileAssertFile_Run_TooManyFiles_WritesError() /// /// Verifies that Run produces no error when a content rule is satisfied by the matching file. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_WithContentRule_ContentContainsValue_NoError() { // Arrange - create a file that satisfies the contains rule @@ -217,7 +217,7 @@ public void FileAssertFile_Run_WithContentRule_ContentContainsValue_NoError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -228,7 +228,7 @@ public void FileAssertFile_Run_WithContentRule_ContentContainsValue_NoError() /// /// Verifies that Run reports an error when a content rule is not satisfied by the matching file. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_WithContentRule_ContentMissingValue_WritesError() { // Arrange - create a file that does NOT satisfy the contains rule @@ -248,7 +248,7 @@ public void FileAssertFile_Run_WithContentRule_ContentMissingValue_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -259,7 +259,7 @@ public void FileAssertFile_Run_WithContentRule_ContentMissingValue_WritesError() /// /// Verifies that Run reports an error when the file count does not match the exact count constraint. 
/// - [TestMethod] + [Fact] public void FileAssertFile_Run_WrongCount_WritesError() { // Arrange - create two files but constrain count to exactly 1 @@ -276,7 +276,7 @@ public void FileAssertFile_Run_WrongCount_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -287,7 +287,7 @@ public void FileAssertFile_Run_WrongCount_WritesError() /// /// Verifies that Run reports an error when a file is smaller than the minimum size. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_TooSmall_WritesError() { // Arrange - create an empty file and require at least 10 bytes @@ -303,7 +303,7 @@ public void FileAssertFile_Run_TooSmall_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -314,7 +314,7 @@ public void FileAssertFile_Run_TooSmall_WritesError() /// /// Verifies that Run reports an error when a file exceeds the maximum size. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_TooLarge_WritesError() { // Arrange - create a file with content larger than 5 bytes @@ -330,7 +330,7 @@ public void FileAssertFile_Run_TooLarge_WritesError() file.Run(context, tempDir.FullName); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -342,7 +342,7 @@ public void FileAssertFile_Run_TooLarge_WritesError() /// Verifies that Run checks size constraints against every matched file, not just the first, /// by confirming one error is reported per violating file regardless of enumeration order. 
/// - [TestMethod] + [Fact] public void FileAssertFile_Run_MultipleFiles_MultipleViolateSizeConstraints_WritesErrorForEachViolation() { // Arrange - three files: one within bounds, one too small, one too large @@ -360,8 +360,8 @@ public void FileAssertFile_Run_MultipleFiles_MultipleViolateSizeConstraints_Writ file.Run(context, tempDir.FullName); // Assert - both invalid files should trigger errors regardless of enumeration order - Assert.AreEqual(1, context.ExitCode); - Assert.AreEqual(2, context.ErrorCount); + Assert.Equal(1, context.ExitCode); + Assert.Equal(2, context.ErrorCount); } finally { @@ -373,7 +373,7 @@ public void FileAssertFile_Run_MultipleFiles_MultipleViolateSizeConstraints_Writ /// Verifies that Run applies content rules to every matched file, not just the first, /// by confirming one error is reported per violating file regardless of enumeration order. /// - [TestMethod] + [Fact] public void FileAssertFile_Run_MultipleFiles_MultipleFailContentRule_WritesErrorForEachViolation() { // Arrange - three files: one with the required content, two without @@ -395,8 +395,8 @@ public void FileAssertFile_Run_MultipleFiles_MultipleFailContentRule_WritesError file.Run(context, tempDir.FullName); // Assert - both bad files should trigger errors regardless of enumeration order - Assert.AreEqual(1, context.ExitCode); - Assert.AreEqual(2, context.ErrorCount); + Assert.Equal(1, context.ExitCode); + Assert.Equal(2, context.ErrorCount); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertHtmlAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertHtmlAssertTests.cs index bed19d3..7b55b25 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertHtmlAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertHtmlAssertTests.cs @@ -27,7 +27,7 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. 
/// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertHtmlAssertTests { private const string SampleHtml = """ @@ -46,7 +46,7 @@ public sealed class FileAssertHtmlAssertTests /// /// Verifies that Create succeeds given valid query data. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Create_ValidData_CreatesHtmlAssert() { // Arrange @@ -59,23 +59,23 @@ public void FileAssertHtmlAssert_Create_ValidData_CreatesHtmlAssert() var htmlAssert = FileAssertHtmlAssert.Create(data); // Assert - Assert.IsNotNull(htmlAssert); + Assert.NotNull(htmlAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertHtmlAssert.Create(null!)); + Assert.Throws(() => FileAssertHtmlAssert.Create(null!)); } /// /// Verifies that Run produces no error when the XPath count matches exactly. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_ExactCount_Matches_NoError() { // Arrange - write sample HTML with 2 paragraph elements @@ -91,7 +91,7 @@ public void FileAssertHtmlAssert_Run_ExactCount_Matches_NoError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -102,7 +102,7 @@ public void FileAssertHtmlAssert_Run_ExactCount_Matches_NoError() /// /// Verifies that Run reports an error when the XPath count does not match exactly. 
/// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_ExactCount_Mismatch_WritesError() { // Arrange - sample HTML has 2 paragraphs but we assert count = 5 @@ -118,7 +118,7 @@ public void FileAssertHtmlAssert_Run_ExactCount_Mismatch_WritesError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -129,7 +129,7 @@ public void FileAssertHtmlAssert_Run_ExactCount_Mismatch_WritesError() /// /// Verifies that Run produces no error when the XPath count is within min/max bounds. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_MinMaxCount_WithinBounds_NoError() { // Arrange - sample HTML has 2 paragraphs; assert min=1, max=4 @@ -145,7 +145,7 @@ public void FileAssertHtmlAssert_Run_MinMaxCount_WithinBounds_NoError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -156,7 +156,7 @@ public void FileAssertHtmlAssert_Run_MinMaxCount_WithinBounds_NoError() /// /// Verifies that Run reports an error when the file does not exist and cannot be parsed. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_NonExistentFile_WritesError() { // Arrange - use a path that does not exist to trigger a parse failure @@ -169,13 +169,13 @@ public void FileAssertHtmlAssert_Run_NonExistentFile_WritesError() htmlAssert.Run(context, missingFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that Run reports an error when the XPath query has invalid syntax. 
/// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_InvalidXPathQuery_WritesError() { // Arrange - valid HTML but an XPath expression with invalid syntax @@ -191,7 +191,7 @@ public void FileAssertHtmlAssert_Run_InvalidXPathQuery_WritesError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -202,7 +202,7 @@ public void FileAssertHtmlAssert_Run_InvalidXPathQuery_WritesError() /// /// Verifies that Run produces no error when an XPath query selects HTML nodes by exact text. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_XPathExactTextMatch_Matches_NoError() { // Arrange - sample HTML has a

with text "Paragraph one"; query for exact match @@ -218,7 +218,7 @@ public void FileAssertHtmlAssert_Run_XPathExactTextMatch_Matches_NoError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -229,7 +229,7 @@ public void FileAssertHtmlAssert_Run_XPathExactTextMatch_Matches_NoError() ///

/// Verifies that Run reports an error when an XPath exact text query finds no matching nodes. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() { // Arrange - no

has text "No such paragraph"; query should return 0 nodes @@ -245,7 +245,7 @@ public void FileAssertHtmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -256,7 +256,7 @@ public void FileAssertHtmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() ///

/// Verifies that Run produces no error when an XPath contains() predicate matches an HTML node. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_XPathContainsText_Matches_NoError() { // Arrange - sample HTML has paragraphs containing "Paragraph"; substring query returns 2 @@ -272,7 +272,7 @@ public void FileAssertHtmlAssert_Run_XPathContainsText_Matches_NoError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -283,7 +283,7 @@ public void FileAssertHtmlAssert_Run_XPathContainsText_Matches_NoError() /// /// Verifies that Run reports an error when an XPath contains() predicate finds no matching nodes. /// - [TestMethod] + [Fact] public void FileAssertHtmlAssert_Run_XPathContainsText_NoMatch_WritesError() { // Arrange - no

contains "xyz"; contains() query returns 0 nodes @@ -299,7 +299,7 @@ public void FileAssertHtmlAssert_Run_XPathContainsText_NoMatch_WritesError() htmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertJsonAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertJsonAssertTests.cs index af981a1..d556926 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertJsonAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertJsonAssertTests.cs @@ -27,7 +27,7 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; ///

/// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertJsonAssertTests { private const string SampleJson = """ @@ -44,7 +44,7 @@ public sealed class FileAssertJsonAssertTests /// /// Verifies that Create succeeds given valid query data. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_ValidData_CreatesJsonAssert() { // Arrange @@ -57,23 +57,23 @@ public void FileAssertJsonAssert_Create_ValidData_CreatesJsonAssert() var jsonAssert = FileAssertJsonAssert.Create(data); // Assert - Assert.IsNotNull(jsonAssert); + Assert.NotNull(jsonAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertJsonAssert.Create(null!)); + Assert.Throws(() => FileAssertJsonAssert.Create(null!)); } /// /// Verifies that Run reports an error when the file cannot be parsed as JSON. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_InvalidFile_WritesError() { // Arrange - create a non-JSON file @@ -89,7 +89,7 @@ public void FileAssertJsonAssert_Run_InvalidFile_WritesError() jsonAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -100,7 +100,7 @@ public void FileAssertJsonAssert_Run_InvalidFile_WritesError() /// /// Verifies that Run produces no error when the array count matches exactly. 
/// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_ArrayCount_Matches_NoError() { // Arrange - sample JSON has 3 tools entries; assert count = 3 @@ -116,7 +116,7 @@ public void FileAssertJsonAssert_Run_ArrayCount_Matches_NoError() jsonAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -127,7 +127,7 @@ public void FileAssertJsonAssert_Run_ArrayCount_Matches_NoError() /// /// Verifies that Run reports an error when the array count does not match exactly. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_ArrayCount_Mismatch_WritesError() { // Arrange - sample JSON has 3 tools but we assert count = 5 @@ -143,7 +143,7 @@ public void FileAssertJsonAssert_Run_ArrayCount_Mismatch_WritesError() jsonAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -154,7 +154,7 @@ public void FileAssertJsonAssert_Run_ArrayCount_Mismatch_WritesError() /// /// Verifies that Run produces no error when the array count is within min/max bounds. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_MinMaxCount_WithinBounds_NoError() { // Arrange - sample JSON has 3 tools entries; assert min=2, max=5 @@ -170,7 +170,7 @@ public void FileAssertJsonAssert_Run_MinMaxCount_WithinBounds_NoError() jsonAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -181,7 +181,7 @@ public void FileAssertJsonAssert_Run_MinMaxCount_WithinBounds_NoError() /// /// Verifies that a scalar value counts as 1. 
/// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_ScalarValue_CountsAsOne_NoError() { // Arrange - sample JSON has a scalar "version" key; assert count = 1 @@ -197,7 +197,7 @@ public void FileAssertJsonAssert_Run_ScalarValue_CountsAsOne_NoError() jsonAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -208,7 +208,7 @@ public void FileAssertJsonAssert_Run_ScalarValue_CountsAsOne_NoError() /// /// Verifies that Run reports an error when the count is below the minimum. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_MinCount_BelowMinimum_WritesError() { // Arrange - sample JSON has 3 tools; assert min=5 (3 < 5, should fail) @@ -224,7 +224,7 @@ public void FileAssertJsonAssert_Run_MinCount_BelowMinimum_WritesError() jsonAssert.Run(context, tempFile); // Assert - min violation produces an error - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -235,7 +235,7 @@ public void FileAssertJsonAssert_Run_MinCount_BelowMinimum_WritesError() /// /// Verifies that Run reports an error when the count exceeds the maximum. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Run_MaxCount_ExceedsMaximum_WritesError() { // Arrange - sample JSON has 3 tools; assert max=2 (3 > 2, should fail) @@ -251,7 +251,7 @@ public void FileAssertJsonAssert_Run_MaxCount_ExceedsMaximum_WritesError() jsonAssert.Run(context, tempFile); // Assert - max violation produces an error - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -262,52 +262,52 @@ public void FileAssertJsonAssert_Run_MaxCount_ExceedsMaximum_WritesError() /// /// Verifies that Create throws when query string is empty. 
/// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_EmptyQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = " " } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertJsonAssert.Create(data)); + Assert.Throws(() => FileAssertJsonAssert.Create(data)); } /// /// Verifies that Create throws when query has a trailing dot. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = "tools." } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertJsonAssert.Create(data)); + Assert.Throws(() => FileAssertJsonAssert.Create(data)); } /// /// Verifies that Create throws when query has a leading dot. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = ".tools" } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertJsonAssert.Create(data)); + Assert.Throws(() => FileAssertJsonAssert.Create(data)); } /// /// Verifies that Create throws when query has consecutive dots. /// - [TestMethod] + [Fact] public void FileAssertJsonAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = "a..b" } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertJsonAssert.Create(data)); + Assert.Throws(() => FileAssertJsonAssert.Create(data)); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertPdfAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertPdfAssertTests.cs index 9025445..bdd4b99 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertPdfAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertPdfAssertTests.cs @@ -31,13 +31,13 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. 
/// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertPdfAssertTests { /// /// Verifies that Create succeeds given valid data. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Create_ValidData_CreatesPdfAssert() { // Arrange @@ -50,23 +50,23 @@ public void FileAssertPdfAssert_Create_ValidData_CreatesPdfAssert() var pdfAssert = FileAssertPdfAssert.Create(data); // Assert - Assert.IsNotNull(pdfAssert); + Assert.NotNull(pdfAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertPdfAssert.Create(null!)); + Assert.Throws(() => FileAssertPdfAssert.Create(null!)); } /// /// Verifies that Run reports an error when the file cannot be parsed as PDF. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_InvalidFile_WritesError() { // Arrange - create a temp file with non-PDF content @@ -82,7 +82,7 @@ public void FileAssertPdfAssert_Run_InvalidFile_WritesError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -93,7 +93,7 @@ public void FileAssertPdfAssert_Run_InvalidFile_WritesError() /// /// Verifies that Run produces no error when the PDF satisfies all page count constraints. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_ValidPdf_PageCountSatisfied_NoError() { // Arrange - build a single-page PDF and assert min=1, max=5 @@ -115,7 +115,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_PageCountSatisfied_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -126,7 +126,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_PageCountSatisfied_NoError() /// /// Verifies that Run reports an error when the PDF has fewer pages than the minimum. 
/// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_ValidPdf_TooFewPages_WritesError() { // Arrange - build a single-page PDF but require at least 5 pages @@ -148,7 +148,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_TooFewPages_WritesError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -159,7 +159,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_TooFewPages_WritesError() /// /// Verifies that Run reports an error when the PDF has more pages than the maximum. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_ValidPdf_TooManyPages_WritesError() { // Arrange - build a three-page PDF but allow at most 2 pages @@ -183,7 +183,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_TooManyPages_WritesError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -194,7 +194,7 @@ public void FileAssertPdfAssert_Run_ValidPdf_TooManyPages_WritesError() /// /// Verifies that Run reports an error when a metadata contains assertion fails. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_MetadataContainsRule_FieldMissing_WritesError() { // Arrange - build a PDF without metadata; assert Title contains "Test" @@ -219,7 +219,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_FieldMissing_WritesErro pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -230,7 +230,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_FieldMissing_WritesErro /// /// Verifies that Run reports an error when a text contains rule fails. 
/// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_TextRule_ContentMissing_WritesError() { // Arrange - build a PDF with no text content; assert text contains "Hello" @@ -255,7 +255,7 @@ public void FileAssertPdfAssert_Run_TextRule_ContentMissing_WritesError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -266,7 +266,7 @@ public void FileAssertPdfAssert_Run_TextRule_ContentMissing_WritesError() /// /// Verifies that Run produces no error when the PDF metadata Title contains the required string. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_MetadataContainsRule_TitleMatches_NoError() { // Arrange - build a PDF with Title metadata set and assert it contains "Annual" @@ -292,7 +292,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_TitleMatches_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -303,7 +303,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_TitleMatches_NoError() /// /// Verifies that Run produces no error when the PDF metadata Author field is checked. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_MetadataContainsRule_AuthorField_NoError() { // Arrange - build a PDF with Author metadata and assert that field contains expected text @@ -329,7 +329,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_AuthorField_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -340,7 +340,7 @@ public void FileAssertPdfAssert_Run_MetadataContainsRule_AuthorField_NoError() /// /// Verifies that Run produces no error when a PDF metadata matches regex rule succeeds. 
/// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_MetadataMatchesRule_Matches_NoError() { // Arrange - build a PDF with Title set; assert it matches regex pattern @@ -366,7 +366,7 @@ public void FileAssertPdfAssert_Run_MetadataMatchesRule_Matches_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -377,7 +377,7 @@ public void FileAssertPdfAssert_Run_MetadataMatchesRule_Matches_NoError() /// /// Verifies that Run reports an error when a PDF metadata matches regex rule does not match. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_MetadataMatchesRule_NoMatch_WritesError() { // Arrange - build a PDF with Title set; assert it matches a pattern it does not satisfy @@ -403,7 +403,7 @@ public void FileAssertPdfAssert_Run_MetadataMatchesRule_NoMatch_WritesError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -414,7 +414,7 @@ public void FileAssertPdfAssert_Run_MetadataMatchesRule_NoMatch_WritesError() /// /// Verifies that Run produces no error when the PDF text contains the required string. /// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_TextContainsRule_ContentPresent_NoError() { // Arrange - build a PDF with text "Hello World" and assert text contains "Hello" @@ -441,7 +441,7 @@ public void FileAssertPdfAssert_Run_TextContainsRule_ContentPresent_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -452,7 +452,7 @@ public void FileAssertPdfAssert_Run_TextContainsRule_ContentPresent_NoError() /// /// Verifies that Run produces no error when the PDF text matches a regex pattern. 
/// - [TestMethod] + [Fact] public void FileAssertPdfAssert_Run_TextMatchesRule_PatternMatches_NoError() { // Arrange - build a PDF with text "Hello World 2024" and assert it matches a regex @@ -479,7 +479,7 @@ public void FileAssertPdfAssert_Run_TextMatchesRule_PatternMatches_NoError() pdfAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertRuleTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertRuleTests.cs index fcf4d0e..6a31bf1 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertRuleTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertRuleTests.cs @@ -27,13 +27,13 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for and its derived rule classes. /// -[TestClass] +[Collection("Sequential")] public class FileAssertRuleTests { /// /// Verifies that the factory creates a when 'contains' is specified. /// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithContains_ReturnsContainsRule() { // Arrange @@ -43,14 +43,14 @@ public void FileAssertRule_Create_WithContains_ReturnsContainsRule() var rule = FileAssertRule.Create(data); // Assert - Assert.IsInstanceOfType(rule); - Assert.AreEqual("expected text", ((FileAssertContainsRule)rule).Value); + Assert.IsType(rule); + Assert.Equal("expected text", ((FileAssertContainsRule)rule).Value); } /// /// Verifies that the factory creates a when 'matches' is specified. 
/// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithMatches_ReturnsMatchesRule() { // Arrange @@ -60,21 +60,21 @@ public void FileAssertRule_Create_WithMatches_ReturnsMatchesRule() var rule = FileAssertRule.Create(data); // Assert - Assert.IsInstanceOfType(rule); - Assert.AreEqual(@"\d+", ((FileAssertMatchesRule)rule).Pattern); + Assert.IsType(rule); + Assert.Equal(@"\d+", ((FileAssertMatchesRule)rule).Pattern); } /// /// Verifies that the factory throws when no rule type is specified. /// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithNoType_ThrowsInvalidOperationException() { // Arrange var data = new FileAssertRuleData(); // Act & Assert - var exception = Assert.ThrowsExactly(() => FileAssertRule.Create(data)); + var exception = Assert.Throws(() => FileAssertRule.Create(data)); Assert.Contains("contains", exception.Message); Assert.Contains("does-not-contain", exception.Message); Assert.Contains("matches", exception.Message); @@ -84,17 +84,17 @@ public void FileAssertRule_Create_WithNoType_ThrowsInvalidOperationException() /// /// Verifies that the factory throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithNullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertRule.Create(null!)); + Assert.Throws(() => FileAssertRule.Create(null!)); } /// /// Verifies that produces no error when content contains the value. /// - [TestMethod] + [Fact] public void FileAssertContainsRule_Apply_ContentContainsValue_NoError() { // Arrange @@ -105,13 +105,13 @@ public void FileAssertContainsRule_Apply_ContentContainsValue_NoError() rule.Apply(context, "test.txt", "say hello world"); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } /// /// Verifies that reports an error when content is missing the value. 
/// - [TestMethod] + [Fact] public void FileAssertContainsRule_Apply_ContentMissingValue_WritesError() { // Arrange @@ -122,13 +122,13 @@ public void FileAssertContainsRule_Apply_ContentMissingValue_WritesError() rule.Apply(context, "test.txt", "nothing relevant here"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that produces no error when content matches the pattern. /// - [TestMethod] + [Fact] public void FileAssertMatchesRule_Apply_ContentMatchesPattern_NoError() { // Arrange @@ -139,13 +139,13 @@ public void FileAssertMatchesRule_Apply_ContentMatchesPattern_NoError() rule.Apply(context, "test.txt", "version 42 is here"); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } /// /// Verifies that reports an error when content does not match. /// - [TestMethod] + [Fact] public void FileAssertMatchesRule_Apply_ContentDoesNotMatchPattern_WritesError() { // Arrange @@ -156,13 +156,13 @@ public void FileAssertMatchesRule_Apply_ContentDoesNotMatchPattern_WritesError() rule.Apply(context, "test.txt", "no numbers here at all"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that the factory creates a when 'does-not-contain' is specified. /// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithDoesNotContain_ReturnsDoesNotContainRule() { // Arrange @@ -172,14 +172,14 @@ public void FileAssertRule_Create_WithDoesNotContain_ReturnsDoesNotContainRule() var rule = FileAssertRule.Create(data); // Assert - Assert.IsInstanceOfType(rule); - Assert.AreEqual("forbidden text", ((FileAssertDoesNotContainRule)rule).Value); + Assert.IsType(rule); + Assert.Equal("forbidden text", ((FileAssertDoesNotContainRule)rule).Value); } /// /// Verifies that the factory creates a when 'does-not-contain-regex' is specified. 
/// - [TestMethod] + [Fact] public void FileAssertRule_Create_WithDoesNotContainRegex_ReturnsDoesNotMatchRule() { // Arrange @@ -189,14 +189,14 @@ public void FileAssertRule_Create_WithDoesNotContainRegex_ReturnsDoesNotMatchRul var rule = FileAssertRule.Create(data); // Assert - Assert.IsInstanceOfType(rule); - Assert.AreEqual(@"FATAL|ERROR", ((FileAssertDoesNotMatchRule)rule).Pattern); + Assert.IsType(rule); + Assert.Equal(@"FATAL|ERROR", ((FileAssertDoesNotMatchRule)rule).Pattern); } /// /// Verifies that reports an error when content contains the forbidden value. /// - [TestMethod] + [Fact] public void FileAssertDoesNotContainRule_Apply_ContentContainsValue_WritesError() { // Arrange @@ -207,13 +207,13 @@ public void FileAssertDoesNotContainRule_Apply_ContentContainsValue_WritesError( rule.Apply(context, "test.txt", "the password123 is here"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that produces no error when content does not contain the forbidden value. /// - [TestMethod] + [Fact] public void FileAssertDoesNotContainRule_Apply_ContentMissingValue_NoError() { // Arrange @@ -224,13 +224,13 @@ public void FileAssertDoesNotContainRule_Apply_ContentMissingValue_NoError() rule.Apply(context, "test.txt", "no secrets here"); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } /// /// Verifies that reports an error when content matches the forbidden pattern. /// - [TestMethod] + [Fact] public void FileAssertDoesNotMatchRule_Apply_ContentMatchesPattern_WritesError() { // Arrange @@ -241,13 +241,13 @@ public void FileAssertDoesNotMatchRule_Apply_ContentMatchesPattern_WritesError() rule.Apply(context, "test.txt", "FATAL: something went wrong"); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } /// /// Verifies that produces no error when content does not match the forbidden pattern. 
/// - [TestMethod] + [Fact] public void FileAssertDoesNotMatchRule_Apply_ContentDoesNotMatchPattern_NoError() { // Arrange @@ -258,6 +258,6 @@ public void FileAssertDoesNotMatchRule_Apply_ContentDoesNotMatchPattern_NoError( rule.Apply(context, "test.txt", "everything is fine"); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTestTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTestTests.cs index beb27a6..babbe4b 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTestTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTestTests.cs @@ -27,13 +27,13 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public class FileAssertTestTests { /// /// Verifies that Create succeeds given valid data. /// - [TestMethod] + [Fact] public void FileAssertTest_Create_ValidData_CreatesTest() { // Arrange @@ -47,53 +47,53 @@ public void FileAssertTest_Create_ValidData_CreatesTest() var test = FileAssertTest.Create(data); // Assert - Assert.AreEqual("My Test", test.Name); - Assert.HasCount(2, test.Tags); - Assert.HasCount(0, test.Files); + Assert.Equal("My Test", test.Name); + Assert.Equal(2, test.Tags.Count); + Assert.Empty(test.Files); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertTest_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertTest.Create(null!)); + Assert.Throws(() => FileAssertTest.Create(null!)); } /// /// Verifies that Create throws when Name is null. 
/// - [TestMethod] + [Fact] public void FileAssertTest_Create_NullName_ThrowsInvalidOperationException() { // Arrange var data = new FileAssertTestData { Name = null }; // Act & Assert - var exception = Assert.ThrowsExactly(() => FileAssertTest.Create(data)); + var exception = Assert.Throws(() => FileAssertTest.Create(data)); Assert.Contains("name", exception.Message); } /// /// Verifies that Create throws when Name is whitespace. /// - [TestMethod] + [Fact] public void FileAssertTest_Create_WhitespaceName_ThrowsInvalidOperationException() { // Arrange var data = new FileAssertTestData { Name = " " }; // Act & Assert - var exception = Assert.ThrowsExactly(() => FileAssertTest.Create(data)); + var exception = Assert.Throws(() => FileAssertTest.Create(data)); Assert.Contains("name", exception.Message); } /// /// Verifies that MatchesFilter returns true when the filter list is empty. /// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_EmptyFilters_ReturnsTrue() { // Arrange @@ -103,13 +103,13 @@ public void FileAssertTest_MatchesFilter_EmptyFilters_ReturnsTrue() var result = test.MatchesFilter([]); // Assert - Assert.IsTrue(result); + Assert.True(result); } /// /// Verifies that MatchesFilter returns true when a filter matches the test name. /// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_MatchingName_ReturnsTrue() { // Arrange @@ -119,13 +119,13 @@ public void FileAssertTest_MatchesFilter_MatchingName_ReturnsTrue() var result = test.MatchesFilter(["Alpha"]); // Assert - Assert.IsTrue(result); + Assert.True(result); } /// /// Verifies that MatchesFilter returns true when a filter matches one of the test's tags. 
/// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_MatchingTag_ReturnsTrue() { // Arrange @@ -139,13 +139,13 @@ public void FileAssertTest_MatchesFilter_MatchingTag_ReturnsTrue() var result = test.MatchesFilter(["smoke"]); // Assert - Assert.IsTrue(result); + Assert.True(result); } /// /// Verifies that MatchesFilter returns false when no filter matches the name or tags. /// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_NonMatchingFilter_ReturnsFalse() { // Arrange @@ -159,13 +159,13 @@ public void FileAssertTest_MatchesFilter_NonMatchingFilter_ReturnsFalse() var result = test.MatchesFilter(["Beta"]); // Assert - Assert.IsFalse(result); + Assert.False(result); } /// /// Verifies that MatchesFilter name comparison is case-insensitive. /// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_CaseInsensitiveName_ReturnsTrue() { // Arrange @@ -175,13 +175,13 @@ public void FileAssertTest_MatchesFilter_CaseInsensitiveName_ReturnsTrue() var result = test.MatchesFilter(["ALPHA TEST"]); // Assert - Assert.IsTrue(result); + Assert.True(result); } /// /// Verifies that MatchesFilter tag comparison is case-insensitive. /// - [TestMethod] + [Fact] public void FileAssertTest_MatchesFilter_CaseInsensitiveTag_ReturnsTrue() { // Arrange @@ -195,13 +195,13 @@ public void FileAssertTest_MatchesFilter_CaseInsensitiveTag_ReturnsTrue() var result = test.MatchesFilter(["SMOKE"]); // Assert - Assert.IsTrue(result); + Assert.True(result); } /// /// Verifies that Run executes all file assertions in the test. 
/// - [TestMethod] + [Fact] public void FileAssertTest_Run_RunsAllFiles() { // Arrange - create a temp directory with a file matching the pattern @@ -221,7 +221,7 @@ public void FileAssertTest_Run_RunsAllFiles() test.Run(context, tempDir.FullName); // Assert - min=1 would have produced an error if the file had not been found - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -232,7 +232,7 @@ public void FileAssertTest_Run_RunsAllFiles() /// /// Verifies that Run throws when context is null. /// - [TestMethod] + [Fact] public void FileAssertTest_Run_NullContext_ThrowsArgumentNullException() { // Arrange @@ -240,13 +240,13 @@ public void FileAssertTest_Run_NullContext_ThrowsArgumentNullException() var basePath = Directory.GetCurrentDirectory(); // Act & Assert - Assert.ThrowsExactly(() => test.Run(null!, basePath)); + Assert.Throws(() => test.Run(null!, basePath)); } /// /// Verifies that Run throws when basePath is null. /// - [TestMethod] + [Fact] public void FileAssertTest_Run_NullBasePath_ThrowsArgumentNullException() { // Arrange @@ -254,6 +254,6 @@ public void FileAssertTest_Run_NullBasePath_ThrowsArgumentNullException() using var context = Context.Create(["--silent"]); // Act & Assert - Assert.ThrowsExactly(() => test.Run(context, null!)); + Assert.Throws(() => test.Run(context, null!)); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTextAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTextAssertTests.cs index b1aaac8..e6177d2 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTextAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertTextAssertTests.cs @@ -27,13 +27,13 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertTextAssertTests { /// /// Verifies that Create succeeds given valid data. 
/// - [TestMethod] + [Fact] public void FileAssertTextAssert_Create_ValidData_CreatesTextAssert() { // Arrange @@ -46,24 +46,24 @@ public void FileAssertTextAssert_Create_ValidData_CreatesTextAssert() var textAssert = FileAssertTextAssert.Create(data); // Assert - Assert.IsNotNull(textAssert); - Assert.HasCount(1, textAssert.Rules); + Assert.NotNull(textAssert); + Assert.Single(textAssert.Rules); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertTextAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertTextAssert.Create(null!)); + Assert.Throws(() => FileAssertTextAssert.Create(null!)); } /// /// Verifies that Run produces no error when the file contains the required text. /// - [TestMethod] + [Fact] public void FileAssertTextAssert_Run_FileContainsText_NoError() { // Arrange - create a temp file with content that satisfies the rule @@ -79,7 +79,7 @@ public void FileAssertTextAssert_Run_FileContainsText_NoError() textAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -90,7 +90,7 @@ public void FileAssertTextAssert_Run_FileContainsText_NoError() /// /// Verifies that Run reports an error when the file does not contain the required text. /// - [TestMethod] + [Fact] public void FileAssertTextAssert_Run_FileMissingText_WritesError() { // Arrange - create a temp file with content that does NOT satisfy the rule @@ -106,7 +106,7 @@ public void FileAssertTextAssert_Run_FileMissingText_WritesError() textAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -117,7 +117,7 @@ public void FileAssertTextAssert_Run_FileMissingText_WritesError() /// /// Verifies that Run reports an error when the file cannot be read (I/O error). 
/// - [TestMethod] + [Fact] public void FileAssertTextAssert_Run_NonExistentFile_WritesError() { // Arrange - use a path that does not exist to trigger an I/O failure @@ -130,15 +130,15 @@ public void FileAssertTextAssert_Run_NonExistentFile_WritesError() textAssert.Run(context, missingFile); // Assert - an error was reported - Assert.AreEqual(1, context.ExitCode); - Assert.AreEqual(1, context.ErrorCount); + Assert.Equal(1, context.ExitCode); + Assert.Equal(1, context.ErrorCount); } /// /// Verifies that Run evaluates all rules and reports multiple errors without /// short-circuiting on the first failure. /// - [TestMethod] + [Fact] public void FileAssertTextAssert_Run_MultipleRulesMultipleViolations_WritesMultipleErrors() { // Arrange - create a temp file whose content satisfies neither rule @@ -158,8 +158,8 @@ public void FileAssertTextAssert_Run_MultipleRulesMultipleViolations_WritesMulti textAssert.Run(context, tempFile); // Assert - both rules must have been evaluated (no short-circuit) - Assert.AreEqual(2, context.ErrorCount); - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(2, context.ErrorCount); + Assert.Equal(1, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertXmlAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertXmlAssertTests.cs index ced2e72..837bd59 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertXmlAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertXmlAssertTests.cs @@ -27,7 +27,7 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertXmlAssertTests { private const string SampleXml = """ @@ -42,7 +42,7 @@ public sealed class FileAssertXmlAssertTests /// /// Verifies that Create succeeds given valid query data. 
/// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Create_ValidData_CreatesXmlAssert() { // Arrange @@ -55,23 +55,23 @@ public void FileAssertXmlAssert_Create_ValidData_CreatesXmlAssert() var xmlAssert = FileAssertXmlAssert.Create(data); // Assert - Assert.IsNotNull(xmlAssert); + Assert.NotNull(xmlAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertXmlAssert.Create(null!)); + Assert.Throws(() => FileAssertXmlAssert.Create(null!)); } /// /// Verifies that Run reports an error when the file cannot be parsed as XML. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_InvalidFile_WritesError() { // Arrange - create a non-XML file @@ -87,7 +87,7 @@ public void FileAssertXmlAssert_Run_InvalidFile_WritesError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -98,7 +98,7 @@ public void FileAssertXmlAssert_Run_InvalidFile_WritesError() /// /// Verifies that Run produces no error when the XPath count matches exactly. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_ExactCount_Matches_NoError() { // Arrange - write sample XML with 3 item elements @@ -114,7 +114,7 @@ public void FileAssertXmlAssert_Run_ExactCount_Matches_NoError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -125,7 +125,7 @@ public void FileAssertXmlAssert_Run_ExactCount_Matches_NoError() /// /// Verifies that Run reports an error when the XPath count does not match exactly. 
/// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_ExactCount_Mismatch_WritesError() { // Arrange - sample XML has 3 items but we assert count = 5 @@ -141,7 +141,7 @@ public void FileAssertXmlAssert_Run_ExactCount_Mismatch_WritesError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -152,7 +152,7 @@ public void FileAssertXmlAssert_Run_ExactCount_Mismatch_WritesError() /// /// Verifies that Run produces no error when the XPath count is within min/max bounds. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_MinMaxCount_WithinBounds_NoError() { // Arrange - sample XML has 3 items; assert min=2, max=5 @@ -168,7 +168,7 @@ public void FileAssertXmlAssert_Run_MinMaxCount_WithinBounds_NoError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -179,7 +179,7 @@ public void FileAssertXmlAssert_Run_MinMaxCount_WithinBounds_NoError() /// /// Verifies that Run reports an error when the XPath query is invalid syntax. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_InvalidXPathQuery_WritesError() { // Arrange - valid XML but an XPath expression with invalid syntax @@ -195,7 +195,7 @@ public void FileAssertXmlAssert_Run_InvalidXPathQuery_WritesError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -206,7 +206,7 @@ public void FileAssertXmlAssert_Run_InvalidXPathQuery_WritesError() /// /// Verifies that Run produces no error when an XPath query selects nodes by exact text content. 
/// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_XPathExactTextMatch_Matches_NoError() { // Arrange - sample XML has an item with text "two"; query for exact match should return 1 @@ -222,7 +222,7 @@ public void FileAssertXmlAssert_Run_XPathExactTextMatch_Matches_NoError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -233,7 +233,7 @@ public void FileAssertXmlAssert_Run_XPathExactTextMatch_Matches_NoError() /// /// Verifies that Run reports an error when an XPath exact text query finds no matching nodes. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() { // Arrange - no item has text "four"; exact match query should return 0 nodes @@ -249,7 +249,7 @@ public void FileAssertXmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -260,7 +260,7 @@ public void FileAssertXmlAssert_Run_XPathExactTextMatch_NoMatch_WritesError() /// /// Verifies that Run produces no error when an XPath contains() predicate matches a node. /// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_XPathContainsText_Matches_NoError() { // Arrange - sample XML has an item with text "three"; substring "hre" matches @@ -276,7 +276,7 @@ public void FileAssertXmlAssert_Run_XPathContainsText_Matches_NoError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -287,7 +287,7 @@ public void FileAssertXmlAssert_Run_XPathContainsText_Matches_NoError() /// /// Verifies that Run reports an error when an XPath contains() predicate finds no matching nodes. 
/// - [TestMethod] + [Fact] public void FileAssertXmlAssert_Run_XPathContainsText_NoMatch_WritesError() { // Arrange - no item contains "xyz"; contains() query returns 0 nodes @@ -303,7 +303,7 @@ public void FileAssertXmlAssert_Run_XPathContainsText_NoMatch_WritesError() xmlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertYamlAssertTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertYamlAssertTests.cs index 085f1bb..f729f0a 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertYamlAssertTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/FileAssertYamlAssertTests.cs @@ -27,7 +27,7 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Unit tests for the class. /// -[TestClass] +[Collection("Sequential")] public sealed class FileAssertYamlAssertTests { private const string SampleYaml = """ @@ -41,7 +41,7 @@ public sealed class FileAssertYamlAssertTests /// /// Verifies that Create succeeds given valid query data. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_ValidData_CreatesYamlAssert() { // Arrange @@ -54,23 +54,23 @@ public void FileAssertYamlAssert_Create_ValidData_CreatesYamlAssert() var yamlAssert = FileAssertYamlAssert.Create(data); // Assert - Assert.IsNotNull(yamlAssert); + Assert.NotNull(yamlAssert); } /// /// Verifies that Create throws when data is null. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_NullData_ThrowsArgumentNullException() { // Act & Assert - Assert.ThrowsExactly(() => FileAssertYamlAssert.Create(null!)); + Assert.Throws(() => FileAssertYamlAssert.Create(null!)); } /// /// Verifies that Run reports an error when the file cannot be parsed as YAML. 
/// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_InvalidFile_WritesError() { // Arrange - write malformed YAML content that will cause a parse error @@ -86,7 +86,7 @@ public void FileAssertYamlAssert_Run_InvalidFile_WritesError() yamlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -97,7 +97,7 @@ public void FileAssertYamlAssert_Run_InvalidFile_WritesError() /// /// Verifies that Run produces no error when the sequence count matches exactly. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_SequenceCount_Matches_NoError() { // Arrange - sample YAML has 3 tools entries; assert count = 3 @@ -113,7 +113,7 @@ public void FileAssertYamlAssert_Run_SequenceCount_Matches_NoError() yamlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -124,7 +124,7 @@ public void FileAssertYamlAssert_Run_SequenceCount_Matches_NoError() /// /// Verifies that Run reports an error when the sequence count does not match exactly. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_SequenceCount_Mismatch_WritesError() { // Arrange - sample YAML has 3 tools but we assert count = 5 @@ -140,7 +140,7 @@ public void FileAssertYamlAssert_Run_SequenceCount_Mismatch_WritesError() yamlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -151,7 +151,7 @@ public void FileAssertYamlAssert_Run_SequenceCount_Mismatch_WritesError() /// /// Verifies that Run produces no error when the sequence count is within min/max bounds. 
/// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_MinMaxCount_WithinBounds_NoError() { // Arrange - sample YAML has 3 tools entries; assert min=2, max=5 @@ -167,7 +167,7 @@ public void FileAssertYamlAssert_Run_MinMaxCount_WithinBounds_NoError() yamlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -178,7 +178,7 @@ public void FileAssertYamlAssert_Run_MinMaxCount_WithinBounds_NoError() /// /// Verifies that a scalar value counts as 1. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_ScalarValue_CountsAsOne_NoError() { // Arrange - sample YAML has a scalar "version" key; assert count = 1 @@ -194,7 +194,7 @@ public void FileAssertYamlAssert_Run_ScalarValue_CountsAsOne_NoError() yamlAssert.Run(context, tempFile); // Assert - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -205,7 +205,7 @@ public void FileAssertYamlAssert_Run_ScalarValue_CountsAsOne_NoError() /// /// Verifies that Run reports an error when the count is below the minimum. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_MinCount_BelowMinimum_WritesError() { // Arrange - sample YAML has 3 tools; assert min=5 (3 < 5, should fail) @@ -221,7 +221,7 @@ public void FileAssertYamlAssert_Run_MinCount_BelowMinimum_WritesError() yamlAssert.Run(context, tempFile); // Assert - min violation produces an error - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -232,7 +232,7 @@ public void FileAssertYamlAssert_Run_MinCount_BelowMinimum_WritesError() /// /// Verifies that Run reports an error when the count exceeds the maximum. 
/// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Run_MaxCount_ExceedsMaximum_WritesError() { // Arrange - sample YAML has 3 tools; assert max=2 (3 > 2, should fail) @@ -248,7 +248,7 @@ public void FileAssertYamlAssert_Run_MaxCount_ExceedsMaximum_WritesError() yamlAssert.Run(context, tempFile); // Assert - max violation produces an error - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -259,52 +259,52 @@ public void FileAssertYamlAssert_Run_MaxCount_ExceedsMaximum_WritesError() /// /// Verifies that Create throws when query string is empty. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_EmptyQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = " " } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertYamlAssert.Create(data)); + Assert.Throws(() => FileAssertYamlAssert.Create(data)); } /// /// Verifies that Create throws when query has a trailing dot. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_TrailingDotQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = "tools." } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertYamlAssert.Create(data)); + Assert.Throws(() => FileAssertYamlAssert.Create(data)); } /// /// Verifies that Create throws when query has a leading dot. /// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_LeadingDotQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = ".tools" } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertYamlAssert.Create(data)); + Assert.Throws(() => FileAssertYamlAssert.Create(data)); } /// /// Verifies that Create throws when query has consecutive dots. 
/// - [TestMethod] + [Fact] public void FileAssertYamlAssert_Create_ConsecutiveDotsQuery_ThrowsInvalidOperationException() { // Arrange var data = new List { new() { Query = "a..b" } }; // Act & Assert - Assert.ThrowsExactly(() => FileAssertYamlAssert.Create(data)); + Assert.Throws(() => FileAssertYamlAssert.Create(data)); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Modeling/ModelingTests.cs b/test/DemaConsulting.FileAssert.Tests/Modeling/ModelingTests.cs index 1b0b307..b93f056 100644 --- a/test/DemaConsulting.FileAssert.Tests/Modeling/ModelingTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Modeling/ModelingTests.cs @@ -27,14 +27,14 @@ namespace DemaConsulting.FileAssert.Tests.Modeling; /// /// Subsystem tests for the Modeling subsystem. /// -[TestClass] +[Collection("Sequential")] public class ModelingTests { /// /// Verifies that the Modeling subsystem executes the full test → file → rule /// chain without errors when all constraints are satisfied. /// - [TestMethod] + [Fact] public void Modeling_ExecuteChain_PassesWhenAllConstraintsMet() { // Arrange - create a real file with content that satisfies all rules @@ -67,7 +67,7 @@ public void Modeling_ExecuteChain_PassesWhenAllConstraintsMet() test.Run(context, tempDir.FullName); // Assert - no errors reported - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { @@ -79,7 +79,7 @@ public void Modeling_ExecuteChain_PassesWhenAllConstraintsMet() /// Verifies that the Modeling subsystem reports failures through the context /// when a content rule is not satisfied. 
/// - [TestMethod] + [Fact] public void Modeling_ExecuteChain_ReportsFailuresThroughContext() { // Arrange - create a file that does NOT contain the required text @@ -111,7 +111,7 @@ public void Modeling_ExecuteChain_ReportsFailuresThroughContext() test.Run(context, tempDir.FullName); // Assert - an error was reported and the exit code is non-zero - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -123,7 +123,7 @@ public void Modeling_ExecuteChain_ReportsFailuresThroughContext() /// Verifies that the Modeling subsystem reports a parse error when a file-type /// assertion block is declared but the file cannot be parsed as the declared format. /// - [TestMethod] + [Fact] public void Modeling_FileTypeParsing_InvalidXml_ReportsParseError() { // Arrange - create a file with invalid XML content @@ -155,7 +155,7 @@ public void Modeling_FileTypeParsing_InvalidXml_ReportsParseError() test.Run(context, tempDir.FullName); // Assert - an error was reported because the file could not be parsed as XML - Assert.AreEqual(1, context.ExitCode); + Assert.Equal(1, context.ExitCode); } finally { @@ -167,7 +167,7 @@ public void Modeling_FileTypeParsing_InvalidXml_ReportsParseError() /// Verifies that the Modeling subsystem evaluates XPath query assertions against /// a valid XML document and reports no error when the count constraint is satisfied. 
/// - [TestMethod] + [Fact] public void Modeling_QueryAssertions_XmlQueryMeetsCount_NoError() { // Arrange - create a valid XML file with elements the query will match @@ -204,7 +204,7 @@ public void Modeling_QueryAssertions_XmlQueryMeetsCount_NoError() test.Run(context, tempDir.FullName); // Assert - no errors reported because the query matched the expected count - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } finally { diff --git a/test/DemaConsulting.FileAssert.Tests/ProgramTests.cs b/test/DemaConsulting.FileAssert.Tests/ProgramTests.cs index 7ef7e14..66024a7 100644 --- a/test/DemaConsulting.FileAssert.Tests/ProgramTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/ProgramTests.cs @@ -25,13 +25,13 @@ namespace DemaConsulting.FileAssert.Tests; /// /// Unit tests for the Program class. /// -[TestClass] +[Collection("Sequential")] public class ProgramTests { /// /// Test that Run with version flag displays version only. /// - [TestMethod] + [Fact] public void Program_Run_WithVersionFlag_DisplaysVersionOnly() { // Arrange @@ -60,7 +60,7 @@ public void Program_Run_WithVersionFlag_DisplaysVersionOnly() /// /// Test that Run with help flag displays usage information. /// - [TestMethod] + [Fact] public void Program_Run_WithHelpFlag_DisplaysUsageInformation() { // Arrange @@ -90,7 +90,7 @@ public void Program_Run_WithHelpFlag_DisplaysUsageInformation() /// /// Test that Run with validate flag runs validation. /// - [TestMethod] + [Fact] public void Program_Run_WithValidateFlag_RunsValidation() { // Arrange @@ -117,7 +117,7 @@ public void Program_Run_WithValidateFlag_RunsValidation() /// /// Test that Run with no arguments displays default behavior. /// - [TestMethod] + [Fact] public void Program_Run_NoArguments_DisplaysDefaultBehavior() { // Arrange @@ -145,13 +145,13 @@ public void Program_Run_NoArguments_DisplaysDefaultBehavior() /// /// Test that version property returns non-empty version string. 
/// - [TestMethod] + [Fact] public void Program_Version_ReturnsNonEmptyString() { // Act var version = Program.Version; // Assert - Assert.IsFalse(string.IsNullOrWhiteSpace(version)); + Assert.False(string.IsNullOrWhiteSpace(version)); } } diff --git a/test/DemaConsulting.FileAssert.Tests/SelfTest/SelfTestTests.cs b/test/DemaConsulting.FileAssert.Tests/SelfTest/SelfTestTests.cs index d95412b..41c4200 100644 --- a/test/DemaConsulting.FileAssert.Tests/SelfTest/SelfTestTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/SelfTest/SelfTestTests.cs @@ -26,14 +26,14 @@ namespace DemaConsulting.FileAssert.Tests.SelfTest; /// /// Subsystem tests for the SelfTest subsystem. /// -[TestClass] +[Collection("Sequential")] public class SelfTestTests { /// /// Verifies that the SelfTest subsystem runs all built-in tests and produces /// a summary that includes pass and fail counts. /// - [TestMethod] + [Fact] public void SelfTest_Run_ExecutesBuiltInTestsAndProducesSummary() { // Arrange @@ -53,7 +53,7 @@ public void SelfTest_Run_ExecutesBuiltInTestsAndProducesSummary() } // Assert - context is disposed above so the log file is fully flushed and closed - Assert.AreEqual(0, exitCode); + Assert.Equal(0, exitCode); var logContent = File.ReadAllText(logPath); Assert.Contains("Total Tests:", logContent); @@ -69,7 +69,7 @@ public void SelfTest_Run_ExecutesBuiltInTestsAndProducesSummary() /// /// Verifies that the SelfTest subsystem prints a system information header. /// - [TestMethod] + [Fact] public void SelfTest_Run_WhenInvoked_PrintsSystemInfoHeader() { // Arrange @@ -99,7 +99,7 @@ public void SelfTest_Run_WhenInvoked_PrintsSystemInfoHeader() /// /// Verifies that the SelfTest subsystem writes a TRX results file when --results is specified. 
/// - [TestMethod] + [Fact] public void SelfTest_Run_WithResultsFile_WritesTrxResultsFile() { // Arrange @@ -115,7 +115,7 @@ public void SelfTest_Run_WithResultsFile_WritesTrxResultsFile() } // Assert - TRX results file must exist and contain test result content - Assert.IsTrue(File.Exists(resultsPath), "TRX results file should be created"); + Assert.True(File.Exists(resultsPath), "TRX results file should be created"); var content = File.ReadAllText(resultsPath); Assert.Contains("TestRun", content); } diff --git a/test/DemaConsulting.FileAssert.Tests/SelfTest/ValidationTests.cs b/test/DemaConsulting.FileAssert.Tests/SelfTest/ValidationTests.cs index 2f10685..2e7c741 100644 --- a/test/DemaConsulting.FileAssert.Tests/SelfTest/ValidationTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/SelfTest/ValidationTests.cs @@ -26,13 +26,13 @@ namespace DemaConsulting.FileAssert.Tests.SelfTest; /// /// Unit tests for the Validation class. /// -[TestClass] +[Collection("Sequential")] public class ValidationTests { /// /// Test that Run throws ArgumentNullException when context is null. /// - [TestMethod] + [Fact] public void Validation_Run_NullContext_ThrowsArgumentNullException() { // Arrange @@ -40,13 +40,13 @@ public void Validation_Run_NullContext_ThrowsArgumentNullException() // Act & Assert // Proves that Run guards against null context with ArgumentNullException. - Assert.ThrowsExactly(() => Validation.Run(null!)); + Assert.Throws(() => Validation.Run(null!)); } /// /// Test that Run prints a summary containing total, passed, and failed counts. /// - [TestMethod] + [Fact] public void Validation_Run_WithSilentContext_PrintsSummary() { // Arrange @@ -80,7 +80,7 @@ public void Validation_Run_WithSilentContext_PrintsSummary() /// /// Test that Run exits with code zero when all self-validation tests pass. 
/// - [TestMethod] + [Fact] public void Validation_Run_WithSilentContext_ExitCodeIsZero() { // Arrange @@ -91,13 +91,13 @@ public void Validation_Run_WithSilentContext_ExitCodeIsZero() // Assert // Proves that a successful validation run leaves the exit code at 0. - Assert.AreEqual(0, context.ExitCode); + Assert.Equal(0, context.ExitCode); } /// /// Test that Run writes a valid TRX file when the results path ends with .trx. /// - [TestMethod] + [Fact] public void Validation_Run_WithTrxResultsFile_WritesTrxFile() { // Arrange @@ -111,7 +111,7 @@ public void Validation_Run_WithTrxResultsFile_WritesTrxFile() // Assert // Proves that Run creates a TRX-format file at the requested path. - Assert.IsTrue(File.Exists(trxFile)); + Assert.True(File.Exists(trxFile)); var content = File.ReadAllText(trxFile); Assert.Contains(" /// Test that Run writes a valid JUnit XML file when the results path ends with .xml. /// - [TestMethod] + [Fact] public void Validation_Run_WithXmlResultsFile_WritesXmlFile() { // Arrange @@ -141,7 +141,7 @@ public void Validation_Run_WithXmlResultsFile_WritesXmlFile() // Assert // Proves that Run creates a JUnit-format XML file at the requested path. - Assert.IsTrue(File.Exists(xmlFile)); + Assert.True(File.Exists(xmlFile)); var content = File.ReadAllText(xmlFile); Assert.Contains(" /// Test that Run does not write a results file when the extension is unsupported. /// - [TestMethod] + [Fact] public void Validation_Run_WithUnsupportedResultsFormat_DoesNotWriteFile() { // Arrange @@ -171,7 +171,7 @@ public void Validation_Run_WithUnsupportedResultsFormat_DoesNotWriteFile() // Assert // Proves that Run does not create a file for unsupported formats. - Assert.IsFalse(File.Exists(jsonFile)); + Assert.False(File.Exists(jsonFile)); } finally { @@ -185,7 +185,7 @@ public void Validation_Run_WithUnsupportedResultsFormat_DoesNotWriteFile() /// /// Test that Run logs a passing FileAssert_Results result. 
/// - [TestMethod] + [Fact] public void Validation_Run_WithSilentContext_LogContainsFileAssertResults() { // Act & Assert @@ -196,7 +196,7 @@ public void Validation_Run_WithSilentContext_LogContainsFileAssertResults() /// /// Test that Run logs a passing FileAssert_Exists result. /// - [TestMethod] + [Fact] public void Validation_Run_WithSilentContext_LogContainsFileAssertExists() { // Act & Assert @@ -207,7 +207,7 @@ public void Validation_Run_WithSilentContext_LogContainsFileAssertExists() /// /// Test that Run logs a passing FileAssert_Contains result. /// - [TestMethod] + [Fact] public void Validation_Run_WithSilentContext_LogContainsFileAssertContains() { // Act & Assert @@ -218,7 +218,7 @@ public void Validation_Run_WithSilentContext_LogContainsFileAssertContains() /// /// Test that Run uses the specified depth for the markdown heading. /// - [TestMethod] + [Fact] public void Validation_Run_WithDepth_UsesSpecifiedHeadingDepth() { // Arrange diff --git a/test/DemaConsulting.FileAssert.Tests/Utilities/PathHelpersTests.cs b/test/DemaConsulting.FileAssert.Tests/Utilities/PathHelpersTests.cs index 81389a2..b3a7969 100644 --- a/test/DemaConsulting.FileAssert.Tests/Utilities/PathHelpersTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Utilities/PathHelpersTests.cs @@ -25,13 +25,13 @@ namespace DemaConsulting.FileAssert.Tests.Utilities; /// /// Tests for the PathHelpers class. /// -[TestClass] +[Collection("Sequential")] public class PathHelpersTests { /// /// Test that SafePathCombine correctly combines valid paths. 
/// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_ValidPaths_CombinesCorrectly() { // Arrange @@ -42,13 +42,13 @@ public void PathHelpers_SafePathCombine_ValidPaths_CombinesCorrectly() var result = PathHelpers.SafePathCombine(basePath, relativePath); // Assert - Assert.AreEqual(Path.Combine(basePath, relativePath), result); + Assert.Equal(Path.Combine(basePath, relativePath), result); } /// /// Test that SafePathCombine throws ArgumentException for path traversal with double dots. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_PathTraversalWithDoubleDots_ThrowsArgumentException() { // Arrange @@ -56,7 +56,7 @@ public void PathHelpers_SafePathCombine_PathTraversalWithDoubleDots_ThrowsArgume var relativePath = "../etc/passwd"; // Act & Assert - var exception = Assert.ThrowsExactly(() => + var exception = Assert.Throws(() => PathHelpers.SafePathCombine(basePath, relativePath)); Assert.Contains("Invalid path component", exception.Message); } @@ -64,7 +64,7 @@ public void PathHelpers_SafePathCombine_PathTraversalWithDoubleDots_ThrowsArgume /// /// Test that SafePathCombine throws ArgumentException for path with double dots in middle. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_DoubleDotsInMiddle_ThrowsArgumentException() { // Arrange @@ -72,7 +72,7 @@ public void PathHelpers_SafePathCombine_DoubleDotsInMiddle_ThrowsArgumentExcepti var relativePath = "subfolder/../../../etc/passwd"; // Act & Assert - var exception = Assert.ThrowsExactly(() => + var exception = Assert.Throws(() => PathHelpers.SafePathCombine(basePath, relativePath)); Assert.Contains("Invalid path component", exception.Message); } @@ -80,13 +80,13 @@ public void PathHelpers_SafePathCombine_DoubleDotsInMiddle_ThrowsArgumentExcepti /// /// Test that SafePathCombine throws ArgumentException for absolute paths. 
/// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_AbsolutePath_ThrowsArgumentException() { // Test Unix absolute path var unixBasePath = "/home/user/project"; var unixRelativePath = "/etc/passwd"; - var unixException = Assert.ThrowsExactly(() => + var unixException = Assert.Throws(() => PathHelpers.SafePathCombine(unixBasePath, unixRelativePath)); Assert.Contains("Invalid path component", unixException.Message); @@ -95,7 +95,7 @@ public void PathHelpers_SafePathCombine_AbsolutePath_ThrowsArgumentException() { var windowsBasePath = "C:\\Users\\project"; var windowsRelativePath = "C:\\Windows\\System32\\file.txt"; - var windowsException = Assert.ThrowsExactly(() => + var windowsException = Assert.Throws(() => PathHelpers.SafePathCombine(windowsBasePath, windowsRelativePath)); Assert.Contains("Invalid path component", windowsException.Message); } @@ -104,7 +104,7 @@ public void PathHelpers_SafePathCombine_AbsolutePath_ThrowsArgumentException() /// /// Test that SafePathCombine correctly handles current directory reference. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_CurrentDirectoryReference_CombinesCorrectly() { // Arrange @@ -115,13 +115,13 @@ public void PathHelpers_SafePathCombine_CurrentDirectoryReference_CombinesCorrec var result = PathHelpers.SafePathCombine(basePath, relativePath); // Assert - Assert.AreEqual(Path.Combine(basePath, relativePath), result); + Assert.Equal(Path.Combine(basePath, relativePath), result); } /// /// Test that SafePathCombine correctly handles nested paths. 
/// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_NestedPaths_CombinesCorrectly() { // Arrange @@ -132,13 +132,13 @@ public void PathHelpers_SafePathCombine_NestedPaths_CombinesCorrectly() var result = PathHelpers.SafePathCombine(basePath, relativePath); // Assert - Assert.AreEqual(Path.Combine(basePath, relativePath), result); + Assert.Equal(Path.Combine(basePath, relativePath), result); } /// /// Test that SafePathCombine correctly handles empty relative path. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_EmptyRelativePath_ReturnsBasePath() { // Arrange @@ -149,13 +149,13 @@ public void PathHelpers_SafePathCombine_EmptyRelativePath_ReturnsBasePath() var result = PathHelpers.SafePathCombine(basePath, relativePath); // Assert - Assert.AreEqual(Path.Combine(basePath, relativePath), result); + Assert.Equal(Path.Combine(basePath, relativePath), result); } /// /// Test that SafePathCombine allows filenames that contain ".." as a substring but not as a path component. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_DoubleDotInFilename_CombinesCorrectly() { // Arrange - filename with ".." as substring, not a path traversal component @@ -166,13 +166,13 @@ public void PathHelpers_SafePathCombine_DoubleDotInFilename_CombinesCorrectly() var result = PathHelpers.SafePathCombine(basePath, relativePath); // Assert - Assert.AreEqual(Path.Combine(basePath, relativePath), result); + Assert.Equal(Path.Combine(basePath, relativePath), result); } /// /// Test that SafePathCombine throws ArgumentNullException when base path is null. 
/// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_NullBasePath_ThrowsArgumentNullException() { // Arrange @@ -180,14 +180,14 @@ public void PathHelpers_SafePathCombine_NullBasePath_ThrowsArgumentNullException var relativePath = "subfolder/file.txt"; // Act & Assert - Assert.ThrowsExactly(() => + Assert.Throws(() => PathHelpers.SafePathCombine(basePath!, relativePath)); } /// /// Test that SafePathCombine throws ArgumentNullException when relative path is null. /// - [TestMethod] + [Fact] public void PathHelpers_SafePathCombine_NullRelativePath_ThrowsArgumentNullException() { // Arrange @@ -195,7 +195,7 @@ public void PathHelpers_SafePathCombine_NullRelativePath_ThrowsArgumentNullExcep string? relativePath = null; // Act & Assert - Assert.ThrowsExactly(() => + Assert.Throws(() => PathHelpers.SafePathCombine(basePath, relativePath!)); } } diff --git a/test/DemaConsulting.FileAssert.Tests/Utilities/UtilitiesTests.cs b/test/DemaConsulting.FileAssert.Tests/Utilities/UtilitiesTests.cs index 7a49f50..d011ca2 100644 --- a/test/DemaConsulting.FileAssert.Tests/Utilities/UtilitiesTests.cs +++ b/test/DemaConsulting.FileAssert.Tests/Utilities/UtilitiesTests.cs @@ -25,14 +25,14 @@ namespace DemaConsulting.FileAssert.Tests.Utilities; /// /// Subsystem tests for the Utilities subsystem. /// -[TestClass] +[Collection("Sequential")] public class UtilitiesTests { /// /// Verifies that the Utilities subsystem's safe path combination prevents /// path traversal when used against the real file system. 
/// - [TestMethod] + [Fact] public void Utilities_SafePathCombine_PreventsPathTraversalToFileSystem() { // Arrange @@ -40,7 +40,7 @@ public void Utilities_SafePathCombine_PreventsPathTraversalToFileSystem() try { // Act & Assert - a traversal attempt is rejected with ArgumentException - Assert.ThrowsExactly( + Assert.Throws( () => PathHelpers.SafePathCombine(tempDir.FullName, "../escape.txt")); // Act & Assert - a valid relative path within the base is accepted From e99091be27a6c578fd8aec87298d157ca2333eb2 Mon Sep 17 00:00:00 2001 From: Malcolm Nixon Date: Mon, 4 May 2026 09:41:09 -0400 Subject: [PATCH 2/3] Updates for generated/ folders --- .github/workflows/build.yaml | 119 +++++++++++++++-------- docs/build_notes/definition.yaml | 4 +- docs/code_quality/definition.yaml | 4 +- docs/code_review_plan/definition.yaml | 2 +- docs/code_review_report/definition.yaml | 2 +- docs/requirements_doc/definition.yaml | 4 +- docs/requirements_report/definition.yaml | 2 +- 7 files changed, 86 insertions(+), 51 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 8f8f7ff..9c51738 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -466,6 +466,15 @@ jobs: buildmark versionmark reviewmark fileassert echo "✓ Tool versions captured" + # === PREPARE DOCUMENT OUTPUT === + # Creates the shared docs/generated/ folder that all document sections write PDFs into. + # This step is intentionally separate from the document sections so any individual + # section can be commented out without breaking the shared output directory. + + - name: Create documents output directory + shell: bash + run: mkdir -p docs/generated + # === COMPILE BUILD NOTES === # This section generates the Build Notes document. BuildMark and VersionMark self-validations # run here to co-locate their evidence with the document that depends on their output. @@ -473,6 +482,10 @@ jobs: # validates the outputs contain expected content. 
# Downstream projects: Add any additional build notes steps here. + - name: Create build notes output directories + shell: bash + run: mkdir -p docs/build_notes/generated + - name: Run BuildMark self-validation run: > dotnet buildmark @@ -492,20 +505,20 @@ jobs: run: > dotnet buildmark --build-version ${{ inputs.version }} - --report docs/build_notes.md + --report docs/build_notes/generated/build_notes.md --report-depth 1 - name: Display Build Notes Report shell: bash run: | echo "=== Build Notes Report ===" - cat docs/build_notes.md + cat docs/build_notes/generated/build_notes.md - name: Publish Tool Versions shell: bash run: | echo "Publishing tool versions..." - dotnet versionmark --publish --report docs/build_notes/versions.md --report-depth 1 \ + dotnet versionmark --publish --report docs/build_notes/generated/versions.md --report-depth 1 \ -- "artifacts/**/versionmark-*.json" echo "✓ Tool versions published" @@ -513,7 +526,7 @@ jobs: shell: bash run: | echo "=== Tool Versions Report ===" - cat docs/build_notes/versions.md + cat docs/build_notes/generated/versions.md - name: Generate Build Notes HTML with Pandoc shell: bash @@ -523,14 +536,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/build_notes/build_notes.html + --output docs/build_notes/generated/build_notes.html - name: Generate Build Notes PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/build_notes/build_notes.html - "docs/FileAssert Build Notes.pdf" + docs/build_notes/generated/build_notes.html + "docs/generated/FileAssert Build Notes.pdf" - name: Assert Build Notes Documents with FileAssert run: > @@ -538,6 +551,10 @@ jobs: --results artifacts/fileassert-build-notes.trx build-notes + - name: Copy Build Notes report to docs/generated + shell: bash + run: cp docs/build_notes/generated/build_notes.md docs/generated/build_notes.md + # === COMPILE CODE QUALITY REPORT === # 
This section generates the Code Quality document. SarifMark and SonarMark self-validations # run here to co-locate their evidence with the document that depends on their output. @@ -545,6 +562,10 @@ jobs: # validates the outputs contain expected content. # Downstream projects: Add any additional code quality steps here. + - name: Create code quality output directory + shell: bash + run: mkdir -p docs/code_quality/generated + - name: Run SarifMark self-validation run: > dotnet sarifmark @@ -561,7 +582,7 @@ jobs: run: > dotnet sarifmark --sarif artifacts/csharp.sarif - --report docs/code_quality/codeql-quality.md + --report docs/code_quality/generated/codeql-quality.md --heading "FileAssert CodeQL Analysis" --report-depth 1 @@ -569,7 +590,7 @@ jobs: shell: bash run: | echo "=== CodeQL Quality Report ===" - cat docs/code_quality/codeql-quality.md + cat docs/code_quality/generated/codeql-quality.md - name: Generate SonarCloud Quality Report shell: bash @@ -581,14 +602,14 @@ jobs: --project-key demaconsulting_FileAssert --branch ${{ github.ref_name }} --token "$SONAR_TOKEN" - --report docs/code_quality/sonar-quality.md + --report docs/code_quality/generated/sonar-quality.md --report-depth 1 - name: Display SonarCloud Quality Report shell: bash run: | echo "=== SonarCloud Quality Report ===" - cat docs/code_quality/sonar-quality.md + cat docs/code_quality/generated/sonar-quality.md - name: Generate Code Quality HTML with Pandoc shell: bash @@ -598,14 +619,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/code_quality/quality.html + --output docs/code_quality/generated/quality.html - name: Generate Code Quality PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/code_quality/quality.html - "docs/FileAssert Code Quality.pdf" + docs/code_quality/generated/quality.html + "docs/generated/FileAssert Code Quality.pdf" - name: Assert Code Quality 
Documents with FileAssert run: > @@ -620,6 +641,10 @@ jobs: # PDF, and FileAssert validates the outputs contain expected content. # Downstream projects: Add any additional code review steps here. + - name: Create code review output directories + shell: bash + run: mkdir -p docs/code_review_plan/generated docs/code_review_report/generated + - name: Run ReviewMark self-validation run: > dotnet reviewmark @@ -631,22 +656,22 @@ jobs: # TODO: Add --enforce once reviews branch is populated with review evidence PDFs and index.json run: > dotnet reviewmark - --plan docs/code_review_plan/plan.md + --plan docs/code_review_plan/generated/plan.md --plan-depth 1 - --report docs/code_review_report/report.md + --report docs/code_review_report/generated/report.md --report-depth 1 - name: Display Review Plan shell: bash run: | echo "=== Review Plan ===" - cat docs/code_review_plan/plan.md + cat docs/code_review_plan/generated/plan.md - name: Display Review Report shell: bash run: | echo "=== Review Report ===" - cat docs/code_review_report/report.md + cat docs/code_review_report/generated/report.md - name: Generate Review Plan HTML with Pandoc shell: bash @@ -656,14 +681,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/code_review_plan/plan.html + --output docs/code_review_plan/generated/plan.html - name: Generate Review Plan PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/code_review_plan/plan.html - "docs/FileAssert Review Plan.pdf" + docs/code_review_plan/generated/plan.html + "docs/generated/FileAssert Review Plan.pdf" - name: Generate Review Report HTML with Pandoc shell: bash @@ -673,14 +698,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/code_review_report/report.html + --output docs/code_review_report/generated/report.html - name: 
Generate Review Report PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/code_review_report/report.html - "docs/FileAssert Review Report.pdf" + docs/code_review_report/generated/report.html + "docs/generated/FileAssert Review Report.pdf" - name: Assert Code Review Documents with FileAssert run: > @@ -693,6 +718,10 @@ jobs: # FileAssert validates that the HTML and PDF outputs contain expected content. # Downstream projects: Add any additional design document steps here. + - name: Create design output directory + shell: bash + run: mkdir -p docs/design/generated + - name: Generate Design HTML with Pandoc shell: bash run: > @@ -701,14 +730,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/design/design.html + --output docs/design/generated/design.html - name: Generate Design PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/design/design.html - "docs/FileAssert Software Design.pdf" + docs/design/generated/design.html + "docs/generated/FileAssert Software Design.pdf" - name: Assert Design Documents with FileAssert run: > @@ -721,6 +750,10 @@ jobs: # FileAssert validates that the HTML and PDF outputs contain expected content. # Downstream projects: Add any additional user guide steps here. 
+ - name: Create user guide output directory + shell: bash + run: mkdir -p docs/user_guide/generated + - name: Generate User Guide HTML with Pandoc shell: bash run: > @@ -729,14 +762,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/user_guide/introduction.html + --output docs/user_guide/generated/user_guide.html - name: Generate User Guide PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/user_guide/introduction.html - "docs/FileAssert User Guide.pdf" + docs/user_guide/generated/user_guide.html + "docs/generated/FileAssert User Guide.pdf" - name: Assert User Guide Documents with FileAssert run: > @@ -766,6 +799,10 @@ jobs: # confirm the requirements pipeline produced well-formed documents. # Downstream projects: Add any additional requirements steps here. + - name: Create requirements output directories + shell: bash + run: mkdir -p docs/requirements_doc/generated docs/requirements_report/generated + - name: Run ReqStream self-validation run: > dotnet reqstream @@ -777,9 +814,9 @@ jobs: dotnet reqstream --requirements requirements.yaml --tests "artifacts/**/*.trx" - --report docs/requirements_doc/requirements.md - --justifications docs/requirements_doc/justifications.md - --matrix docs/requirements_report/trace_matrix.md + --report docs/requirements_doc/generated/requirements.md + --justifications docs/requirements_doc/generated/justifications.md + --matrix docs/requirements_report/generated/trace_matrix.md --enforce - name: Generate Requirements HTML with Pandoc @@ -790,14 +827,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/requirements_doc/requirements.html + --output docs/requirements_doc/generated/requirements.html - name: Generate Requirements PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - 
docs/requirements_doc/requirements.html - "docs/FileAssert Requirements.pdf" + docs/requirements_doc/generated/requirements.html + "docs/generated/FileAssert Requirements.pdf" - name: Generate Trace Matrix HTML with Pandoc shell: bash @@ -807,14 +844,14 @@ jobs: --filter node_modules/.bin/mermaid-filter.cmd --metadata version="${{ inputs.version }}" --metadata date="$(date +'%Y-%m-%d')" - --output docs/requirements_report/trace_matrix.html + --output docs/requirements_report/generated/trace_matrix.html - name: Generate Trace Matrix PDF with WeasyPrint run: > dotnet weasyprint --pdf-variant pdf/a-3u - docs/requirements_report/trace_matrix.html - "docs/FileAssert Trace Matrix.pdf" + docs/requirements_report/generated/trace_matrix.html + "docs/generated/FileAssert Trace Matrix.pdf" - name: Assert Requirements Documents with FileAssert run: > @@ -830,6 +867,4 @@ jobs: uses: actions/upload-artifact@v7 with: name: documents - path: |- - docs/*.pdf - docs/build_notes.md + path: docs/generated/* diff --git a/docs/build_notes/definition.yaml b/docs/build_notes/definition.yaml index 207a375..ba1360b 100644 --- a/docs/build_notes/definition.yaml +++ b/docs/build_notes/definition.yaml @@ -5,8 +5,8 @@ resource-path: input-files: - docs/build_notes/title.txt - docs/build_notes/introduction.md - - docs/build_notes.md - - docs/build_notes/versions.md + - docs/build_notes/generated/build_notes.md + - docs/build_notes/generated/versions.md template: template.html table-of-contents: true number-sections: true diff --git a/docs/code_quality/definition.yaml b/docs/code_quality/definition.yaml index 68c58f2..fed5f02 100644 --- a/docs/code_quality/definition.yaml +++ b/docs/code_quality/definition.yaml @@ -5,8 +5,8 @@ resource-path: input-files: - docs/code_quality/title.txt - docs/code_quality/introduction.md - - docs/code_quality/codeql-quality.md - - docs/code_quality/sonar-quality.md + - docs/code_quality/generated/codeql-quality.md + - docs/code_quality/generated/sonar-quality.md 
template: template.html table-of-contents: true number-sections: true diff --git a/docs/code_review_plan/definition.yaml b/docs/code_review_plan/definition.yaml index 3a24f0b..56989bf 100644 --- a/docs/code_review_plan/definition.yaml +++ b/docs/code_review_plan/definition.yaml @@ -5,7 +5,7 @@ resource-path: input-files: - docs/code_review_plan/title.txt - docs/code_review_plan/introduction.md - - docs/code_review_plan/plan.md + - docs/code_review_plan/generated/plan.md template: template.html table-of-contents: true number-sections: true diff --git a/docs/code_review_report/definition.yaml b/docs/code_review_report/definition.yaml index 6498e6c..b238d43 100644 --- a/docs/code_review_report/definition.yaml +++ b/docs/code_review_report/definition.yaml @@ -5,7 +5,7 @@ resource-path: input-files: - docs/code_review_report/title.txt - docs/code_review_report/introduction.md - - docs/code_review_report/report.md + - docs/code_review_report/generated/report.md template: template.html table-of-contents: true number-sections: true diff --git a/docs/requirements_doc/definition.yaml b/docs/requirements_doc/definition.yaml index 0f4ccd2..628b789 100644 --- a/docs/requirements_doc/definition.yaml +++ b/docs/requirements_doc/definition.yaml @@ -5,8 +5,8 @@ resource-path: input-files: - docs/requirements_doc/title.txt - docs/requirements_doc/introduction.md - - docs/requirements_doc/requirements.md - - docs/requirements_doc/justifications.md + - docs/requirements_doc/generated/requirements.md + - docs/requirements_doc/generated/justifications.md template: template.html table-of-contents: true number-sections: true diff --git a/docs/requirements_report/definition.yaml b/docs/requirements_report/definition.yaml index 918a645..9ee62a4 100644 --- a/docs/requirements_report/definition.yaml +++ b/docs/requirements_report/definition.yaml @@ -5,7 +5,7 @@ resource-path: input-files: - docs/requirements_report/title.txt - docs/requirements_report/introduction.md - - 
docs/requirements_report/trace_matrix.md + - docs/requirements_report/generated/trace_matrix.md template: template.html table-of-contents: true number-sections: true From 87ca3a39d95530e18e59813528e003a3e0c9edf9 Mon Sep 17 00:00:00 2001 From: Malcolm Nixon Date: Mon, 4 May 2026 09:56:00 -0400 Subject: [PATCH 3/3] Add building of verification PDF. --- .github/workflows/build.yaml | 36 ++++++++++++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 9c51738..f3b6896 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -745,6 +745,38 @@ jobs: --results artifacts/fileassert-design.trx design + # === COMPILE VERIFICATION DOCUMENT === + # This section generates the Verification Design document using Pandoc and WeasyPrint. + # FileAssert validates that the HTML and PDF outputs contain expected content. + # Downstream projects: Add any additional verification document steps here. + + - name: Create verification output directory + shell: bash + run: mkdir -p docs/verification/generated + + - name: Generate Verification HTML with Pandoc + shell: bash + run: > + dotnet pandoc + --defaults docs/verification/definition.yaml + --filter node_modules/.bin/mermaid-filter.cmd + --metadata version="${{ inputs.version }}" + --metadata date="$(date +'%Y-%m-%d')" + --output docs/verification/generated/verification.html + + - name: Generate Verification PDF with WeasyPrint + run: > + dotnet weasyprint + --pdf-variant pdf/a-3u + docs/verification/generated/verification.html + "docs/generated/FileAssert Software Verification Design.pdf" + + - name: Assert Verification Documents with FileAssert + run: > + dotnet fileassert + --results artifacts/fileassert-verification.trx + verification + # === COMPILE USER GUIDE === # This section generates the User Guide document using Pandoc and WeasyPrint. 
# FileAssert validates that the HTML and PDF outputs contain expected content. @@ -778,8 +810,8 @@ jobs: user-guide # === FILEASSERT SELF-VALIDATION === - # By this point Pandoc and WeasyPrint have each produced 6 validated documents - # (Build Notes, Code Quality, Review Plan, Review Report, Design, User Guide), + # By this point Pandoc and WeasyPrint have each produced 7 validated documents + # (Build Notes, Code Quality, Review Plan, Review Report, Design, Verification, User Guide), # providing strong OTS evidence for both tools before ReqStream runs. FileAssert # self-validation confirms the assertion tool itself is operational. # Downstream projects: Add any additional FileAssert self-validation steps here.