OCCT Git - occt.git/commitdiff
Testing - Adding ASCII code validation (#593)
author Pasukhin Dmitry <dpasukhi@opencascade.com>
Sun, 6 Jul 2025 20:00:22 +0000 (21:00 +0100)
committer GitHub <noreply@github.com>
Sun, 6 Jul 2025 20:00:22 +0000 (21:00 +0100)
- Split the existing regression test artifact upload into separate Windows and macOS/Linux steps with platform-specific paths.
- Added an `ascii-check` job to the build-and-test workflow to detect non-ASCII characters in changed files.
- Introduced a new composite action (`.github/actions/ascii-check`) that scans diffs and reports any non-ASCII content; its core detection logic is sketched below.
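
For reviewers who want to reproduce the check locally before pushing, the following is a minimal PowerShell sketch of the same detection logic the composite action runs in CI. The base branch and file pattern mirror the action's defaults and are only illustrative; the committed action additionally writes GitHub step outputs and uploads a report artifact.

    # Minimal local sketch of the ASCII check (illustrative only).
    $baseRef = 'master'                                    # assumed base branch
    $pattern = '^(src)/.*\.(cpp|hxx|cxx|lxx|h|pxx|hpp)$'   # same default pattern as the action
    $files = git diff --name-only "origin/$baseRef" HEAD |
      Where-Object { $_ -match $pattern } |
      Where-Object { Test-Path $_ }
    foreach ($file in $files) {
      $lineNumber = 1
      foreach ($line in (Get-Content -Path $file)) {
        # Report any character outside the 7-bit ASCII range (code > 0x7F).
        foreach ($m in [regex]::Matches($line, '[^\x00-\x7F]')) {
          $hex = '0x{0:X}' -f [int][char]$m.Value
          Write-Output "$($file):$($lineNumber):$($m.Index + 1): non-ASCII '$($m.Value)' ($hex)"
        }
        $lineNumber++
      }
    }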

.github/actions/ascii-check/action.yml [new file with mode: 0644]
.github/actions/retest-failures/action.yml
.github/workflows/build-and-test-multiplatform.yml
src/FoundationClasses/TKMath/GTests/ElCLib_Test.cxx
src/FoundationClasses/TKMath/gp/gp_Mat.cxx
src/FoundationClasses/TKernel/GTests/TCollection_ExtendedString_Test.cxx
src/ModelingAlgorithms/TKTopAlgo/BRepExtrema/BRepExtrema_ProximityValueTool.cxx
src/ModelingAlgorithms/TKTopAlgo/BRepExtrema/BRepExtrema_ProximityValueTool.hxx
src/Visualization/TKV3d/AIS/AIS_InteractiveContext.cxx

diff --git a/.github/actions/ascii-check/action.yml b/.github/actions/ascii-check/action.yml
new file mode 100644 (file)
index 0000000..a89dd5b
--- /dev/null
@@ -0,0 +1,95 @@
+name: 'ASCII Code Check'
+description: 'Check for non-ASCII characters in changed code files'
+inputs:
+  base-ref:
+    description: 'Base reference to compare changes against'
+    required: true
+    default: 'master'
+  file-pattern:
+    description: 'Pattern to match files for ASCII check'
+    required: false
+    default: '^(src)/.*\.(cpp|hxx|cxx|lxx|h|pxx|hpp)$'
+
+outputs:
+  has-non-ascii:
+    description: 'Whether any files contained non-ASCII characters'
+    value: ${{ steps.ascii-check.outputs.has_non_ascii }}
+
+runs:
+  using: "composite"
+  steps:
+    - name: Get changed files
+      id: changed-files
+      shell: pwsh
+      run: |
+        $changedFiles = git diff --name-only origin/${{ inputs.base-ref }} HEAD |
+          Where-Object { $_ -match '${{ inputs.file-pattern }}' } |
+          Where-Object { Test-Path $_ }
+        
+        $changedFiles | Set-Content "changed_files.txt"
+        if ($changedFiles.Count -gt 0) {
+          echo "has_files=true" >> $env:GITHUB_OUTPUT
+        }
+
+    - name: Check for non-ASCII characters
+      id: ascii-check
+      if: steps.changed-files.outputs.has_files == 'true'
+      shell: pwsh
+      run: |
+        $hasNonAscii = $false
+        $nonAsciiLogs = @()
+        
+        $files = Get-Content "changed_files.txt" | Where-Object { Test-Path $_ }
+        foreach ($file in $files) {
+          Write-Output "Checking file: $file"
+          $fileContent = Get-Content -Path $file -Raw
+          $lineNumber = 1
+          $nonAsciiInFile = $false
+          
+          foreach ($line in ($fileContent -split "`n")) {
+            # Find non-ASCII characters (char code > 127)
+            $nonAsciiMatches = [regex]::Matches($line, "[^\x00-\x7F]")
+            if ($nonAsciiMatches.Count -gt 0) {
+              $nonAsciiInFile = $true
+              $hasNonAscii = $true
+              
+              foreach ($match in $nonAsciiMatches) {
+                $charCode = [int][char]$match.Value
+                $hexCode = "0x{0:X}" -f $charCode
+                $positionInLine = $match.Index + 1
+                
+                $message = "Non-ASCII character found in '$file' at line $lineNumber, position $($positionInLine): '$($match.Value)' (Unicode: $hexCode)"
+                $nonAsciiLogs += $message
+                Write-Output $message
+              }
+            }
+            $lineNumber++
+          }
+          
+          if ($nonAsciiInFile) {
+            Write-Output "::warning file=$file::File contains non-ASCII characters"
+          }
+        }
+        
+        $nonAsciiLogs | Set-Content "non_ascii_report.txt"
+        if ($hasNonAscii) {
+          echo "has_non_ascii=true" >> $env:GITHUB_OUTPUT
+        }
+
+    - name: Upload non-ASCII report
+      if: steps.ascii-check.outputs.has_non_ascii == 'true'
+      uses: actions/upload-artifact@v4
+      with:
+        name: non-ascii-report
+        path: non_ascii_report.txt
+
+    - name: Failing step for non-ASCII issues
+      if: steps.ascii-check.outputs.has_non_ascii == 'true'
+      shell: pwsh
+      run: |
+        Write-Output "::error::Files contain non-ASCII characters. See the non-ascii-report artifact for details."
+        exit 1
+
+branding:
+  icon: 'alert-circle'
+  color: 'red'
index 3327b05ffb9028c274f565f4330ea233a8b695f1..44f841e840303af92d7eaf39ea01862e8a166900 100644 (file)
@@ -211,12 +211,21 @@ runs:
         CSF_TestScriptsPath: ${{ github.workspace }}/tests
         CSF_TestDataPath: ${{ github.workspace }}/data
 
-    - name: Upload regression test results
-      if: steps.check_failures.outputs.failed_count > 0
+    - name: Upload regression test results (Windows)
+      if: ${{ inputs.platform == 'windows' && steps.check_failures.outputs.failed_count > 0 }}
+      uses: actions/upload-artifact@v4.4.3
+      with:
+        name: ${{ inputs.results-artifact-name }}-retest
+        path: install/results/${{ inputs.test-directory-name }}-retest
+        retention-days: 15
+        overwrite: true
+        
+    - name: Upload regression test results (macOS/Linux)
+      if: ${{ inputs.platform != 'windows' && steps.check_failures.outputs.failed_count > 0 }}
       uses: actions/upload-artifact@v4.4.3
       with:
         name: ${{ inputs.results-artifact-name }}-retest
-        path: install/${{ (inputs.platform == 'windows') && '' || 'bin/' }}results/${{ inputs.test-directory-name }}-retest
+        path: install/bin/results/${{ inputs.test-directory-name }}-retest
         retention-days: 15
         overwrite: true
 
index 9fe11c0e1f64072c136903be22f2c7bad6fb2407..a6f00cb7c1bd1601ba2d76a276fbe1d134a156f3 100644 (file)
@@ -35,6 +35,21 @@ jobs:
       with:
         base-ref: ${{ github.event.pull_request.base.ref || 'master' }}
 
+  ascii-check:
+    name: Check for non-ASCII characters
+    runs-on: windows-2025
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v4.1.7
+      with:
+        fetch-depth: 0
+
+    - name: Check for non-ASCII characters
+      uses: ./.github/actions/ascii-check
+      with:
+        base-ref: ${{ github.event.pull_request.base.ref || 'master' }}
+
   documentation:
     name: Build Documentation
     runs-on: windows-2025
index a0824ccabb8df72c8cf1802fc1e60851bd4c24dc..b7305d938c06f179c6485c0c7ddb723f4133c7ce 100644 (file)
@@ -62,7 +62,7 @@ void checkDirectorsEqual(const gp_Dir&       theD1,
 
 TEST(ElClibTests, InPeriod)
 {
-  // Test with standard range [0, 2π]
+  // Test with standard range [0, 2pi]
   const Standard_Real PI2 = 2.0 * M_PI;
 
   EXPECT_NEAR(ElCLib::InPeriod(0.5, 0.0, PI2), 0.5, Precision::Confusion());
@@ -82,7 +82,7 @@ TEST(ElClibTests, AdjustPeriodic)
   Standard_Real       U1, U2;
   const Standard_Real PI2 = 2.0 * M_PI;
 
-  // Test with standard range [0, 2π]
+  // Test with standard range [0, 2pi]
   // Case 1: U1 and U2 within range, no adjustment needed
   U1 = 0.5;
   U2 = 0.7;
index e32ce66b673a7ba0c85070d3f773c8e7f5b90fd5..a08483b008d7b40fc2ac650d1404f7d22558de4d 100644 (file)
@@ -121,7 +121,7 @@ void gp_Mat::SetDot(const gp_XYZ& theRef)
 
 void gp_Mat::SetRotation(const gp_XYZ& theAxis, const Standard_Real theAng)
 {
-  // Rodrigues' rotation formula: R = I + sin(θ)K + (1-cos(θ))K²
+  // Rodrigues' rotation formula: R = I + sin(theta)K + (1-cos(theta))K^2
   // Where K is the skew-symmetric matrix of the normalized axis
   const gp_XYZ aV = theAxis.Normalized();
 
@@ -142,9 +142,9 @@ void gp_Mat::SetRotation(const gp_XYZ& theAxis, const Standard_Real theAng)
   const Standard_Real AC = A * C;
   const Standard_Real BC = B * C;
 
-  // Direct matrix computation: R = I + sin(θ)K + (1-cos(θ))K²
-  // K² diagonal terms are -(sum of other two squared components)
-  // K² off-diagonal terms are products of components
+  // Direct matrix computation: R = I + sin(theta)K + (1-cos(theta))K^2
+  // K^2 diagonal terms are -(sum of other two squared components)
+  // K^2 off-diagonal terms are products of components
   myMat[0][0] = 1.0 + aOmCos * (-(B2 + C2));
   myMat[0][1] = aOmCos * AB - aSin * C;
   myMat[0][2] = aOmCos * AC + aSin * B;
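
For reference, the matrix assembled in this function is Rodrigues' rotation formula, restated here in LaTeX (standard background, not part of the diff): with unit axis components (a, b, c) and angle \theta,

    R = I + \sin(\theta)\,K + \bigl(1 - \cos(\theta)\bigr)\,K^2,
    \qquad
    K = \begin{pmatrix} 0 & -c & b \\ c & 0 & -a \\ -b & a & 0 \end{pmatrix}

so the diagonal of K^2 is -(b^2 + c^2), -(a^2 + c^2), -(a^2 + b^2) and its off-diagonal entries are products of the axis components, which is what the updated comments above describe.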
index dda82b4ac771728c1d252510c37af226cf2c1635..a261578ad758a5643e2753b4205c30f116b7520f 100644 (file)
@@ -126,14 +126,18 @@ TEST(TCollection_ExtendedStringTest, Comparison)
 TEST(TCollection_ExtendedStringTest, UnicodeSupport)
 {
   // Test with unicode characters
-  const Standard_ExtCharacter unicodeChars[] = {0x0041, 0x00A9, 0x2122, 0x20AC, 0}; // A, ©, ™, €
-  TCollection_ExtendedString unicodeString(unicodeChars);
+  const Standard_ExtCharacter unicodeChars[] = {0x0041,
+                                                0x00A9,
+                                                0x2122,
+                                                0x20AC,
+                                                0}; // A, copyright, trademark, euro
+  TCollection_ExtendedString  unicodeString(unicodeChars);
 
   EXPECT_EQ(4, unicodeString.Length());
   EXPECT_EQ(0x0041, unicodeString.Value(1)); // A
-  EXPECT_EQ(0x00A9, unicodeString.Value(2)); // ©
-  EXPECT_EQ(0x2122, unicodeString.Value(3)); // ™
-  EXPECT_EQ(0x20AC, unicodeString.Value(4)); // €
+  EXPECT_EQ(0x00A9, unicodeString.Value(2)); // copyright
+  EXPECT_EQ(0x2122, unicodeString.Value(3)); // trademark
+  EXPECT_EQ(0x20AC, unicodeString.Value(4)); // euro
 }
 
 TEST(TCollection_ExtendedStringTest, HashCode)
@@ -173,7 +177,7 @@ TEST(TCollection_ExtendedStringTest, IsAscii)
   EXPECT_TRUE(asciiString.IsAscii());
 
   // Create a string with non-ASCII character
-  Standard_ExtCharacter      unicodeChars[] = {'A', 0x20AC, 0}; // A, €
+  Standard_ExtCharacter      unicodeChars[] = {'A', 0x20AC, 0}; // A, euro
   TCollection_ExtendedString unicodeString(unicodeChars);
   EXPECT_FALSE(unicodeString.IsAscii());
 }
index b1ab9f0e28cebb188f95a1692245f2f34bb10869..c9ec0e2bb98bcd0bbfa93f520be5bbb72ebd8ff1 100644 (file)
@@ -1,4 +1,4 @@
-// Created on: 2022-08-08
+// Created on: 2022-08-08
 // Created by: Kseniya NOSULKO
 // Copyright (c) 2022 OPEN CASCADE SAS
 //
@@ -271,11 +271,11 @@ Standard_Boolean BRepExtrema_ProximityValueTool::getEdgeAdditionalVertices(
 //=======================================================================
 // function : doRecurTrgSplit
 // purpose  : Splits the triangle into two ones recursively, halving the longest side
-//           until the area of ​​the current triangle > input step
+//           until the area of the current triangle > input step
 //! @param theTrg points of the triangle to be split
 //! @param theEdgesStatus status of triangle edges - on the border or middle of the face
 //! @param theTol telerance used in search of coincidence points
-//! @param theStep minimum area of ​​the resulting triangle
+//! @param theStep minimum area of the resulting triangle
 //! @param theAddVertices vertices obtained halving sides
 //! @param theAddStatuses status of obtained vertices - on the border or middle of the face,
 //! from triangulation of which the input triangle is
index af762fc024d4621e71e414086faede051a172c08..cd04f7484e431861b4522cce76612434fc484fb0 100644 (file)
@@ -1,4 +1,4 @@
-// Created on: 2022-08-08
+// Created on: 2022-08-08
 // Created by: Kseniya NOSULKO
 // Copyright (c) 2022 OPEN CASCADE SAS
 //
@@ -166,7 +166,7 @@ private:
                                              NCollection_Vector<ProxPnt_Status>& theAddStatuses);
 
   //! Splits the triangle recursively, halving the longest side
-  //! to the area of ​​the current triangle > input step
+  //! to the area of the current triangle > input step
   void doRecurTrgSplit(const gp_Pnt (&theTrg)[3],
                        const ProxPnt_Status (&theEdgesStatus)[3],
                        const Standard_Real                 theTol,
index cb2e52e3524e49673508738d9cd44e77715d7416..860f9b495dfe001d6d90460853652e2ded6e843b 100644 (file)
@@ -1,4 +1,4 @@
-// Created on: 1997-01-17
+// Created on: 1997-01-17
 // Created by: Robert COUBLANC
 // Copyright (c) 1997-1999 Matra Datavision
 // Copyright (c) 1999-2014 OPEN CASCADE SAS