Closed

39 commits
5c79d90
initial
pavelsavara Jan 29, 2026
396e233
script
pavelsavara Jan 29, 2026
5b1f12f
ssl
pavelsavara Jan 29, 2026
db65544
process breakdown
pavelsavara Jan 29, 2026
a5b6ac3
baseline
pavelsavara Jan 29, 2026
4a3d5bf
plan
pavelsavara Jan 29, 2026
ec6935b
baseline script
pavelsavara Jan 29, 2026
783ad61
more
pavelsavara Jan 29, 2026
40faf97
Merge branch 'main' into copilot-wasm-library-tests
pavelsavara Jan 29, 2026
6365619
xml baseline
pavelsavara Jan 29, 2026
28cdf89
compare xml results
pavelsavara Jan 29, 2026
d1e8b03
System.Runtime.InteropServices.JavaScript.Tests
pavelsavara Jan 29, 2026
6e369fa
System.Net.Http.Functional.Tests
pavelsavara Jan 29, 2026
82f11bd
System.Net.WebSockets.Tests
pavelsavara Jan 29, 2026
fdc37eb
fixing problems, fii
pavelsavara Jan 29, 2026
7df3f78
more
pavelsavara Jan 29, 2026
b2d1b9b
nohup
pavelsavara Jan 29, 2026
3f0fd31
more
pavelsavara Jan 29, 2026
713bb3f
Merge branch 'main' into copilot-wasm-library-tests
pavelsavara Jan 29, 2026
ffd391c
System.Linq.AsyncEnumerable.Tests
pavelsavara Jan 29, 2026
a68f9c1
System.Runtime.Tests
pavelsavara Jan 29, 2026
2bd9032
release
pavelsavara Jan 29, 2026
f03d8be
System.Runtime.Tests
pavelsavara Jan 29, 2026
1ef9069
System.Collections.Immutable.Tests
pavelsavara Jan 29, 2026
02c688b
System.Collections.Immutable.Tests
pavelsavara Jan 30, 2026
7991033
Test Suites to Run
pavelsavara Jan 30, 2026
effa309
System.Collections.Immutable.Tests
pavelsavara Jan 30, 2026
9f50848
System.Resources.Writer.Tests
pavelsavara Jan 30, 2026
4114727
many
pavelsavara Jan 30, 2026
4d09401
reset
pavelsavara Jan 30, 2026
2b57fe5
more summary
pavelsavara Jan 30, 2026
6a17157
more
pavelsavara Jan 30, 2026
8172b6b
more
pavelsavara Jan 30, 2026
96a4b7e
more
pavelsavara Jan 30, 2026
e9bea28
more
pavelsavara Jan 30, 2026
d94ff2e
more
pavelsavara Jan 30, 2026
e235e9b
more
pavelsavara Jan 30, 2026
ce439f5
more
pavelsavara Jan 30, 2026
035aeeb
more
pavelsavara Jan 30, 2026
1,220 changes: 1,220 additions & 0 deletions browser-tests/Mono-chrome-workitems.json

Large diffs are not rendered by default.

28 changes: 28 additions & 0 deletions browser-tests/before-testing.md
@@ -0,0 +1,28 @@
# Before Testing Setup

One-time setup steps required before running Browser/WASM CoreCLR tests.

## HTTPS Developer Certificate

The xharness test runner starts a local HTTPS server, so you need to generate and trust a developer certificate:

```bash
./dotnet.sh dev-certs https
./dotnet.sh dev-certs https --trust  # May show warnings on Linux; this is expected
```
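
To confirm the certificate is in place before kicking off a long build, `dotnet dev-certs` can report its status. This check is optional and uses standard `dotnet dev-certs` options, not anything added by this PR:

```bash
# Exit code 0 means a valid HTTPS developer certificate exists and is trusted.
./dotnet.sh dev-certs https --check --trust
```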

## Initial Build

Build the runtime for Browser/WASM with CoreCLR:

```bash
export RuntimeFlavor="CoreCLR"
export Scenario="WasmTestOnChrome"
export InstallFirefoxForTests="false"
export XunitShowProgress="true"
export SSL_CERT_DIR="$HOME/.aspnet/dev-certs/trust:/usr/lib/ssl/certs"
export PATH="$(pwd)/.dotnet:$PATH"
./build.sh -os browser -subset clr+libs+host -c Release
```

**Note:** This build can take 30-40+ minutes.
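
Once the build finishes, individual suites are driven through the `run-browser-test.sh` wrapper added by this PR (see `compare-test-results.sh` below, which expects that script to have produced `testResults_*.xml`). A representative invocation, with the environment variables above still exported; the csproj path shown is just one of the suites from the table:

```bash
./browser-tests/run-browser-test.sh src/libraries/System.Collections.Immutable/tests/System.Collections.Immutable.Tests.csproj
```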
101 changes: 101 additions & 0 deletions browser-tests/collect-suite-info.sh
@@ -0,0 +1,101 @@
#!/bin/bash

# Script to collect test suite information from Mono baselines
# Downloads all baselines, extracts durations, and generates a sorted report

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
WORKITEMS_JSON="${SCRIPT_DIR}/Mono-chrome-workitems.json"
OUTPUT_FILE="${SCRIPT_DIR}/test-suites-info.md"

# Check workitems file exists
if [ ! -f "$WORKITEMS_JSON" ]; then
    echo "Error: Workitems file not found: $WORKITEMS_JSON"
    exit 1
fi

# Get all workitem names
WORKITEMS=$(jq -r '.[].Name' "$WORKITEMS_JSON")

# Temporary file for collecting data
TEMP_DATA=$(mktemp)

echo "Collecting test suite information..."
echo ""

COUNT=0
TOTAL=$(echo "$WORKITEMS" | wc -l)

for WORKITEM in $WORKITEMS; do
    COUNT=$((COUNT + 1))

    # Extract test project name from workitem name (remove "WasmTestOnChrome-ST-" prefix)
    TEST_PROJECT=$(echo "$WORKITEM" | sed 's/WasmTestOnChrome-ST-//')

    RESULTS_DIR="${REPO_ROOT}/browser-tests/results/${TEST_PROJECT}"
    MONO_LOG="${RESULTS_DIR}/mono-console.log"

    # Download baseline if not exists
    if [ ! -f "$MONO_LOG" ]; then
        echo "[$COUNT/$TOTAL] Downloading: $TEST_PROJECT"
        "$SCRIPT_DIR/download-mono-baseline.sh" "$TEST_PROJECT" > /dev/null 2>&1 || {
            echo " Failed to download $TEST_PROJECT"
            continue
        }
    fi

    # Extract duration from log
    if [ -f "$MONO_LOG" ]; then
        # Pattern: after X.XXX minutes with result
        DURATION=$(grep -oP 'after \K[0-9.]+(?= minutes)' "$MONO_LOG" 2>/dev/null | head -1)

        if [ -z "$DURATION" ]; then
            DURATION="N/A"
        fi

        # Find the .csproj file
        CSPROJ_PATH=$(find "$REPO_ROOT/src/libraries" -name "${TEST_PROJECT}.csproj" 2>/dev/null | head -1)
        if [ -n "$CSPROJ_PATH" ]; then
            # Make path relative to repo root
            CSPROJ_PATH=$(echo "$CSPROJ_PATH" | sed "s|$REPO_ROOT/||")
        else
            CSPROJ_PATH="(not found)"
        fi

        # Get assembly name (usually same as test project but with .dll)
        ASSEMBLY="${TEST_PROJECT}.dll"

        echo "$DURATION|$TEST_PROJECT|$ASSEMBLY|$CSPROJ_PATH" >> "$TEMP_DATA"
        echo "[$COUNT/$TOTAL] $TEST_PROJECT: ${DURATION} minutes"
    fi
done

echo ""
echo "Generating report..."

# Sort by duration (numeric, descending) and generate markdown
cat > "$OUTPUT_FILE" << 'EOF'
# Test Suites to Run

This table lists all browser test suites sorted by Mono baseline duration (longest first).

| Duration (min) | Assembly | csproj Path | Status |
|----------------|----------|-------------|--------|
EOF

# Sort by duration (handle N/A by putting them at the end)
sort -t'|' -k1 -rn "$TEMP_DATA" 2>/dev/null | while IFS='|' read -r DURATION PROJECT ASSEMBLY CSPROJ; do
    echo "| $DURATION | $ASSEMBLY | $CSPROJ | ⏳ Not started |" >> "$OUTPUT_FILE"
done

# Cleanup
rm -f "$TEMP_DATA"

echo ""
echo "Report generated: $OUTPUT_FILE"
echo ""
echo "Summary:"
wc -l < "$OUTPUT_FILE"
echo "test suites total"
128 changes: 128 additions & 0 deletions browser-tests/compare-test-results.sh
@@ -0,0 +1,128 @@
#!/bin/bash

# Script to compare test results between CoreCLR and Mono baseline
# Usage: ./browser-tests/compare-test-results.sh <TestProjectName>
# Example: ./browser-tests/compare-test-results.sh System.Runtime.InteropServices.JavaScript.Tests

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

if [ -z "$1" ]; then
    echo "Usage: $0 <TestProjectName>"
    echo "Example: $0 System.Runtime.InteropServices.JavaScript.Tests"
    exit 1
fi

TEST_PROJECT_NAME="$1"
RESULTS_DIR="${REPO_ROOT}/browser-tests/results/${TEST_PROJECT_NAME}"
MONO_RESULTS="${RESULTS_DIR}/mono-testResults.xml"

# Find the most recent CoreCLR test results
CORECLR_RESULTS=$(ls -t "${RESULTS_DIR}"/testResults_*.xml 2>/dev/null | head -1)
Comment on lines +22 to +23 — Copilot AI, Jan 30, 2026

With set -e enabled at the top of the script, this ls will exit with a non-zero status (and terminate the script immediately) when no testResults_*.xml files exist, so the user never sees the friendly error message below. Consider using a more robust pattern (e.g., shopt -s nullglob then iterating the glob, or using find/printf), or explicitly guarding this command so that a missing file does not trigger set -e.

Suggested change:

    # Find the most recent CoreCLR test results
    set +e
    CORECLR_RESULTS=$(ls -t "${RESULTS_DIR}"/testResults_*.xml 2>/dev/null | head -1)
    set -e
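
For reference, a `find`-based variant along the lines the comment suggests could look like this (a sketch, not part of the PR; it relies on GNU find's `-printf`):

```bash
# Newest testResults_*.xml by modification time; empty if none exist.
# A pipeline that finds nothing still exits 0, so it does not trip `set -e`.
CORECLR_RESULTS=$(find "${RESULTS_DIR}" -maxdepth 1 -name 'testResults_*.xml' \
    -printf '%T@ %p\n' 2>/dev/null | sort -rn | head -1 | cut -d' ' -f2-)
```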

if [ ! -f "$MONO_RESULTS" ]; then
    echo "Error: Mono baseline not found: $MONO_RESULTS"
    echo "Run: ./browser-tests/download-mono-baseline.sh $TEST_PROJECT_NAME"
    exit 1
fi

if [ -z "$CORECLR_RESULTS" ] || [ ! -f "$CORECLR_RESULTS" ]; then
    echo "Error: CoreCLR test results not found in: $RESULTS_DIR"
    echo "Run the tests first with: ./browser-tests/run-browser-test.sh <path-to-csproj>"
    exit 1
fi

echo "Comparing test results:"
echo " Mono: $MONO_RESULTS"
echo " CoreCLR: $CORECLR_RESULTS"
echo ""

# Create temp directory for intermediate files
TEMP_DIR=$(mktemp -d)
trap "rm -rf $TEMP_DIR" EXIT

# Extract test names from XML files
# The test name is in the 'name' attribute of <test> elements
extract_test_names() {
    local xml_file="$1"
    grep -oP 'name="[^"]*"' "$xml_file" | \
        grep -v 'assembly name=' | \
        grep -v 'collection.*name=' | \
        sed 's/name="//;s/"$//' | \
        sort -u
}

Comment on lines +50 to +56 — Copilot AI, Jan 30, 2026

The filtering of assembly and collection names happens after grep -oP 'name="[^"]*"', so those lines no longer contain the assembly or collection text and are not actually excluded; this causes assembly and collection names to be included as "tests" and skews the Mono/CoreCLR counts (e.g., the MetricOuterLoop1 comparison reports 1 test even though total="0" in the XML). Restrict the extraction to <test ...> elements before pulling the name attribute (for example by grepping for <test first or by adjusting the regex to only match name="..." on <test> lines) so only real test cases are counted.

Suggested change:

    grep -oP '<test\b[^>]*\Kname="[^"]*"' "$xml_file" | \
        sed 's/name="//;s/"$//' | \
        sort -u
    }
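
A stricter alternative to regex-scraping the xUnit XML would be an XPath query over `<test>` elements only; a sketch (not part of the PR) assuming `xmllint` from libxml2 is available:

```bash
# Select only <test name="..."> attributes; note that XML-escaped characters
# (e.g. &quot;) remain escaped, unlike with a full XML parser.
extract_test_names() {
    xmllint --xpath '//test/@name' "$1" 2>/dev/null | \
        grep -oP 'name="\K[^"]*' | \
        sort -u
}
```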
echo "Extracting test names..."
extract_test_names "$MONO_RESULTS" > "$TEMP_DIR/mono-tests.txt"
extract_test_names "$CORECLR_RESULTS" > "$TEMP_DIR/coreclr-tests.txt"

MONO_COUNT=$(wc -l < "$TEMP_DIR/mono-tests.txt")
CORECLR_COUNT=$(wc -l < "$TEMP_DIR/coreclr-tests.txt")

echo " Mono tests: $MONO_COUNT"
echo " CoreCLR tests: $CORECLR_COUNT"
echo ""

# Use comm to find differences
# comm requires sorted input (which we have)
# -23: suppress lines unique to file2 and common lines (show only unique to file1)
# -13: suppress lines unique to file1 and common lines (show only unique to file2)

comm -23 "$TEMP_DIR/coreclr-tests.txt" "$TEMP_DIR/mono-tests.txt" > "$TEMP_DIR/extra-in-coreclr.txt"
comm -13 "$TEMP_DIR/coreclr-tests.txt" "$TEMP_DIR/mono-tests.txt" > "$TEMP_DIR/missing-in-coreclr.txt"

EXTRA_COUNT=$(wc -l < "$TEMP_DIR/extra-in-coreclr.txt")
MISSING_COUNT=$(wc -l < "$TEMP_DIR/missing-in-coreclr.txt")

echo "=========================================="
echo "COMPARISON RESULTS"
echo "=========================================="
echo ""

if [ "$EXTRA_COUNT" -gt 0 ]; then
echo "### Extra in CoreCLR (not in Mono baseline): $EXTRA_COUNT"
echo "These tests run on CoreCLR but were skipped on Mono:"
echo ""
cat "$TEMP_DIR/extra-in-coreclr.txt" | while read -r line; do
echo " - $line"
done
echo ""
else
echo "### Extra in CoreCLR: 0"
echo "No extra tests in CoreCLR."
echo ""
fi

if [ "$MISSING_COUNT" -gt 0 ]; then
echo "### Missing in CoreCLR (in Mono but not CoreCLR): $MISSING_COUNT"
echo "⚠️ These tests ran on Mono but NOT on CoreCLR (potential issue!):"
echo ""
cat "$TEMP_DIR/missing-in-coreclr.txt" | while read -r line; do
echo " - $line"
done
echo ""
else
echo "### Missing in CoreCLR: 0"
echo "✅ All Mono tests also ran on CoreCLR."
echo ""
fi

# Save comparison results to file
COMPARISON_FILE="${RESULTS_DIR}/test-comparison.txt"
{
    echo "Test Comparison: $TEST_PROJECT_NAME"
    echo "Generated: $(date -Iseconds)"
    echo ""
    echo "Mono tests: $MONO_COUNT"
    echo "CoreCLR tests: $CORECLR_COUNT"
    echo ""
    echo "=== Extra in CoreCLR ($EXTRA_COUNT) ==="
    cat "$TEMP_DIR/extra-in-coreclr.txt"
    echo ""
    echo "=== Missing in CoreCLR ($MISSING_COUNT) ==="
    cat "$TEMP_DIR/missing-in-coreclr.txt"
} > "$COMPARISON_FILE"

echo "Comparison saved to: $COMPARISON_FILE"
100 changes: 100 additions & 0 deletions browser-tests/download-mono-baseline.sh
@@ -0,0 +1,100 @@
#!/bin/bash

# Script to download Mono baseline console log from Helix
# Usage: ./browser-tests/download-mono-baseline.sh <TestProjectName>
# Example: ./browser-tests/download-mono-baseline.sh System.Runtime.InteropServices.JavaScript.Tests

set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

if [ -z "$1" ]; then
    echo "Usage: $0 <TestProjectName>"
    echo "Example: $0 System.Runtime.InteropServices.JavaScript.Tests"
    exit 1
fi

TEST_PROJECT_NAME="$1"
RESULTS_DIR="${REPO_ROOT}/browser-tests/results/${TEST_PROJECT_NAME}"
MONO_LOG_PATH="${RESULTS_DIR}/mono-console.log"
MONO_RESULTS_PATH="${RESULTS_DIR}/mono-testResults.xml"
WORKITEMS_JSON="${SCRIPT_DIR}/Mono-chrome-workitems.json"

# Check if already downloaded
if [ -f "$MONO_LOG_PATH" ] || [ -f "$MONO_RESULTS_PATH" ]; then
    echo "Mono baseline already exists:"
    echo " - $MONO_LOG_PATH"
    echo " - $MONO_RESULTS_PATH"
    echo "Delete them first if you want to re-download."
    exit 0
fi

# Check workitems file exists
if [ ! -f "$WORKITEMS_JSON" ]; then
    echo "Error: Workitems file not found: $WORKITEMS_JSON"
    exit 1
fi

# Create results directory
mkdir -p "$RESULTS_DIR"

# Find the workitem for this test suite
WORKITEM_NAME="WasmTestOnChrome-ST-${TEST_PROJECT_NAME}"
echo "Looking for workitem: $WORKITEM_NAME"

DETAILS_URL=$(jq -r ".[] | select(.Name == \"$WORKITEM_NAME\") | .DetailsUrl" "$WORKITEMS_JSON" 2>/dev/null || echo "")

if [ -z "$DETAILS_URL" ] || [ "$DETAILS_URL" = "null" ]; then
    echo "Error: Workitem '$WORKITEM_NAME' not found in $WORKITEMS_JSON"
    echo ""
    echo "Available workitems containing '$TEST_PROJECT_NAME':"
    jq -r ".[].Name" "$WORKITEMS_JSON" | grep -i "$TEST_PROJECT_NAME" || echo " (none found)"
    exit 1
fi

echo "Fetching workitem details from Helix API..."
WORKITEM_DETAILS=$(curl -s "$DETAILS_URL" 2>/dev/null)

if [ -z "$WORKITEM_DETAILS" ]; then
    echo "Error: Failed to fetch workitem details from: $DETAILS_URL"
    exit 1
fi

CONSOLE_URI=$(echo "$WORKITEM_DETAILS" | jq -r '.ConsoleOutputUri // empty' 2>/dev/null)
TEST_RESULTS_URI=$(echo "$WORKITEM_DETAILS" | jq -r '.Files[] | select(.FileName | test("testResults.xml$")) | .Uri // empty' 2>/dev/null)

if [ -z "$CONSOLE_URI" ]; then
    echo "Error: ConsoleOutputUri not found in workitem details"
    echo "Response: $WORKITEM_DETAILS"
    exit 1
fi

if [ -z "$TEST_RESULTS_URI" ]; then
    echo "Warning: testResults.xml not found in workitem files"
fi

echo "Downloading console log..."
if curl -s -o "$MONO_LOG_PATH" "$CONSOLE_URI"; then
    FILE_SIZE=$(wc -c < "$MONO_LOG_PATH")
    echo "✓ Downloaded Mono baseline: $MONO_LOG_PATH ($FILE_SIZE bytes)"

    # Extract and display test summary
    echo ""
    echo "Mono Test Summary:"
    grep "TEST EXECUTION SUMMARY" -A1 "$MONO_LOG_PATH" | tail -2 || echo " (summary not found)"
else
    echo "Error: Failed to download console log from: $CONSOLE_URI"
    exit 1
fi

if [ -n "$TEST_RESULTS_URI" ]; then
    echo ""
    echo "Downloading testResults.xml..."
    if curl -s -o "$MONO_RESULTS_PATH" "$TEST_RESULTS_URI"; then
        FILE_SIZE=$(wc -c < "$MONO_RESULTS_PATH")
        echo "✓ Downloaded Mono test results: $MONO_RESULTS_PATH ($FILE_SIZE bytes)"
    else
        echo "Warning: Failed to download testResults.xml from: $TEST_RESULTS_URI"
    fi
fi
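
The same Helix details payload can be inspected directly when other artifacts are needed; for example, reusing the `DETAILS_URL` and the `Files[]`/`Uri` fields the script already relies on:

```bash
# List every file Helix recorded for the workitem, then resolve one by name.
curl -s "$DETAILS_URL" | jq -r '.Files[].FileName'
curl -s "$DETAILS_URL" | jq -r '.Files[] | select(.FileName == "testResults.xml") | .Uri'
```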