# mirror of https://github.com/openclaw/openclaw.git
# synced 2026-03-23 07:51:33 +00:00
name: CI

on:
  push:
    branches: [main]
  pull_request:

# One CI run per PR (or per ref for pushes); a newer run cancels the older one.
concurrency:
  group: ci-${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

env:
  # Opt all JavaScript actions into the Node 24 runtime.
  FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
jobs:
  # Detect docs-only changes to skip heavy jobs (test, build, Windows, macOS, Android).
  # Lint and format always run. Fail-safe: if detection fails, run everything.
  docs-scope:
    runs-on: blacksmith-16vcpu-ubuntu-2404
    outputs:
      docs_only: ${{ steps.check.outputs.docs_only }}
      docs_changed: ${{ steps.check.outputs.docs_changed }}
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          fetch-depth: 1
          fetch-tags: false
          submodules: false

      # Shallow clones may lack the diff base; this action fetches it on demand.
      - name: Ensure docs-scope base commit
        uses: ./.github/actions/ensure-base-commit
        with:
          base-sha: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
          fetch-ref: ${{ github.event_name == 'push' && github.ref_name || github.event.pull_request.base.ref }}

      - name: Detect docs-only changes
        id: check
        uses: ./.github/actions/detect-docs-changes
# Detect which heavy areas are touched so CI can skip unrelated expensive jobs.
|
|
# Fail-safe: if detection fails, downstream jobs run.
|
|
changed-scope:
|
|
needs: [docs-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
outputs:
|
|
run_node: ${{ steps.scope.outputs.run_node }}
|
|
run_macos: ${{ steps.scope.outputs.run_macos }}
|
|
run_android: ${{ steps.scope.outputs.run_android }}
|
|
run_skills_python: ${{ steps.scope.outputs.run_skills_python }}
|
|
run_windows: ${{ steps.scope.outputs.run_windows }}
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
fetch-depth: 1
|
|
fetch-tags: false
|
|
submodules: false
|
|
|
|
- name: Ensure changed-scope base commit
|
|
uses: ./.github/actions/ensure-base-commit
|
|
with:
|
|
base-sha: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
|
|
fetch-ref: ${{ github.event_name == 'push' && github.ref_name || github.event.pull_request.base.ref }}
|
|
|
|
- name: Detect changed scopes
|
|
id: scope
|
|
shell: bash
|
|
run: |
|
|
set -euo pipefail
|
|
|
|
if [ "${{ github.event_name }}" = "push" ]; then
|
|
BASE="${{ github.event.before }}"
|
|
else
|
|
BASE="${{ github.event.pull_request.base.sha }}"
|
|
fi
|
|
|
|
node scripts/ci-changed-scope.mjs --base "$BASE" --head HEAD
|
|
|
|
changed-extensions:
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
outputs:
|
|
has_changed_extensions: ${{ steps.changed.outputs.has_changed_extensions }}
|
|
changed_extensions_matrix: ${{ steps.changed.outputs.changed_extensions_matrix }}
|
|
changed_extensions_reason: ${{ steps.changed.outputs.changed_extensions_reason }}
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
fetch-depth: 1
|
|
fetch-tags: false
|
|
submodules: false
|
|
|
|
- name: Ensure changed-extensions base commit
|
|
uses: ./.github/actions/ensure-base-commit
|
|
with:
|
|
base-sha: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
|
|
fetch-ref: ${{ github.event_name == 'push' && github.ref_name || github.event.pull_request.base.ref }}
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
install-deps: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Detect changed extensions
|
|
id: changed
|
|
env:
|
|
BASE_SHA: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
|
|
run: |
|
|
node --input-type=module <<'EOF'
|
|
import { appendFileSync } from "node:fs";
|
|
import { execFileSync } from "node:child_process";
|
|
import {
|
|
listAvailableExtensionIds,
|
|
listChangedExtensionIds,
|
|
resolveExtensionTestPlan,
|
|
} from "./scripts/test-extension.mjs";
|
|
|
|
const normalizeRelative = (filePath) => String(filePath).replaceAll("\\", "/");
|
|
const changedPaths = execFileSync("git", ["diff", "--name-only", process.env.BASE_SHA, "HEAD"], {
|
|
cwd: process.cwd(),
|
|
stdio: ["ignore", "pipe", "pipe"],
|
|
encoding: "utf8",
|
|
})
|
|
.split("\n")
|
|
.map((line) => normalizeRelative(line.trim()))
|
|
.filter((line) => line.length > 0);
|
|
const extensionFastInfraChanged = changedPaths.some((changedPath) =>
|
|
changedPath === ".github/workflows/ci.yml" ||
|
|
changedPath === "scripts/test-extension.mjs" ||
|
|
changedPath === "vitest.extensions.config.ts" ||
|
|
changedPath === "vitest.channels.config.ts" ||
|
|
changedPath === "vitest.channel-paths.mjs" ||
|
|
changedPath.startsWith(".github/actions/setup-node-env/"),
|
|
);
|
|
|
|
let extensionIds = listChangedExtensionIds({ base: process.env.BASE_SHA, head: "HEAD" });
|
|
let reason = extensionIds.length > 0 ? "changed-extensions" : "none";
|
|
|
|
if (extensionIds.length === 0 && extensionFastInfraChanged) {
|
|
let extensionCandidate = "";
|
|
let channelCandidate = "";
|
|
for (const extensionId of listAvailableExtensionIds()) {
|
|
const plan = resolveExtensionTestPlan({ targetArg: extensionId, cwd: process.cwd() });
|
|
if (plan.testFiles.length === 0) {
|
|
continue;
|
|
}
|
|
if (!extensionCandidate && plan.config === "vitest.extensions.config.ts") {
|
|
extensionCandidate = extensionId;
|
|
}
|
|
if (!channelCandidate && plan.config === "vitest.channels.config.ts") {
|
|
channelCandidate = extensionId;
|
|
}
|
|
if (extensionCandidate && channelCandidate) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
const representativeIds = [];
|
|
if (extensionCandidate) {
|
|
representativeIds.push(extensionCandidate);
|
|
}
|
|
if (channelCandidate && channelCandidate !== extensionCandidate) {
|
|
representativeIds.push(channelCandidate);
|
|
}
|
|
|
|
extensionIds = representativeIds;
|
|
reason =
|
|
representativeIds.length > 0
|
|
? "extension-fast-infra-change-representative"
|
|
: "extension-fast-infra-change-no-tests";
|
|
}
|
|
|
|
const matrix = JSON.stringify({ include: extensionIds.map((extension) => ({ extension })) });
|
|
appendFileSync(process.env.GITHUB_OUTPUT, `changed_extensions_reason=${reason}\n`, "utf8");
|
|
appendFileSync(process.env.GITHUB_OUTPUT, `has_changed_extensions=${extensionIds.length > 0}\n`, "utf8");
|
|
appendFileSync(process.env.GITHUB_OUTPUT, `changed_extensions_matrix=${matrix}\n`, "utf8");
|
|
EOF
|
|
|
|
- name: Report changed-extensions selection
|
|
run: |
|
|
echo "extension-fast selection reason: ${{ steps.changed.outputs.changed_extensions_reason }}"
|
|
echo "extension-fast matrix: ${{ steps.changed.outputs.changed_extensions_matrix }}"
|
|
|
|
# Build dist once for Node-relevant changes and share it with downstream jobs.
|
|
build-artifacts:
|
|
needs: [docs-scope, changed-scope]
|
|
if: github.event_name == 'push' && needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Ensure secrets base commit (PR fast path)
|
|
if: github.event_name == 'pull_request'
|
|
uses: ./.github/actions/ensure-base-commit
|
|
with:
|
|
base-sha: ${{ github.event.pull_request.base.sha }}
|
|
fetch-ref: ${{ github.event.pull_request.base.ref }}
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Build dist
|
|
run: pnpm build
|
|
|
|
- name: Upload dist artifact
|
|
uses: actions/upload-artifact@v7
|
|
with:
|
|
name: dist-build
|
|
path: dist/
|
|
retention-days: 1
|
|
|
|
# Validate npm pack contents after build (only on push to main, not PRs).
|
|
release-check:
|
|
needs: [docs-scope, build-artifacts]
|
|
if: github.event_name == 'push' && needs.docs-scope.outputs.docs_only != 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Download dist artifact
|
|
uses: actions/download-artifact@v8
|
|
with:
|
|
name: dist-build
|
|
path: dist/
|
|
|
|
- name: Check release contents
|
|
run: pnpm release:check
|
|
|
|
checks:
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
include:
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 1
|
|
shard_count: 2
|
|
command: pnpm canvas:a2ui:bundle && pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 2
|
|
shard_count: 2
|
|
command: pnpm canvas:a2ui:bundle && pnpm test
|
|
- runtime: node
|
|
task: extensions
|
|
command: pnpm test:extensions
|
|
- runtime: node
|
|
task: channels
|
|
command: pnpm test:channels
|
|
- runtime: node
|
|
task: contracts
|
|
command: pnpm test:contracts
|
|
- runtime: node
|
|
task: protocol
|
|
command: pnpm protocol:check
|
|
- runtime: bun
|
|
task: test
|
|
command: pnpm canvas:a2ui:bundle && bunx vitest run --config vitest.unit.config.ts
|
|
steps:
|
|
- name: Skip bun lane on pull requests
|
|
if: github.event_name == 'pull_request' && matrix.runtime == 'bun'
|
|
run: echo "Skipping Bun compatibility lane on pull requests."
|
|
|
|
- name: Checkout
|
|
if: github.event_name != 'pull_request' || matrix.runtime != 'bun'
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
if: matrix.runtime != 'bun' || github.event_name != 'pull_request'
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "${{ matrix.runtime == 'bun' }}"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Configure Node test resources
|
|
if: (github.event_name != 'pull_request' || matrix.runtime != 'bun') && matrix.task == 'test' && matrix.runtime == 'node'
|
|
env:
|
|
SHARD_COUNT: ${{ matrix.shard_count || '' }}
|
|
SHARD_INDEX: ${{ matrix.shard_index || '' }}
|
|
run: |
|
|
# `pnpm test` runs `scripts/test-parallel.mjs`, which spawns multiple Node processes.
|
|
# Default heap limits have been too low on Linux CI (V8 OOM near 4GB).
|
|
echo "OPENCLAW_TEST_WORKERS=2" >> "$GITHUB_ENV"
|
|
echo "OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB=6144" >> "$GITHUB_ENV"
|
|
if [ -n "$SHARD_COUNT" ] && [ -n "$SHARD_INDEX" ]; then
|
|
echo "OPENCLAW_TEST_SHARDS=$SHARD_COUNT" >> "$GITHUB_ENV"
|
|
echo "OPENCLAW_TEST_SHARD_INDEX=$SHARD_INDEX" >> "$GITHUB_ENV"
|
|
fi
|
|
|
|
- name: Run ${{ matrix.task }} (${{ matrix.runtime }})
|
|
if: matrix.runtime != 'bun' || github.event_name != 'pull_request'
|
|
run: ${{ matrix.command }}
|
|
|
|
extension-fast-precheck:
|
|
name: "extension-fast-precheck"
|
|
needs: [docs-scope, changed-scope, changed-extensions]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true' && needs.changed-extensions.outputs.has_changed_extensions == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
timeout-minutes: 10
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Select representative extension-fast files
|
|
id: representative
|
|
run: |
|
|
node --input-type=module <<'EOF'
|
|
import { appendFileSync } from "node:fs";
|
|
import { listAvailableExtensionIds, resolveExtensionTestPlan } from "./scripts/test-extension.mjs";
|
|
|
|
let extensionFile = "";
|
|
let channelFile = "";
|
|
|
|
for (const extensionId of listAvailableExtensionIds()) {
|
|
const plan = resolveExtensionTestPlan({ targetArg: extensionId, cwd: process.cwd() });
|
|
if (plan.testFiles.length === 0) {
|
|
continue;
|
|
}
|
|
const firstFile = plan.testFiles[0] ?? "";
|
|
if (!extensionFile && plan.config === "vitest.extensions.config.ts") {
|
|
extensionFile = firstFile;
|
|
}
|
|
if (!channelFile && plan.config === "vitest.channels.config.ts") {
|
|
channelFile = firstFile;
|
|
}
|
|
if (extensionFile && channelFile) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
appendFileSync(process.env.GITHUB_OUTPUT, `extension_file=${extensionFile}\n`, "utf8");
|
|
appendFileSync(process.env.GITHUB_OUTPUT, `channel_file=${channelFile}\n`, "utf8");
|
|
EOF
|
|
|
|
- name: Run extension-fast import precheck
|
|
env:
|
|
EXTENSION_FILE: ${{ steps.representative.outputs.extension_file }}
|
|
CHANNEL_FILE: ${{ steps.representative.outputs.channel_file }}
|
|
run: |
|
|
set -euo pipefail
|
|
precheck_start="$(date +%s)"
|
|
|
|
if [ -n "$EXTENSION_FILE" ]; then
|
|
echo "Running extensions precheck: $EXTENSION_FILE"
|
|
pnpm exec vitest run --config vitest.extensions.config.ts --pool=forks --maxWorkers=1 --bail=1 "$EXTENSION_FILE"
|
|
fi
|
|
|
|
if [ -n "$CHANNEL_FILE" ]; then
|
|
echo "Running channels precheck: $CHANNEL_FILE"
|
|
pnpm exec vitest run --config vitest.channels.config.ts --pool=forks --maxWorkers=1 --bail=1 "$CHANNEL_FILE"
|
|
fi
|
|
|
|
if [ -z "$EXTENSION_FILE" ] && [ -z "$CHANNEL_FILE" ]; then
|
|
echo "::warning::extension-fast precheck found no representative test files."
|
|
fi
|
|
|
|
precheck_end="$(date +%s)"
|
|
precheck_duration="$((precheck_end - precheck_start))"
|
|
{
|
|
echo "### extension-fast-precheck"
|
|
echo "- extension file: ${EXTENSION_FILE:-none}"
|
|
echo "- channel file: ${CHANNEL_FILE:-none}"
|
|
echo "- duration: ${precheck_duration}s"
|
|
} >> "$GITHUB_STEP_SUMMARY"
|
|
|
|
extension-fast:
|
|
name: "extension-fast (${{ matrix.extension }})"
|
|
needs: [docs-scope, changed-scope, changed-extensions, extension-fast-precheck]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true' && needs.changed-extensions.outputs.has_changed_extensions == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
timeout-minutes: 25
|
|
strategy:
|
|
fail-fast: ${{ github.event_name == 'pull_request' }}
|
|
matrix: ${{ fromJson(needs.changed-extensions.outputs.changed_extensions_matrix) }}
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Show extension-fast test plan
|
|
id: plan
|
|
env:
|
|
OPENCLAW_CHANGED_EXTENSION: ${{ matrix.extension }}
|
|
run: |
|
|
set -euo pipefail
|
|
plan_json="$(pnpm test:extension "$OPENCLAW_CHANGED_EXTENSION" --allow-empty --dry-run --json)"
|
|
config="$(printf '%s' "$plan_json" | jq -r '.config')"
|
|
roots="$(printf '%s' "$plan_json" | jq -r '.roots | join(", ")')"
|
|
tests="$(printf '%s' "$plan_json" | jq -r '.testFiles | length')"
|
|
{
|
|
echo "config=$config"
|
|
echo "tests=$tests"
|
|
} >> "$GITHUB_OUTPUT"
|
|
echo "extension-fast plan: config=$config tests=$tests roots=$roots"
|
|
{
|
|
echo "### extension-fast (${OPENCLAW_CHANGED_EXTENSION}) plan"
|
|
echo "- config: \`$config\`"
|
|
echo "- roots: \`$roots\`"
|
|
echo "- test files: \`$tests\`"
|
|
} >> "$GITHUB_STEP_SUMMARY"
|
|
|
|
- name: Run changed extension tests (timed)
|
|
env:
|
|
OPENCLAW_CHANGED_EXTENSION: ${{ matrix.extension }}
|
|
PLAN_CONFIG: ${{ steps.plan.outputs.config }}
|
|
PLAN_TESTS: ${{ steps.plan.outputs.tests }}
|
|
run: |
|
|
set -euo pipefail
|
|
test_start="$(date +%s)"
|
|
set +e
|
|
pnpm test:extension "$OPENCLAW_CHANGED_EXTENSION" --allow-empty -- --pool=forks --maxWorkers=1 --bail=1
|
|
test_status=$?
|
|
set -e
|
|
test_end="$(date +%s)"
|
|
test_duration="$((test_end - test_start))"
|
|
|
|
metrics_dir="${RUNNER_TEMP}/extension-fast-metrics"
|
|
mkdir -p "$metrics_dir"
|
|
metrics_json="${metrics_dir}/${OPENCLAW_CHANGED_EXTENSION}.json"
|
|
metrics_tsv="${metrics_dir}/${OPENCLAW_CHANGED_EXTENSION}.tsv"
|
|
|
|
jq -n \
|
|
--arg extension "$OPENCLAW_CHANGED_EXTENSION" \
|
|
--arg config "${PLAN_CONFIG:-unknown}" \
|
|
--argjson tests "${PLAN_TESTS:-0}" \
|
|
--arg runId "$GITHUB_RUN_ID" \
|
|
--arg runAttempt "$GITHUB_RUN_ATTEMPT" \
|
|
--arg sha "$GITHUB_SHA" \
|
|
--arg ref "$GITHUB_REF" \
|
|
--arg status "$test_status" \
|
|
--argjson durationSeconds "$test_duration" \
|
|
'{
|
|
extension: $extension,
|
|
config: $config,
|
|
tests: $tests,
|
|
runId: $runId,
|
|
runAttempt: $runAttempt,
|
|
sha: $sha,
|
|
ref: $ref,
|
|
status: $status,
|
|
durationSeconds: $durationSeconds
|
|
}' > "$metrics_json"
|
|
|
|
printf "extension\tconfig\ttests\tstatus\tduration_seconds\trun_id\trun_attempt\tsha\tref\n" > "$metrics_tsv"
|
|
printf "%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" \
|
|
"$OPENCLAW_CHANGED_EXTENSION" \
|
|
"${PLAN_CONFIG:-unknown}" \
|
|
"${PLAN_TESTS:-0}" \
|
|
"$test_status" \
|
|
"$test_duration" \
|
|
"$GITHUB_RUN_ID" \
|
|
"$GITHUB_RUN_ATTEMPT" \
|
|
"$GITHUB_SHA" \
|
|
"$GITHUB_REF" >> "$metrics_tsv"
|
|
|
|
echo "extension-fast test duration: ${test_duration}s"
|
|
{
|
|
echo "### extension-fast (${OPENCLAW_CHANGED_EXTENSION}) runtime"
|
|
echo "- duration: ${test_duration}s"
|
|
echo "- exit code: ${test_status}"
|
|
echo "- metrics json: \`${metrics_json}\`"
|
|
echo "- metrics tsv: \`${metrics_tsv}\`"
|
|
} >> "$GITHUB_STEP_SUMMARY"
|
|
exit "$test_status"
|
|
|
|
- name: Upload extension-fast timing artifacts
|
|
if: always()
|
|
uses: actions/upload-artifact@v7
|
|
with:
|
|
name: extension-fast-metrics-${{ matrix.extension }}
|
|
path: |
|
|
${{ runner.temp }}/extension-fast-metrics/${{ matrix.extension }}.json
|
|
${{ runner.temp }}/extension-fast-metrics/${{ matrix.extension }}.tsv
|
|
if-no-files-found: warn
|
|
retention-days: 7
|
|
|
|
extension-fast-metrics-summary:
|
|
name: "extension-fast-metrics-summary"
|
|
needs: [docs-scope, changed-scope, changed-extensions, extension-fast]
|
|
if: always() && needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true' && needs.changed-extensions.outputs.has_changed_extensions == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Download extension-fast timing artifacts
|
|
uses: actions/download-artifact@v8
|
|
with:
|
|
pattern: extension-fast-metrics-*
|
|
merge-multiple: true
|
|
path: extension-fast-metrics
|
|
|
|
- name: Summarize extension-fast timing
|
|
run: |
|
|
node --input-type=module <<'EOF'
|
|
import { readdirSync, readFileSync, writeFileSync, existsSync } from "node:fs";
|
|
import path from "node:path";
|
|
|
|
const metricsDir = path.resolve("extension-fast-metrics");
|
|
const summaryPath = path.resolve("extension-fast-summary.json");
|
|
const summaryTsvPath = path.resolve("extension-fast-summary.tsv");
|
|
const slaP95Seconds = 900;
|
|
const slaMaxSeconds = 1500;
|
|
|
|
if (!existsSync(metricsDir)) {
|
|
console.log("::warning::No extension-fast timing artifacts found.");
|
|
process.exit(0);
|
|
}
|
|
|
|
const rows = readdirSync(metricsDir)
|
|
.filter((entry) => entry.endsWith(".json"))
|
|
.map((entry) => {
|
|
const fullPath = path.join(metricsDir, entry);
|
|
return JSON.parse(readFileSync(fullPath, "utf8"));
|
|
})
|
|
.filter((row) => typeof row.durationSeconds === "number");
|
|
|
|
if (rows.length === 0) {
|
|
console.log("::warning::No extension-fast timing JSON rows were found.");
|
|
process.exit(0);
|
|
}
|
|
|
|
const durations = rows.map((row) => row.durationSeconds).toSorted((a, b) => a - b);
|
|
const pick = (p) => durations[Math.max(0, Math.min(durations.length - 1, Math.ceil(p * durations.length) - 1))];
|
|
const p50 = pick(0.5);
|
|
const p95 = pick(0.95);
|
|
const max = durations[durations.length - 1];
|
|
const failed = rows.filter((row) => String(row.status) !== "0").length;
|
|
|
|
const summary = {
|
|
count: rows.length,
|
|
failed,
|
|
p50Seconds: p50,
|
|
p95Seconds: p95,
|
|
maxSeconds: max,
|
|
rows,
|
|
};
|
|
writeFileSync(summaryPath, `${JSON.stringify(summary, null, 2)}\n`, "utf8");
|
|
|
|
const tsvLines = [
|
|
"extension\tconfig\ttests\tstatus\tduration_seconds\trun_id\trun_attempt\tsha\tref",
|
|
...rows.map((row) =>
|
|
[
|
|
row.extension,
|
|
row.config,
|
|
row.tests,
|
|
row.status,
|
|
row.durationSeconds,
|
|
row.runId,
|
|
row.runAttempt,
|
|
row.sha,
|
|
row.ref,
|
|
].join("\t"),
|
|
),
|
|
];
|
|
writeFileSync(summaryTsvPath, `${tsvLines.join("\n")}\n`, "utf8");
|
|
|
|
const markdown = [
|
|
"### extension-fast timing summary",
|
|
`- lanes: \`${rows.length}\``,
|
|
`- failed lanes: \`${failed}\``,
|
|
`- p50: \`${p50}s\``,
|
|
`- p95: \`${p95}s\``,
|
|
`- max: \`${max}s\``,
|
|
"",
|
|
"| extension | config | tests | status | duration (s) |",
|
|
"| --- | --- | ---: | ---: | ---: |",
|
|
...rows
|
|
.toSorted((a, b) => b.durationSeconds - a.durationSeconds)
|
|
.map(
|
|
(row) =>
|
|
`| ${row.extension} | ${row.config} | ${row.tests} | ${row.status} | ${row.durationSeconds} |`,
|
|
),
|
|
].join("\n");
|
|
writeFileSync(process.env.GITHUB_STEP_SUMMARY, `${markdown}\n`, { flag: "a" });
|
|
|
|
if (p95 > slaP95Seconds) {
|
|
console.log(
|
|
`::warning::extension-fast p95 ${p95}s exceeds SLA target ${slaP95Seconds}s.`,
|
|
);
|
|
}
|
|
if (max > slaMaxSeconds) {
|
|
console.log(
|
|
`::warning::extension-fast max ${max}s exceeds SLA ceiling ${slaMaxSeconds}s.`,
|
|
);
|
|
}
|
|
EOF
|
|
|
|
- name: Upload extension-fast timing summary artifact
|
|
if: always()
|
|
uses: actions/upload-artifact@v7
|
|
with:
|
|
name: extension-fast-metrics-summary
|
|
path: |
|
|
extension-fast-summary.json
|
|
extension-fast-summary.tsv
|
|
if-no-files-found: warn
|
|
retention-days: 7
|
|
|
|
# Types, lint, and format check.
|
|
check:
|
|
name: "check"
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Check types and lint and oxfmt
|
|
run: pnpm check
|
|
|
|
- name: Strict TS build smoke
|
|
run: pnpm build:strict-smoke
|
|
|
|
- name: Enforce safe external URL opening policy
|
|
run: pnpm lint:ui:no-raw-window-open
|
|
|
|
build-smoke:
|
|
name: "build-smoke"
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Build dist
|
|
run: pnpm build
|
|
|
|
- name: Smoke test CLI launcher help
|
|
run: node openclaw.mjs --help
|
|
|
|
- name: Smoke test CLI launcher status json
|
|
run: node openclaw.mjs status --json --timeout 1
|
|
|
|
- name: Smoke test built bundled plugin singleton
|
|
run: pnpm test:build:singleton
|
|
|
|
- name: Check CLI startup memory
|
|
run: pnpm test:startup:memory
|
|
|
|
gateway-watch-regression:
|
|
name: "gateway-watch-regression"
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Run gateway watch regression harness
|
|
run: pnpm test:gateway:watch-regression
|
|
|
|
- name: Upload gateway watch regression artifacts
|
|
if: always()
|
|
uses: actions/upload-artifact@v7
|
|
with:
|
|
name: gateway-watch-regression
|
|
path: .local/gateway-watch-regression/
|
|
retention-days: 7
|
|
|
|
# Validate docs (format, lint, broken links) only when docs files changed.
|
|
check-docs:
|
|
needs: [docs-scope]
|
|
if: needs.docs-scope.outputs.docs_changed == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Check docs
|
|
run: pnpm check:docs
|
|
|
|
compat-node22:
|
|
name: "compat-node22"
|
|
needs: [docs-scope, changed-scope]
|
|
if: github.event_name == 'push' && needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_node == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node 22 compatibility environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
node-version: "22.x"
|
|
cache-key-suffix: "node22"
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
|
|
- name: Configure Node 22 test resources
|
|
run: |
|
|
# Keep the compatibility lane aligned with the default Node test lane.
|
|
echo "OPENCLAW_TEST_WORKERS=2" >> "$GITHUB_ENV"
|
|
echo "OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB=6144" >> "$GITHUB_ENV"
|
|
|
|
- name: Build under Node 22
|
|
run: pnpm build
|
|
|
|
- name: Run tests under Node 22
|
|
run: pnpm test
|
|
|
|
- name: Verify npm pack under Node 22
|
|
run: pnpm release:check
|
|
|
|
skills-python:
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_skills_python == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Python
|
|
uses: actions/setup-python@v6
|
|
with:
|
|
python-version: "3.12"
|
|
|
|
- name: Install Python tooling
|
|
run: |
|
|
python -m pip install --upgrade pip
|
|
python -m pip install pytest ruff pyyaml
|
|
|
|
- name: Lint Python skill scripts
|
|
run: python -m ruff check skills
|
|
|
|
- name: Test skill Python scripts
|
|
run: python -m pytest -q skills
|
|
|
|
secrets:
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Ensure secrets base commit
|
|
uses: ./.github/actions/ensure-base-commit
|
|
with:
|
|
base-sha: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
|
|
fetch-ref: ${{ github.event_name == 'push' && github.ref_name || github.event.pull_request.base.ref }}
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
use-sticky-disk: "false"
|
|
install-deps: "false"
|
|
|
|
- name: Setup Python
|
|
id: setup-python
|
|
uses: actions/setup-python@v6
|
|
with:
|
|
python-version: "3.12"
|
|
cache: "pip"
|
|
cache-dependency-path: |
|
|
pyproject.toml
|
|
.pre-commit-config.yaml
|
|
.github/workflows/ci.yml
|
|
|
|
- name: Restore pre-commit cache
|
|
uses: actions/cache@v5
|
|
with:
|
|
path: ~/.cache/pre-commit
|
|
key: pre-commit-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('.pre-commit-config.yaml') }}
|
|
|
|
- name: Install pre-commit
|
|
run: |
|
|
python -m pip install --upgrade pip
|
|
python -m pip install pre-commit
|
|
|
|
- name: Detect committed private keys
|
|
run: pre-commit run --all-files detect-private-key
|
|
|
|
- name: Audit changed GitHub workflows with zizmor
|
|
env:
|
|
BASE_SHA: ${{ github.event_name == 'push' && github.event.before || github.event.pull_request.base.sha }}
|
|
run: |
|
|
set -euo pipefail
|
|
|
|
if [ -z "${BASE_SHA:-}" ] || [ "${BASE_SHA}" = "0000000000000000000000000000000000000000" ]; then
|
|
echo "No usable base SHA detected; skipping zizmor."
|
|
exit 0
|
|
fi
|
|
|
|
if ! git cat-file -e "${BASE_SHA}^{commit}" 2>/dev/null; then
|
|
echo "Base SHA ${BASE_SHA} is unavailable; skipping zizmor."
|
|
exit 0
|
|
fi
|
|
|
|
mapfile -t workflow_files < <(
|
|
git diff --name-only "${BASE_SHA}" HEAD -- '.github/workflows/*.yml' '.github/workflows/*.yaml'
|
|
)
|
|
if [ "${#workflow_files[@]}" -eq 0 ]; then
|
|
echo "No workflow changes detected; skipping zizmor."
|
|
exit 0
|
|
fi
|
|
|
|
printf 'Auditing workflow files:\n%s\n' "${workflow_files[@]}"
|
|
pre-commit run zizmor --files "${workflow_files[@]}"
|
|
|
|
- name: Audit production dependencies
|
|
run: pre-commit run --all-files pnpm-audit-prod
|
|
|
|
checks-windows:
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_windows == 'true'
|
|
runs-on: blacksmith-32vcpu-windows-2025
|
|
timeout-minutes: 45
|
|
env:
|
|
NODE_OPTIONS: --max-old-space-size=6144
|
|
# Keep total concurrency predictable on the 32 vCPU runner.
|
|
# Windows shard 2 has shown intermittent instability at 2 workers.
|
|
OPENCLAW_TEST_WORKERS: 1
|
|
defaults:
|
|
run:
|
|
shell: bash
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
include:
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 1
|
|
shard_count: 6
|
|
command: pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 2
|
|
shard_count: 6
|
|
command: pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 3
|
|
shard_count: 6
|
|
command: pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 4
|
|
shard_count: 6
|
|
command: pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 5
|
|
shard_count: 6
|
|
command: pnpm test
|
|
- runtime: node
|
|
task: test
|
|
shard_index: 6
|
|
shard_count: 6
|
|
command: pnpm test
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Try to exclude workspace from Windows Defender (best-effort)
|
|
shell: pwsh
|
|
run: |
|
|
$cmd = Get-Command Add-MpPreference -ErrorAction SilentlyContinue
|
|
if (-not $cmd) {
|
|
Write-Host "Add-MpPreference not available, skipping Defender exclusions."
|
|
exit 0
|
|
}
|
|
|
|
try {
|
|
# Defender sometimes intercepts process spawning (vitest workers). If this fails
|
|
# (eg hardened images), keep going and rely on worker limiting above.
|
|
Add-MpPreference -ExclusionPath "$env:GITHUB_WORKSPACE" -ErrorAction Stop
|
|
Add-MpPreference -ExclusionProcess "node.exe" -ErrorAction Stop
|
|
Write-Host "Defender exclusions applied."
|
|
} catch {
|
|
Write-Warning "Failed to apply Defender exclusions, continuing. $($_.Exception.Message)"
|
|
}
|
|
|
|
- name: Setup Node.js
|
|
uses: actions/setup-node@v6
|
|
with:
|
|
node-version: 24.x
|
|
check-latest: false
|
|
|
|
- name: Setup pnpm + cache store
|
|
uses: ./.github/actions/setup-pnpm-store-cache
|
|
with:
|
|
pnpm-version: "10.23.0"
|
|
cache-key-suffix: "node24"
|
|
# Sticky disk mount currently retries/fails on every shard and adds ~50s
|
|
# before install while still yielding zero pnpm store reuse.
|
|
# Try exact-key actions/cache restores instead to recover store reuse
|
|
# without the sticky-disk mount penalty.
|
|
use-sticky-disk: "false"
|
|
use-restore-keys: "false"
|
|
use-actions-cache: "true"
|
|
|
|
- name: Runtime versions
|
|
run: |
|
|
node -v
|
|
npm -v
|
|
pnpm -v
|
|
|
|
- name: Capture node path
|
|
run: echo "NODE_BIN=$(dirname \"$(node -p \"process.execPath\")\")" >> "$GITHUB_ENV"
|
|
|
|
- name: Install dependencies
|
|
env:
|
|
CI: true
|
|
run: |
|
|
export PATH="$NODE_BIN:$PATH"
|
|
which node
|
|
node -v
|
|
pnpm -v
|
|
# Persist Windows-native postinstall outputs in the pnpm store so restored
|
|
# caches can skip repeated rebuild/download work on later shards/runs.
|
|
pnpm install --frozen-lockfile --prefer-offline --ignore-scripts=false --config.engine-strict=false --config.enable-pre-post-scripts=true --config.side-effects-cache=true || pnpm install --frozen-lockfile --prefer-offline --ignore-scripts=false --config.engine-strict=false --config.enable-pre-post-scripts=true --config.side-effects-cache=true
|
|
|
|
- name: Configure test shard (Windows)
|
|
if: matrix.task == 'test'
|
|
run: |
|
|
echo "OPENCLAW_TEST_SHARDS=${{ matrix.shard_count }}" >> "$GITHUB_ENV"
|
|
echo "OPENCLAW_TEST_SHARD_INDEX=${{ matrix.shard_index }}" >> "$GITHUB_ENV"
|
|
|
|
- name: Build A2UI bundle (Windows)
|
|
if: matrix.task == 'test'
|
|
run: pnpm canvas:a2ui:bundle
|
|
|
|
- name: Run ${{ matrix.task }} (${{ matrix.runtime }})
|
|
run: ${{ matrix.command }}
|
|
|
|
# Consolidated macOS job: runs TS tests + Swift lint/build/test sequentially
|
|
# on a single runner. GitHub limits macOS concurrent jobs to 5 per org;
|
|
# running 4 separate jobs per PR (as before) starved the queue. One job
|
|
# per PR allows 5 PRs to run macOS checks simultaneously.
|
|
macos:
|
|
needs: [docs-scope, changed-scope, check]
|
|
if: github.event_name == 'pull_request' && needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_macos == 'true'
|
|
runs-on: macos-latest
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Node environment
|
|
uses: ./.github/actions/setup-node-env
|
|
with:
|
|
install-bun: "false"
|
|
|
|
# --- Run all checks sequentially (fast gates first) ---
|
|
- name: TS tests (macOS)
|
|
env:
|
|
NODE_OPTIONS: --max-old-space-size=4096
|
|
run: pnpm test
|
|
|
|
# --- Xcode/Swift setup ---
|
|
- name: Select Xcode 26.1
|
|
run: |
|
|
sudo xcode-select -s /Applications/Xcode_26.1.app
|
|
xcodebuild -version
|
|
|
|
- name: Install XcodeGen / SwiftLint / SwiftFormat
|
|
run: brew install xcodegen swiftlint swiftformat
|
|
|
|
- name: Show toolchain
|
|
run: |
|
|
sw_vers
|
|
xcodebuild -version
|
|
swift --version
|
|
|
|
- name: Swift lint
|
|
run: |
|
|
swiftlint --config .swiftlint.yml
|
|
swiftformat --lint apps/macos/Sources --config .swiftformat
|
|
|
|
- name: Cache SwiftPM
|
|
uses: actions/cache@v5
|
|
with:
|
|
path: ~/Library/Caches/org.swift.swiftpm
|
|
key: ${{ runner.os }}-swiftpm-${{ hashFiles('apps/macos/Package.resolved') }}
|
|
restore-keys: |
|
|
${{ runner.os }}-swiftpm-
|
|
|
|
- name: Swift build (release)
|
|
run: |
|
|
set -euo pipefail
|
|
for attempt in 1 2 3; do
|
|
if swift build --package-path apps/macos --configuration release; then
|
|
exit 0
|
|
fi
|
|
echo "swift build failed (attempt $attempt/3). Retrying…"
|
|
sleep $((attempt * 20))
|
|
done
|
|
exit 1
|
|
|
|
- name: Swift test
|
|
run: |
|
|
set -euo pipefail
|
|
for attempt in 1 2 3; do
|
|
if swift test --package-path apps/macos --parallel --enable-code-coverage --show-codecov-path; then
|
|
exit 0
|
|
fi
|
|
echo "swift test failed (attempt $attempt/3). Retrying…"
|
|
sleep $((attempt * 20))
|
|
done
|
|
exit 1
|
|
|
|
ios:
|
|
if: false # ignore iOS in CI for now
|
|
runs-on: macos-latest
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Select Xcode 26.1
|
|
run: |
|
|
sudo xcode-select -s /Applications/Xcode_26.1.app
|
|
xcodebuild -version
|
|
|
|
- name: Install XcodeGen
|
|
run: brew install xcodegen
|
|
|
|
- name: Install SwiftLint / SwiftFormat
|
|
run: brew install swiftlint swiftformat
|
|
|
|
- name: Show toolchain
|
|
run: |
|
|
sw_vers
|
|
xcodebuild -version
|
|
swift --version
|
|
|
|
- name: Generate iOS project
|
|
run: |
|
|
cd apps/ios
|
|
xcodegen generate
|
|
|
|
- name: iOS tests
|
|
run: |
|
|
set -euo pipefail
|
|
RESULT_BUNDLE_PATH="$RUNNER_TEMP/Clawdis-iOS.xcresult"
|
|
DEST_ID="$(
|
|
python3 - <<'PY'
|
|
import json
|
|
import subprocess
|
|
import sys
|
|
import uuid
|
|
|
|
def sh(args: list[str]) -> str:
|
|
return subprocess.check_output(args, text=True).strip()
|
|
|
|
# Prefer an already-created iPhone simulator if it exists.
|
|
devices = json.loads(sh(["xcrun", "simctl", "list", "devices", "-j"]))
|
|
candidates: list[tuple[str, str]] = []
|
|
for runtime, devs in (devices.get("devices") or {}).items():
|
|
for dev in devs or []:
|
|
if not dev.get("isAvailable"):
|
|
continue
|
|
name = str(dev.get("name") or "")
|
|
udid = str(dev.get("udid") or "")
|
|
if not udid or not name.startswith("iPhone"):
|
|
continue
|
|
candidates.append((name, udid))
|
|
|
|
candidates.sort(key=lambda it: (0 if "iPhone 16" in it[0] else 1, it[0]))
|
|
if candidates:
|
|
print(candidates[0][1])
|
|
sys.exit(0)
|
|
|
|
# Otherwise, create one from the newest available iOS runtime.
|
|
runtimes = json.loads(sh(["xcrun", "simctl", "list", "runtimes", "-j"])).get("runtimes") or []
|
|
ios = [rt for rt in runtimes if rt.get("platform") == "iOS" and rt.get("isAvailable")]
|
|
if not ios:
|
|
print("No available iOS runtimes found.", file=sys.stderr)
|
|
sys.exit(1)
|
|
|
|
def version_key(rt: dict) -> tuple[int, ...]:
|
|
parts: list[int] = []
|
|
for p in str(rt.get("version") or "0").split("."):
|
|
try:
|
|
parts.append(int(p))
|
|
except ValueError:
|
|
parts.append(0)
|
|
return tuple(parts)
|
|
|
|
ios.sort(key=version_key, reverse=True)
|
|
runtime = ios[0]
|
|
runtime_id = str(runtime.get("identifier") or "")
|
|
if not runtime_id:
|
|
print("Missing iOS runtime identifier.", file=sys.stderr)
|
|
sys.exit(1)
|
|
|
|
supported = runtime.get("supportedDeviceTypes") or []
|
|
iphones = [dt for dt in supported if dt.get("productFamily") == "iPhone"]
|
|
if not iphones:
|
|
print("No iPhone device types for iOS runtime.", file=sys.stderr)
|
|
sys.exit(1)
|
|
|
|
iphones.sort(
|
|
key=lambda dt: (
|
|
0 if "iPhone 16" in str(dt.get("name") or "") else 1,
|
|
str(dt.get("name") or ""),
|
|
)
|
|
)
|
|
device_type_id = str(iphones[0].get("identifier") or "")
|
|
if not device_type_id:
|
|
print("Missing iPhone device type identifier.", file=sys.stderr)
|
|
sys.exit(1)
|
|
|
|
sim_name = f"CI iPhone {uuid.uuid4().hex[:8]}"
|
|
udid = sh(["xcrun", "simctl", "create", sim_name, device_type_id, runtime_id])
|
|
if not udid:
|
|
print("Failed to create iPhone simulator.", file=sys.stderr)
|
|
sys.exit(1)
|
|
print(udid)
|
|
PY
|
|
)"
|
|
echo "Using iOS Simulator id: $DEST_ID"
|
|
xcodebuild test \
|
|
-project apps/ios/Clawdis.xcodeproj \
|
|
-scheme Clawdis \
|
|
-destination "platform=iOS Simulator,id=$DEST_ID" \
|
|
-resultBundlePath "$RESULT_BUNDLE_PATH" \
|
|
-enableCodeCoverage YES
|
|
|
|
- name: iOS coverage summary
|
|
run: |
|
|
set -euo pipefail
|
|
RESULT_BUNDLE_PATH="$RUNNER_TEMP/Clawdis-iOS.xcresult"
|
|
xcrun xccov view --report --only-targets "$RESULT_BUNDLE_PATH"
|
|
|
|
- name: iOS coverage gate (43%)
|
|
run: |
|
|
set -euo pipefail
|
|
RESULT_BUNDLE_PATH="$RUNNER_TEMP/Clawdis-iOS.xcresult"
|
|
RESULT_BUNDLE_PATH="$RESULT_BUNDLE_PATH" python3 - <<'PY'
|
|
import json
|
|
import os
|
|
import subprocess
|
|
import sys
|
|
|
|
target_name = "Clawdis.app"
|
|
minimum = 0.43
|
|
|
|
report = json.loads(
|
|
subprocess.check_output(
|
|
["xcrun", "xccov", "view", "--report", "--json", os.environ["RESULT_BUNDLE_PATH"]],
|
|
text=True,
|
|
)
|
|
)
|
|
|
|
target_coverage = None
|
|
for target in report.get("targets", []):
|
|
if target.get("name") == target_name:
|
|
target_coverage = float(target["lineCoverage"])
|
|
break
|
|
|
|
if target_coverage is None:
|
|
print(f"Could not find coverage for target: {target_name}")
|
|
sys.exit(1)
|
|
|
|
print(f"{target_name} line coverage: {target_coverage * 100:.2f}% (min {minimum * 100:.2f}%)")
|
|
if target_coverage + 1e-12 < minimum:
|
|
sys.exit(1)
|
|
PY
|
|
|
|
android:
|
|
needs: [docs-scope, changed-scope]
|
|
if: needs.docs-scope.outputs.docs_only != 'true' && needs.changed-scope.outputs.run_android == 'true'
|
|
runs-on: blacksmith-16vcpu-ubuntu-2404
|
|
strategy:
|
|
fail-fast: false
|
|
matrix:
|
|
include:
|
|
- task: test
|
|
command: ./gradlew --no-daemon :app:testDebugUnitTest
|
|
- task: build
|
|
command: ./gradlew --no-daemon :app:assembleDebug
|
|
steps:
|
|
- name: Checkout
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: false
|
|
|
|
- name: Setup Java
|
|
uses: actions/setup-java@v5
|
|
with:
|
|
distribution: temurin
|
|
# Keep sdkmanager on the stable JDK path for Linux CI runners.
|
|
java-version: 17
|
|
|
|
- name: Setup Android SDK cmdline-tools
|
|
run: |
|
|
set -euo pipefail
|
|
ANDROID_SDK_ROOT="$HOME/.android-sdk"
|
|
CMDLINE_TOOLS_VERSION="12266719"
|
|
ARCHIVE="commandlinetools-linux-${CMDLINE_TOOLS_VERSION}_latest.zip"
|
|
URL="https://dl.google.com/android/repository/${ARCHIVE}"
|
|
|
|
mkdir -p "$ANDROID_SDK_ROOT/cmdline-tools"
|
|
curl -fsSL "$URL" -o "/tmp/${ARCHIVE}"
|
|
rm -rf "$ANDROID_SDK_ROOT/cmdline-tools/latest"
|
|
unzip -q "/tmp/${ARCHIVE}" -d "$ANDROID_SDK_ROOT/cmdline-tools"
|
|
mv "$ANDROID_SDK_ROOT/cmdline-tools/cmdline-tools" "$ANDROID_SDK_ROOT/cmdline-tools/latest"
|
|
|
|
echo "ANDROID_SDK_ROOT=$ANDROID_SDK_ROOT" >> "$GITHUB_ENV"
|
|
echo "ANDROID_HOME=$ANDROID_SDK_ROOT" >> "$GITHUB_ENV"
|
|
echo "$ANDROID_SDK_ROOT/cmdline-tools/latest/bin" >> "$GITHUB_PATH"
|
|
echo "$ANDROID_SDK_ROOT/platform-tools" >> "$GITHUB_PATH"
|
|
|
|
- name: Setup Gradle
|
|
uses: gradle/actions/setup-gradle@v5
|
|
with:
|
|
gradle-version: 8.11.1
|
|
|
|
- name: Install Android SDK packages
|
|
run: |
|
|
yes | sdkmanager --sdk_root="${ANDROID_SDK_ROOT}" --licenses >/dev/null
|
|
sdkmanager --sdk_root="${ANDROID_SDK_ROOT}" --install \
|
|
"platform-tools" \
|
|
"platforms;android-36" \
|
|
"build-tools;36.0.0"
|
|
|
|
- name: Run Android ${{ matrix.task }}
|
|
working-directory: apps/android
|
|
run: ${{ matrix.command }}
|