feat: add runner conversion scripts and strengthen cutover automation
This commit is contained in:
210
runners-conversion/periodVault/actions-local.sh
Executable file
210
runners-conversion/periodVault/actions-local.sh
Executable file
@@ -0,0 +1,210 @@
|
||||
#!/usr/bin/env bash
# actions-local.sh
# Local GitHub Actions self-hosted runner lifecycle helper.
set -euo pipefail

# Runner install/registration defaults; every value is env-overridable.
RUNNER_DIR="${RUNNER_DIR:-$HOME/.periodvault-actions-runner}"
RUNNER_LABELS="${RUNNER_LABELS:-periodvault}"
RUNNER_NAME="${RUNNER_NAME:-$(hostname)-periodvault}"
RUNNER_WORKDIR="${RUNNER_WORKDIR:-_work}"
RUNNER_PID_FILE="${RUNNER_PID_FILE:-$RUNNER_DIR/.runner.pid}"
RUNNER_LOG_FILE="${RUNNER_LOG_FILE:-$RUNNER_DIR/runner.log}"

# Derive the registration URL from the local git remote when possible.
if git config --get remote.origin.url >/dev/null 2>&1; then
  ORIGIN_URL="$(git config --get remote.origin.url)"
else
  ORIGIN_URL=""
fi

# SSH remotes (git@github.com:owner/repo.git) are rewritten to https;
# https remotes just lose a trailing .git.
if [[ -n "$ORIGIN_URL" && "$ORIGIN_URL" =~ ^git@github\.com:(.*)\.git$ ]]; then
  RUNNER_URL_DEFAULT="https://github.com/${BASH_REMATCH[1]}"
elif [[ -n "$ORIGIN_URL" && "$ORIGIN_URL" =~ ^https://github\.com/.*$ ]]; then
  RUNNER_URL_DEFAULT="${ORIGIN_URL%.git}"
else
  RUNNER_URL_DEFAULT=""
fi

RUNNER_URL="${RUNNER_URL:-$RUNNER_URL_DEFAULT}"

# Defaults for local workflow execution via `act`.
ACT_WORKFLOW="${ACT_WORKFLOW:-.github/workflows/ci.yml}"
ACT_IMAGE="${ACT_IMAGE:-ghcr.io/catthehacker/ubuntu:act-latest}"
# NOTE(review): user-specific Colima socket path baked in as the default —
# other machines must override ACT_DOCKER_SOCKET.
ACT_DOCKER_SOCKET="${ACT_DOCKER_SOCKET:-/Users/s/.colima/augur-actions/docker.sock}"
ACT_DAEMON_SOCKET="${ACT_DAEMON_SOCKET:-/var/run/docker.sock}"
ACT_DOCKER_CONFIG="${ACT_DOCKER_CONFIG:-/tmp/act-docker-config}"
|
||||
|
||||
usage() {
  # Print CLI help. The heredoc is intentionally unquoted so current
  # defaults ($RUNNER_DIR etc.) are interpolated; backticks must therefore
  # be escaped, otherwise `run` is executed as a command substitution.
  cat <<EOF
Usage: ./scripts/actions-local.sh <setup|start|stop|status|remove|run> [job-id]

Environment variables:
  RUNNER_DIR          Runner installation directory (default: $RUNNER_DIR)
  RUNNER_URL          GitHub repo/org URL for runner registration
  RUNNER_TOKEN        Registration/removal token (required for setup/remove)
  RUNNER_LABELS       Runner labels (default: $RUNNER_LABELS)
  RUNNER_NAME         Runner name (default: $RUNNER_NAME)
  RUNNER_WORKDIR      Runner work dir (default: $RUNNER_WORKDIR)

Local Actions execution (\`run\`) variables:
  ACT_WORKFLOW        Workflow file path (default: $ACT_WORKFLOW)
  ACT_IMAGE           Container image for self-hosted label mapping (default: $ACT_IMAGE)
  ACT_DOCKER_SOCKET   Docker host socket (default: $ACT_DOCKER_SOCKET)
  ACT_DAEMON_SOCKET   In-container daemon socket path (default: $ACT_DAEMON_SOCKET)
  ACT_DOCKER_CONFIG   Docker config dir used by act (default: $ACT_DOCKER_CONFIG)
EOF
}
|
||||
|
||||
ensure_runner_binaries() {
  # Abort unless the extracted GitHub runner (config.sh + run.sh) is present
  # and executable under $RUNNER_DIR.
  local cfg="$RUNNER_DIR/config.sh"
  local runner="$RUNNER_DIR/run.sh"
  if [[ -x "$cfg" ]] && [[ -x "$runner" ]]; then
    return 0
  fi
  echo "[actions-local] Missing runner binaries in $RUNNER_DIR."
  echo "[actions-local] Download and extract GitHub runner there first."
  exit 1
}
|
||||
|
||||
ensure_runner_url() {
  # Registration needs a non-empty RUNNER_URL (set explicitly or derived
  # from the git remote at startup).
  if [[ -n "$RUNNER_URL" ]]; then
    return 0
  fi
  echo "[actions-local] RUNNER_URL is empty."
  echo "[actions-local] Set RUNNER_URL=https://github.com/<owner>/<repo> and retry."
  exit 1
}
|
||||
|
||||
require_token() {
  # setup/remove talk to GitHub and need a registration/removal token.
  local token="${RUNNER_TOKEN:-}"
  if [[ -z "$token" ]]; then
    echo "[actions-local] RUNNER_TOKEN is required for this command."
    exit 1
  fi
}
|
||||
|
||||
cmd_setup() {
  # Register this machine as a self-hosted runner (idempotent).
  ensure_runner_binaries
  ensure_runner_url
  require_token

  # .runner is written by config.sh on successful registration.
  if [[ -f "$RUNNER_DIR/.runner" ]]; then
    echo "[actions-local] Runner already configured in $RUNNER_DIR (idempotent no-op)."
    exit 0
  fi

  # Subshell keeps the cd from leaking into the caller's cwd.
  (
    cd "$RUNNER_DIR"
    ./config.sh \
      --unattended \
      --replace \
      --url "$RUNNER_URL" \
      --token "$RUNNER_TOKEN" \
      --name "$RUNNER_NAME" \
      --labels "$RUNNER_LABELS" \
      --work "$RUNNER_WORKDIR"
  )
  echo "[actions-local] Runner configured."
}

cmd_start() {
  # Launch the configured runner in the background and record its pid.
  ensure_runner_binaries
  if [[ ! -f "$RUNNER_DIR/.runner" ]]; then
    echo "[actions-local] Runner not configured. Run setup first."
    exit 1
  fi

  # kill -0 only probes that the recorded pid is still alive.
  if [[ -f "$RUNNER_PID_FILE" ]] && kill -0 "$(cat "$RUNNER_PID_FILE")" >/dev/null 2>&1; then
    echo "[actions-local] Runner already running (pid $(cat "$RUNNER_PID_FILE"))."
    exit 0
  fi

  # nohup detaches run.sh from this shell; $! is its pid.
  (
    cd "$RUNNER_DIR"
    nohup ./run.sh >"$RUNNER_LOG_FILE" 2>&1 &
    echo $! >"$RUNNER_PID_FILE"
  )
  echo "[actions-local] Runner started (pid $(cat "$RUNNER_PID_FILE"))."
  echo "[actions-local] Log: $RUNNER_LOG_FILE"
}

cmd_stop() {
  # Stop the background runner; tolerates a missing or stale pid file.
  if [[ ! -f "$RUNNER_PID_FILE" ]]; then
    echo "[actions-local] Runner is not running."
    exit 0
  fi

  pid="$(cat "$RUNNER_PID_FILE")"
  if kill -0 "$pid" >/dev/null 2>&1; then
    kill "$pid"
    rm -f "$RUNNER_PID_FILE"
    echo "[actions-local] Runner stopped (pid $pid)."
  else
    # Process already gone: just clean up the pid file.
    rm -f "$RUNNER_PID_FILE"
    echo "[actions-local] Runner pid file was stale; cleaned up."
  fi
}
|
||||
|
||||
cmd_status() {
  # Report registration and liveness state; informational only, never fails.
  if [[ -f "$RUNNER_DIR/.runner" ]]; then
    echo "[actions-local] configured: yes"
  else
    echo "[actions-local] configured: no"
  fi

  # Read the pid file once and probe liveness with kill -0.
  local pid=""
  if [[ -f "$RUNNER_PID_FILE" ]]; then
    pid="$(cat "$RUNNER_PID_FILE")"
  fi
  if [[ -n "$pid" ]] && kill -0 "$pid" >/dev/null 2>&1; then
    echo "[actions-local] running: yes (pid $pid)"
  else
    echo "[actions-local] running: no"
  fi

  echo "[actions-local] runner-dir: $RUNNER_DIR"
  echo "[actions-local] runner-labels: $RUNNER_LABELS"
}
|
||||
|
||||
cmd_remove() {
  # Deregister the runner from GitHub (requires a removal token).
  ensure_runner_binaries
  require_token
  if [[ ! -f "$RUNNER_DIR/.runner" ]]; then
    echo "[actions-local] Runner is not configured."
    exit 0
  fi

  # Subshell keeps the cd local.
  (
    cd "$RUNNER_DIR"
    ./config.sh remove --token "$RUNNER_TOKEN"
  )
  echo "[actions-local] Runner registration removed."
}

cmd_run() {
  # Execute one workflow job locally with `act`, mapping the self-hosted and
  # macos-latest labels onto a Linux container image.
  local job="${1:-sdd-gate}"

  if ! command -v act >/dev/null 2>&1; then
    echo "[actions-local] 'act' is required for local workflow execution."
    exit 1
  fi

  # act reads a docker config dir; seed an empty auths file once.
  mkdir -p "$ACT_DOCKER_CONFIG"
  if [[ ! -f "$ACT_DOCKER_CONFIG/config.json" ]]; then
    printf '{"auths":{}}\n' >"$ACT_DOCKER_CONFIG/config.json"
  fi

  # DOCKER_HOST points act at the host-side docker socket; the daemon
  # socket flag is the path visible inside job containers.
  DOCKER_CONFIG="$ACT_DOCKER_CONFIG" \
  DOCKER_HOST="unix://$ACT_DOCKER_SOCKET" \
  act -W "$ACT_WORKFLOW" \
    -j "$job" \
    -P "self-hosted=$ACT_IMAGE" \
    -P "macos-latest=$ACT_IMAGE" \
    --container-architecture linux/amd64 \
    --container-daemon-socket "$ACT_DAEMON_SOCKET"
}

# Command dispatch. No argument (or -h/--help) prints usage.
COMMAND="${1:-}"
case "$COMMAND" in
  setup) cmd_setup ;;
  start) cmd_start ;;
  stop) cmd_stop ;;
  status) cmd_status ;;
  remove) cmd_remove ;;
  run) cmd_run "${2:-}" ;;
  ""|--help|-h) usage ;;
  *)
    echo "[actions-local] Unknown command: $COMMAND"
    usage
    exit 1
    ;;
esac
|
||||
152
runners-conversion/periodVault/check-process.sh
Executable file
152
runners-conversion/periodVault/check-process.sh
Executable file
@@ -0,0 +1,152 @@
|
||||
#!/usr/bin/env bash
# check-process.sh
# Process compliance checks for PR branches.
# Validates: no main commits, no .DS_Store, scripts executable,
# spec artifacts exist, iteration counter incremented, commit tags,
# and file-scope allowlist enforcement.
set -euo pipefail

# Diff base ref; first positional argument, defaulting to origin/main.
BASE_REF="${1:-origin/main}"

# Fall back to the previous commit when the base ref is unknown
# (e.g. shallow clone without the origin/main ref).
if ! git rev-parse --verify "$BASE_REF" >/dev/null 2>&1; then
  BASE_REF="HEAD~1"
fi
|
||||
|
||||
BRANCH="$(git rev-parse --abbrev-ref HEAD)"
# In GitHub Actions merge refs, HEAD is detached. Derive branch from GITHUB_HEAD_REF
# or from the spec directory that matches changed files.
if [[ "$BRANCH" == "HEAD" ]]; then
  if [[ -n "${GITHUB_HEAD_REF:-}" ]]; then
    BRANCH="$GITHUB_HEAD_REF"
  else
    # Fallback: find the spec directory from the diff itself. The
    # CHANGED_FILES array is only populated further down this script,
    # so it cannot be used here — the old loop always saw an empty list.
    while IFS= read -r f; do
      if [[ "$f" == specs/*/spec.md ]]; then
        BRANCH="${f#specs/}"
        BRANCH="${BRANCH%/spec.md}"
        break
      fi
    done < <(git diff --name-only "$BASE_REF"...HEAD)
  fi
fi
# Hard gate: this checker must never run against direct main changes.
if [[ "$BRANCH" == "main" ]]; then
  echo "[check-process] Failing: direct changes on 'main' are not allowed."
  exit 1
fi
|
||||
|
||||
# Collect files changed on this branch relative to the base ref.
CHANGED_FILES=()
while IFS= read -r line; do
  [[ -n "$line" ]] && CHANGED_FILES+=("$line")
done < <(git diff --name-only "$BASE_REF"...HEAD)

# Nothing changed: trivially passing.
if [[ ${#CHANGED_FILES[@]} -eq 0 ]]; then
  echo "[check-process] No changed files relative to $BASE_REF."
  exit 0
fi

FAILURES=0

# --- Check 1: No .DS_Store ---
# rg and grep branches behave identically; rg is just preferred when present.
if command -v rg >/dev/null 2>&1; then
  HAS_DS_STORE="$(printf '%s\n' "${CHANGED_FILES[@]}" | rg -q '(^|/)\.DS_Store$' && echo 1 || echo 0)"
else
  HAS_DS_STORE="$(printf '%s\n' "${CHANGED_FILES[@]}" | grep -Eq '(^|/)\.DS_Store$' && echo 1 || echo 0)"
fi
if [[ "$HAS_DS_STORE" == "1" ]]; then
  echo "[check-process] FAIL: .DS_Store must not be committed."
  FAILURES=$((FAILURES + 1))
fi

# --- Check 2: Scripts executable ---
# NOTE(review): pattern only matches scripts/*.sh at the top level, not
# nested subdirectories — confirm that is intended.
for file in "${CHANGED_FILES[@]}"; do
  if [[ "$file" == scripts/*.sh ]] && [[ -f "$file" ]] && [[ ! -x "$file" ]]; then
    echo "[check-process] FAIL: script is not executable: $file"
    FAILURES=$((FAILURES + 1))
  fi
done

# --- Check 3: Spec artifacts exist ---
# Each branch must carry a specs/<branch>/ directory with all four artifacts.
SPEC_DIR="specs/${BRANCH}"
if [[ -d "$SPEC_DIR" ]]; then
  for artifact in spec.md plan.md tasks.md allowed-files.txt; do
    if [[ ! -f "$SPEC_DIR/$artifact" ]]; then
      echo "[check-process] FAIL: missing spec artifact: $SPEC_DIR/$artifact"
      FAILURES=$((FAILURES + 1))
    fi
  done
else
  echo "[check-process] FAIL: spec directory not found: $SPEC_DIR"
  FAILURES=$((FAILURES + 1))
fi

# --- Check 4: ITERATION incremented ---
if [[ -f ITERATION ]]; then
  BRANCH_ITER="$(tr -d '[:space:]' < ITERATION)"
  # Missing ITERATION on the base ref defaults to 0.
  BASE_ITER="$(git show "$BASE_REF":ITERATION 2>/dev/null | tr -d '[:space:]' || echo "0")"
  # 2>/dev/null silences the numeric -le comparison when a file holds
  # non-numeric content (the test then simply evaluates false).
  if [[ "$BRANCH_ITER" -le "$BASE_ITER" ]] 2>/dev/null; then
    echo "[check-process] FAIL: ITERATION ($BRANCH_ITER) must be > base ($BASE_ITER)"
    FAILURES=$((FAILURES + 1))
  fi
fi

# --- Check 5: Commit messages contain [iter N] ---
# Skip merge commits (merge resolution, GitHub merge refs) — they don't carry iter tags.
COMMITS_WITHOUT_TAG=0
while IFS= read -r msg; do
  # Skip merge commits (start with "Merge " or "merge:")
  if echo "$msg" | grep -qEi '^(Merge |merge:)'; then
    continue
  fi
  if ! echo "$msg" | grep -qE '\[iter [0-9]+\]'; then
    echo "[check-process] FAIL: commit missing [iter N] tag: $msg"
    COMMITS_WITHOUT_TAG=$((COMMITS_WITHOUT_TAG + 1))
  fi
done < <(git log --format='%s' "$BASE_REF"...HEAD)
if [[ $COMMITS_WITHOUT_TAG -gt 0 ]]; then
  FAILURES=$((FAILURES + COMMITS_WITHOUT_TAG))
fi
|
||||
|
||||
# --- Check 6: File-scope allowlist ---
# Every changed file must match at least one pattern in allowed-files.txt.
ALLOWLIST="$SPEC_DIR/allowed-files.txt"
if [[ -f "$ALLOWLIST" ]]; then
  ALLOWED_PATTERNS=()
  while IFS= read -r line; do
    # Strip comments and surrounding whitespace; skip blank lines.
    line="$(echo "$line" | sed 's/#.*//' | xargs)"
    [[ -z "$line" ]] && continue
    ALLOWED_PATTERNS+=("$line")
  done < "$ALLOWLIST"

  # Probe for python3 once instead of once per file/pattern pair.
  if command -v python3 >/dev/null 2>&1; then
    HAVE_PYTHON3=1
  else
    HAVE_PYTHON3=0
  fi

  for file in "${CHANGED_FILES[@]}"; do
    MATCHED=false
    for pattern in "${ALLOWED_PATTERNS[@]}"; do
      # Bash pattern matching (supports * and, with globstar, **);
      # the pattern is deliberately unquoted so it acts as a glob.
      # shellcheck disable=SC2254
      if [[ "$file" == $pattern ]]; then
        MATCHED=true
        break
      fi
      # fnmatch-style fallback: specs/foo/* should match specs/foo/bar.md.
      # File and pattern are passed as argv — never interpolated into the
      # Python source — so quotes/backslashes in names cannot inject code.
      if [[ $HAVE_PYTHON3 -eq 1 ]] \
        && python3 -c 'import fnmatch, sys; sys.exit(0 if fnmatch.fnmatch(sys.argv[1], sys.argv[2]) else 1)' "$file" "$pattern" 2>/dev/null; then
        MATCHED=true
        break
      fi
    done
    if [[ "$MATCHED" == "false" ]]; then
      echo "[check-process] FAIL: file not in allowlist: $file"
      FAILURES=$((FAILURES + 1))
    fi
  done
fi
|
||||
|
||||
# --- Result ---
# Any accumulated failure fails the whole gate.
if [[ $FAILURES -gt 0 ]]; then
  echo "[check-process] FAILED ($FAILURES issues)"
  exit 1
fi

echo "[check-process] PASS ($BASE_REF...HEAD)"
|
||||
102
runners-conversion/periodVault/ci-local.sh
Executable file
102
runners-conversion/periodVault/ci-local.sh
Executable file
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env bash
# ci-local.sh
# Local equivalent of CI checks for self-hosted runner validation.
set -euo pipefail

# Resolve the repository root relative to this script and work from there.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"

# Stage toggles (1 = run). Defaults are applied after argument parsing.
RUN_CONTRACTS=0
RUN_BACKEND=0
RUN_ANDROID=0
RUN_IOS=0
SKIP_INSTALL=0

usage() {
  # Quoted heredoc delimiter: help text is printed literally, no expansion.
  cat <<'EOF'
Usage: ./scripts/ci-local.sh [options]

Options:
  --contracts      Run process/SDD/TDD gate scripts.
  --backend        Run lint + shared/android unit tests.
  --android        Run Android emulator UI tests.
  --ios            Run iOS simulator UI tests.
  --all            Run contracts + backend + android + ios.
  --skip-install   Skip setup bootstrap check.
  --help           Show this help.

If no test scope flags are provided, defaults to: --contracts --backend
EOF
}
|
||||
|
||||
# Parse flags; unknown options print usage and fail.
for arg in "$@"; do
  case "$arg" in
    --contracts) RUN_CONTRACTS=1 ;;
    --backend) RUN_BACKEND=1 ;;
    --android) RUN_ANDROID=1 ;;
    --ios) RUN_IOS=1 ;;
    --all)
      RUN_CONTRACTS=1
      RUN_BACKEND=1
      RUN_ANDROID=1
      RUN_IOS=1
      ;;
    --skip-install) SKIP_INSTALL=1 ;;
    --help|-h) usage; exit 0 ;;
    *)
      echo "[ci-local] Unknown option: $arg"
      usage
      exit 1
      ;;
  esac
done

# Default scope when no stage flags were given.
if [[ $RUN_CONTRACTS -eq 0 && $RUN_BACKEND -eq 0 && $RUN_ANDROID -eq 0 && $RUN_IOS -eq 0 ]]; then
  RUN_CONTRACTS=1
  RUN_BACKEND=1
fi

# Verify the dev environment bootstrap unless explicitly skipped.
if [[ $SKIP_INSTALL -eq 0 ]]; then
  "$SCRIPT_DIR/setup-dev-environment.sh" --verify
fi

# One timestamped log file per run under build/local-ci.
TIMESTAMP="$(date +%Y%m%d-%H%M%S)"
LOG_DIR="$PROJECT_ROOT/build/local-ci"
LOG_FILE="$LOG_DIR/local-ci-$TIMESTAMP.log"
mkdir -p "$LOG_DIR"
|
||||
|
||||
run_step() {
  # Run one named CI stage: print a banner to stdout, then execute the
  # remaining arguments as a command, teeing its output into $LOG_FILE.
  local label="$1"
  shift
  printf '\n================================================\n'
  printf '[ci-local] %s\n' "$label"
  printf '================================================\n'
  "$@" 2>&1 | tee -a "$LOG_FILE"
}
|
||||
|
||||
echo "[ci-local] Writing log to $LOG_FILE"
echo "[ci-local] Starting local CI run at $(date -u '+%Y-%m-%dT%H:%M:%SZ')" | tee -a "$LOG_FILE"

# Gate scripts diff against origin/main as the base ref.
if [[ $RUN_CONTRACTS -eq 1 ]]; then
  run_step "check-process" "$SCRIPT_DIR/check-process.sh" origin/main
  run_step "validate-sdd" "$SCRIPT_DIR/validate-sdd.sh" origin/main
  run_step "validate-tdd" env FORCE_AUDIT_GATES=1 "$SCRIPT_DIR/validate-tdd.sh" origin/main
fi

if [[ $RUN_BACKEND -eq 1 ]]; then
  run_step "ktlint+unit-tests" ./gradlew ktlintCheck shared:jvmTest androidApp:testDebugUnitTest
fi

if [[ $RUN_ANDROID -eq 1 ]]; then
  run_step "android-ui-tests" "$SCRIPT_DIR/run-emulator-tests.sh" android
fi

if [[ $RUN_IOS -eq 1 ]]; then
  run_step "ios-ui-tests" "$SCRIPT_DIR/run-emulator-tests.sh" ios
fi

# set -euo pipefail means reaching this point implies every step passed.
echo ""
echo "[ci-local] PASS"
echo "[ci-local] Log: $LOG_FILE"
|
||||
244
runners-conversion/periodVault/fix-android-emulator.sh
Executable file
244
runners-conversion/periodVault/fix-android-emulator.sh
Executable file
@@ -0,0 +1,244 @@
|
||||
#!/usr/bin/env bash
# fix-android-emulator.sh — Install Android OS system image and fix/create phone or Wear OS AVD
# Usage: ./scripts/fix-android-emulator.sh
# Run when emulator fails with "No initial system image for this configuration".
# Supports phone (default) and Wear OS emulators. Requires: Android SDK (ANDROID_HOME or
# ~/Library/Android/sdk). Installs SDK command-line tools if missing.
#
# ENV VARs (defaults use latest SDK):
#   ANDROID_HOME                SDK root (default: $HOME/Library/Android/sdk on macOS)
#   ANDROID_SDK_ROOT            Same as ANDROID_HOME if set
#   ANDROID_EMULATOR_API_LEVEL  API level, e.g. 35 or 30 (default: auto = latest from sdkmanager --list)
#   ANDROID_AVD_NAME            AVD name to fix or create (default: phone, or wear when type=wearos)
#   ANDROID_EMULATOR_DEVICE     Device profile for new AVDs (default: pixel_8 for phone, wear_os_square for Wear)
#   ANDROID_EMULATOR_TYPE       phone (default) or wearos — which system image and device profile to use
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# ANSI colors for terminal output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# --- Default ENV VARs: latest SDK ---
# Resolution order: ANDROID_HOME, then ANDROID_SDK_ROOT, then the default
# macOS SDK install path; hard error when none exist.
export ANDROID_HOME="${ANDROID_HOME:-${ANDROID_SDK_ROOT:-}}"
if [[ -z "$ANDROID_HOME" ]]; then
  if [[ -d "$HOME/Library/Android/sdk" ]]; then
    export ANDROID_HOME="$HOME/Library/Android/sdk"
  else
    echo -e "${RED}ERROR: ANDROID_HOME not set and ~/Library/Android/sdk not found.${NC}"
    echo "Set ANDROID_HOME to your Android SDK root, or install Android Studio / SDK."
    exit 1
  fi
fi
|
||||
|
||||
# Emulator type: phone (default) or wearos — drives which system image and
# default device profile are used below.
EMULATOR_TYPE="${ANDROID_EMULATOR_TYPE:-phone}"
EMULATOR_TYPE="$(printf '%s' "$EMULATOR_TYPE" | tr '[:upper:]' '[:lower:]')"
# AVD name and device profile (override with ANDROID_AVD_NAME / ANDROID_EMULATOR_DEVICE)
case "$EMULATOR_TYPE" in
  wearos)
    AVD_NAME="${ANDROID_AVD_NAME:-wear}"
    DEVICE_PROFILE="${ANDROID_EMULATOR_DEVICE:-wear_os_square}"
    ;;
  *)
    AVD_NAME="${ANDROID_AVD_NAME:-phone}"
    DEVICE_PROFILE="${ANDROID_EMULATOR_DEVICE:-pixel_8}"
    ;;
esac
|
||||
|
||||
# --- Find or install SDK command-line tools (sdkmanager, avdmanager) ---
# Preference order: cmdline-tools/latest, the legacy tools/ path, then
# whatever is on PATH.
SDKMANAGER=""
AVDMANAGER=""
for d in "$ANDROID_HOME/cmdline-tools/latest/bin" "$ANDROID_HOME/tools/bin"; do
  if [[ -x "$d/sdkmanager" ]]; then
    SDKMANAGER="$d/sdkmanager"
    AVDMANAGER="$d/avdmanager"
    break
  fi
done
if [[ -z "$SDKMANAGER" ]] && command -v sdkmanager &>/dev/null; then
  SDKMANAGER="sdkmanager"
  AVDMANAGER="avdmanager"
fi
|
||||
|
||||
install_cmdline_tools() {
  # Download and unpack the Android SDK command-line tools into
  # $ANDROID_HOME/cmdline-tools/latest, setting SDKMANAGER/AVDMANAGER.
  # Exits non-zero with guidance when download or extraction fails.
  echo -e "${YELLOW}Downloading Android SDK command-line tools...${NC}"
  local zip_url="https://dl.google.com/android/repository/commandlinetools-mac-11076708_latest.zip"
  local zip_file="$PROJECT_ROOT/build/cmdlinetools.zip"
  local tmp_dir="$ANDROID_HOME/cmdline-tools"
  mkdir -p "$(dirname "$zip_file")" "$tmp_dir"
  if ! curl -fsSL -o "$zip_file" "$zip_url"; then
    echo -e "${RED}Download failed. Install command-line tools manually:${NC}"
    echo "  Android Studio → Settings → Appearance & Behavior → System Settings → Android SDK"
    echo "  → SDK Tools tab → check 'Android SDK Command-line Tools (latest)' → Apply"
    exit 1
  fi
  # Fail loudly on a corrupt archive; the old single-chain form
  # `(cd && unzip && mv … || true)` also swallowed cd/unzip failures.
  if ! (cd "$tmp_dir" && unzip -q -o "$zip_file"); then
    echo -e "${RED}Command-line tools install failed. Install from Android Studio SDK Manager.${NC}"
    exit 1
  fi
  # Only the rename is best-effort (it fails harmlessly when 'latest' exists).
  (cd "$tmp_dir" && mv cmdline-tools latest 2>/dev/null) || true
  rm -f "$zip_file"
  SDKMANAGER="$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager"
  AVDMANAGER="$ANDROID_HOME/cmdline-tools/latest/bin/avdmanager"
  if [[ ! -x "$SDKMANAGER" ]]; then
    # Some zips unpack to cmdline-tools/ inside the zip
    if [[ -d "$tmp_dir/cmdline-tools" ]]; then
      mv "$tmp_dir/cmdline-tools" "$tmp_dir/latest"
    fi
    SDKMANAGER="$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager"
    AVDMANAGER="$ANDROID_HOME/cmdline-tools/latest/bin/avdmanager"
  fi
  if [[ ! -x "$SDKMANAGER" ]]; then
    echo -e "${RED}Command-line tools install failed. Install from Android Studio SDK Manager.${NC}"
    exit 1
  fi
  echo -e "${GREEN}Command-line tools installed.${NC}"
}
|
||||
|
||||
# Bootstrap the command-line tools if none were found above. Use
# `command -v` so a bare "sdkmanager" found on PATH is not misclassified:
# `[[ -x sdkmanager ]]` tests a relative path and would always fail,
# triggering a pointless reinstall.
if [[ -z "$SDKMANAGER" ]] || ! command -v "$SDKMANAGER" >/dev/null 2>&1; then
  install_cmdline_tools
fi

# --- Ensure PATH for this script ---
export PATH="$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$ANDROID_HOME/platform-tools:$PATH"
|
||||
|
||||
# --- Default to latest SDK system image (when ANDROID_EMULATOR_API_LEVEL unset) ---
|
||||
# Parses sdkmanager --list for highest API level with Google Play arm64-v8a image.
|
||||
set_latest_system_image() {
  # Scan `sdkmanager --list` for Google APIs arm64-v8a phone images and
  # select the highest API level, setting ANDROID_EMULATOR_API_LEVEL and
  # SYSTEM_IMAGE_PACKAGE when one is found.
  local listing
  listing=$("$SDKMANAGER" --list 2>/dev/null) || true
  local top_api=0
  local top_pkg=""
  local candidate level
  while IFS= read -r raw; do
    # Extract a matching package path (may be the first column or embedded
    # anywhere in the line): system-images;android-NN;google_apis...;arm64-v8a
    candidate=$(printf '%s\n' "$raw" | sed -n 's/.*\(system-images;android-[0-9][0-9]*;google_apis[^;]*;arm64-v8a\).*/\1/p')
    [[ -z "$candidate" ]] && continue
    level=${candidate#"system-images;android-"}
    level=${level%%;*}
    if [[ "$level" =~ ^[0-9]+$ ]] && (( level > top_api )); then
      top_api="$level"
      top_pkg="$candidate"
    fi
  done <<< "$listing"
  if [[ -n "$top_pkg" ]] && (( top_api > 0 )); then
    ANDROID_EMULATOR_API_LEVEL="$top_api"
    SYSTEM_IMAGE_PACKAGE="$top_pkg"
    echo -e "${GREEN}Using latest SDK system image: API $top_api ($SYSTEM_IMAGE_PACKAGE)${NC}"
  fi
}
|
||||
|
||||
# Parses sdkmanager --list for highest API level with Wear OS image.
|
||||
# Matches: system-images;android-NN;wear;arm64-v8a or ...;google_apis;wear_os_arm64
|
||||
# Parses sdkmanager --list for highest API level with Wear OS image.
# Matches: system-images;android-NN;wear;arm64-v8a or ...;google_apis;wear_os_arm64
set_latest_system_image_wear() {
  # Sets ANDROID_EMULATOR_API_LEVEL and SYSTEM_IMAGE_PACKAGE to the
  # highest-API Wear OS image advertised by sdkmanager, if any.
  local list_output
  list_output=$("$SDKMANAGER" --list 2>/dev/null) || true
  local best_api=0
  local best_package=""
  local pkg api
  while IFS= read -r line; do
    # Must be a system image line containing android-NN and wear (wear; or wear_os)
    [[ "$line" != *"system-images"* ]] && continue
    [[ "$line" != *"android-"* ]] && continue
    [[ "$line" != *"wear"* ]] && continue
    # Extract package: system-images;android-NN;...  The greedy [^;]* tail
    # can drag sdkmanager's " | version | description" column text into the
    # capture, so truncate at the first space to keep only the package path.
    pkg=$(echo "$line" | sed -n 's/.*\(system-images;android-[0-9][0-9]*;[^;]*;[^;]*\).*/\1/p')
    [[ -z "$pkg" ]] && continue
    pkg="${pkg%% *}"
    api=$(echo "$pkg" | sed 's/.*android-\([0-9][0-9]*\).*/\1/')
    if [[ "$api" =~ ^[0-9]+$ ]] && [[ "$api" -gt "$best_api" ]]; then
      best_api="$api"
      best_package="$pkg"
    fi
  done <<< "$list_output"
  if [[ -n "$best_package" ]] && [[ "$best_api" -gt 0 ]]; then
    ANDROID_EMULATOR_API_LEVEL="$best_api"
    SYSTEM_IMAGE_PACKAGE="$best_package"
    echo -e "${GREEN}Using latest Wear OS system image: API $best_api ($SYSTEM_IMAGE_PACKAGE)${NC}"
  fi
}
|
||||
|
||||
# If ANDROID_EMULATOR_API_LEVEL not set, detect latest from SDK (phone or Wear OS)
if [[ -z "${ANDROID_EMULATOR_API_LEVEL:-}" ]]; then
  if [[ "$EMULATOR_TYPE" == "wearos" ]]; then
    set_latest_system_image_wear
  else
    set_latest_system_image
  fi
fi

# Fallback when detection didn't set a package (e.g. no sdkmanager list)
API_LEVEL="${ANDROID_EMULATOR_API_LEVEL:-35}"
if [[ -z "${SYSTEM_IMAGE_PACKAGE:-}" ]]; then
  if [[ "$EMULATOR_TYPE" == "wearos" ]]; then
    # Wear OS: images often at API 30; package format android-NN;wear;arm64-v8a
    WEAR_API="${ANDROID_EMULATOR_API_LEVEL:-30}"
    SYSTEM_IMAGE_PACKAGE="system-images;android-${WEAR_API};wear;arm64-v8a"
    API_LEVEL="$WEAR_API"
  elif [[ "$API_LEVEL" == "36" ]]; then
    # NOTE(review): API 36 is special-cased to the 16 KB page-size Play
    # Store variant — confirm this remains the published arm64 package.
    SYSTEM_IMAGE_PACKAGE="system-images;android-36;google_apis_playstore_ps16k;arm64-v8a"
  else
    SYSTEM_IMAGE_PACKAGE="system-images;android-${API_LEVEL};google_apis_playstore;arm64-v8a"
  fi
fi

# --- Accept licenses (non-interactive) ---
# Piping `yes` auto-accepts every prompt; failures here are non-fatal.
echo -e "${YELLOW}Accepting SDK licenses...${NC}"
yes 2>/dev/null | "$SDKMANAGER" --licenses >/dev/null 2>&1 || true

# --- Install system image ---
echo -e "${YELLOW}Installing system image: $SYSTEM_IMAGE_PACKAGE${NC}"
if ! "$SDKMANAGER" "$SYSTEM_IMAGE_PACKAGE"; then
  echo -e "${RED}Failed to install system image. Try a different API level:${NC}"
  echo "  ANDROID_EMULATOR_API_LEVEL=34 $0"
  exit 1
fi

# --- Verify image has system.img (path from package: a;b;c;d -> a/b/c/d) ---
REL_IMAGE_DIR=$(echo "$SYSTEM_IMAGE_PACKAGE" | sed 's/;/\//g')
IMAGE_DIR="$ANDROID_HOME/$REL_IMAGE_DIR"
if [[ ! -f "$IMAGE_DIR/system.img" ]]; then
  echo -e "${RED}Installed image missing system.img at $IMAGE_DIR${NC}"
  exit 1
fi
echo -e "${GREEN}System image OK: $IMAGE_DIR${NC}"

# --- Resolve AVD directory (phone may point to e.g. Pixel_9_Pro.avd via .ini) ---
AVD_INI="$HOME/.android/avd/${AVD_NAME}.ini"
AVD_DIR=""
if [[ -f "$AVD_INI" ]]; then
  # The .ini's path= entry is authoritative when it points at a real dir.
  AVD_PATH=$(grep "^path=" "$AVD_INI" 2>/dev/null | cut -d= -f2-)
  if [[ -n "$AVD_PATH" ]] && [[ -d "$AVD_PATH" ]]; then
    AVD_DIR="$AVD_PATH"
  fi
fi
if [[ -z "$AVD_DIR" ]]; then
  AVD_DIR="$HOME/.android/avd/${AVD_NAME}.avd"
fi

# Update existing AVD config to use the working system image
if [[ -d "$AVD_DIR" ]] && [[ -f "$AVD_DIR/config.ini" ]] && [[ -f "$IMAGE_DIR/system.img" ]]; then
  CONFIG="$AVD_DIR/config.ini"
  if grep -q "image.sysdir" "$CONFIG"; then
    # Portable sed: write to temp then mv (macOS sed -i needs backup arg)
    sed "s|image.sysdir.1=.*|image.sysdir.1=$REL_IMAGE_DIR/|" "$CONFIG" > "${CONFIG}.tmp"
    mv "${CONFIG}.tmp" "$CONFIG"
    echo -e "${GREEN}Updated AVD config to use $REL_IMAGE_DIR${NC}"
  fi
elif [[ ! -d "$AVD_DIR" ]]; then
  echo -e "${YELLOW}Creating AVD '$AVD_NAME' with device profile $DEVICE_PROFILE...${NC}"
  # `echo no` declines avdmanager's custom-hardware-profile prompt.
  echo no | "$AVDMANAGER" create avd \
    -n "$AVD_NAME" \
    -k "$SYSTEM_IMAGE_PACKAGE" \
    -d "$DEVICE_PROFILE" \
    --force
  echo -e "${GREEN}AVD '$AVD_NAME' created.${NC}"
fi

# Final hints for the user.
echo ""
echo -e "${GREEN}Done. Start the emulator with:${NC}"
echo "  emulator -avd $AVD_NAME"
echo ""
if [[ "$EMULATOR_TYPE" == "wearos" ]]; then
  echo "Fix Wear OS only: ANDROID_EMULATOR_TYPE=wearos $0"
  echo "Or fix both phone and Wear: $0 && ANDROID_EMULATOR_TYPE=wearos $0"
else
  echo "Or run deploy: ./scripts/deploy-emulator.sh android"
fi
echo ""
||||
77
runners-conversion/periodVault/init-audit.sh
Executable file
77
runners-conversion/periodVault/init-audit.sh
Executable file
@@ -0,0 +1,77 @@
|
||||
#!/usr/bin/env bash
# init-audit.sh
# Initializes local audit scaffolding used by process gates.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
AUDIT_DIR="$PROJECT_ROOT/audit"

mkdir -p "$AUDIT_DIR"

# Seed audit/requirements.json once; existing files are never overwritten.
if [[ ! -f "$AUDIT_DIR/requirements.json" ]]; then
  cat >"$AUDIT_DIR/requirements.json" <<'JSON'
{
  "version": 1,
  "lastUpdated": "2026-02-21",
  "requirements": [
    {
      "id": "R-CI-SELF-HOSTED",
      "description": "CI jobs run on self-hosted runner labels with documented fallback."
    },
    {
      "id": "R-DEV-SETUP",
      "description": "Repository provides idempotent bootstrap script and verification commands."
    },
    {
      "id": "R-DEV-GUIDE",
      "description": "Developer guide is aligned with README, scripts, and local workflow."
    }
  ]
}
JSON
  echo "[init-audit] Created audit/requirements.json"
else
  echo "[init-audit] Found audit/requirements.json"
fi

# Seed an empty test-run ledger once.
if [[ ! -f "$AUDIT_DIR/test-runs.json" ]]; then
  cat >"$AUDIT_DIR/test-runs.json" <<'JSON'
{
  "version": 1,
  "runs": []
}
JSON
  echo "[init-audit] Created audit/test-runs.json"
else
  echo "[init-audit] Found audit/test-runs.json"
fi

# Seed the CODEX report template once.
if [[ ! -f "$PROJECT_ROOT/CODEX-REPORT.md" ]]; then
  cat >"$PROJECT_ROOT/CODEX-REPORT.md" <<'MD'
# CODEX Report

## Requirements Mapping
- R-CI-SELF-HOSTED: pending
- R-DEV-SETUP: pending
- R-DEV-GUIDE: pending

## Constitution Compliance Matrix
| Principle | Status | Notes |
|-----------|--------|-------|
| I | pending | |
| X | pending | |
| XX | pending | |

## Evidence
- Add command outputs and CI links.

## Risks
- Add known risks and mitigations.
MD
  echo "[init-audit] Created CODEX-REPORT.md template"
else
  echo "[init-audit] Found CODEX-REPORT.md"
fi

echo "[init-audit] Audit scaffolding ready."
|
||||
107
runners-conversion/periodVault/monitor-pr-checks.sh
Executable file
107
runners-conversion/periodVault/monitor-pr-checks.sh
Executable file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env bash
# monitor-pr-checks.sh — Poll a PR's GitHub checks until they all settle:
# polls fast at first, then backs off once the check set stops changing.
set -euo pipefail

# Print CLI usage plus the CHECK_* environment knobs that tune poll cadence.
usage() {
  cat <<'EOF'
Usage: scripts/monitor-pr-checks.sh <pr-number>

Environment overrides:
  CHECK_FAST_INTERVAL_SECONDS   default: 60
  CHECK_SLOW_INTERVAL_SECONDS   default: 180
  CHECK_MIN_FAST_WINDOW_SECONDS default: 900
  CHECK_STABLE_CYCLES_FOR_SLOW  default: 5
EOF
}
||||
|
||||
if [[ "${1:-}" == "-h" ]] || [[ "${1:-}" == "--help" ]]; then
|
||||
usage
|
||||
exit 0
|
||||
fi
|
||||
|
||||
PR_NUMBER="${1:-}"
|
||||
if [[ -z "$PR_NUMBER" ]]; then
|
||||
usage >&2
|
||||
exit 2
|
||||
fi
|
||||
|
||||
FAST_INTERVAL_SECONDS="${CHECK_FAST_INTERVAL_SECONDS:-60}"
|
||||
SLOW_INTERVAL_SECONDS="${CHECK_SLOW_INTERVAL_SECONDS:-180}"
|
||||
MIN_FAST_WINDOW_SECONDS="${CHECK_MIN_FAST_WINDOW_SECONDS:-900}"
|
||||
STABLE_CYCLES_FOR_SLOW="${CHECK_STABLE_CYCLES_FOR_SLOW:-5}"
|
||||
|
||||
start_ts="$(date +%s)"
|
||||
stable_cycles=0
|
||||
last_fingerprint=""
|
||||
err_file="$(mktemp)"
|
||||
trap 'rm -f "$err_file"' EXIT
|
||||
|
||||
echo "Monitoring PR #${PR_NUMBER} checks"
|
||||
echo "Policy: fast=${FAST_INTERVAL_SECONDS}s, slow=${SLOW_INTERVAL_SECONDS}s, min-fast-window=${MIN_FAST_WINDOW_SECONDS}s, stable-cycles-for-slow=${STABLE_CYCLES_FOR_SLOW}"
|
||||
|
||||
while true; do
|
||||
now_ts="$(date +%s)"
|
||||
elapsed="$((now_ts - start_ts))"
|
||||
elapsed_mm="$((elapsed / 60))"
|
||||
elapsed_ss="$((elapsed % 60))"
|
||||
|
||||
if ! checks_json="$(gh pr checks "$PR_NUMBER" --json name,state,link 2>"$err_file")"; then
|
||||
err_msg="$(tr '\n' ' ' <"$err_file" | sed 's/[[:space:]]\+/ /g; s/^ //; s/ $//')"
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] elapsed ${elapsed_mm}m${elapsed_ss}s | check query failed: ${err_msg:-unknown error}"
|
||||
sleep "$FAST_INTERVAL_SECONDS"
|
||||
continue
|
||||
fi
|
||||
if [[ "$checks_json" == "[]" ]]; then
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] elapsed ${elapsed_mm}m${elapsed_ss}s | no checks yet"
|
||||
sleep "$FAST_INTERVAL_SECONDS"
|
||||
continue
|
||||
fi
|
||||
|
||||
success_count="$(jq '[.[] | select(.state=="SUCCESS")] | length' <<<"$checks_json")"
|
||||
failure_count="$(jq '[.[] | select(.state=="FAILURE" or .state=="ERROR" or .state=="STARTUP_FAILURE" or .state=="TIMED_OUT")] | length' <<<"$checks_json")"
|
||||
cancelled_count="$(jq '[.[] | select(.state=="CANCELLED")] | length' <<<"$checks_json")"
|
||||
skipped_count="$(jq '[.[] | select(.state=="SKIPPED" or .state=="NEUTRAL")] | length' <<<"$checks_json")"
|
||||
active_count="$(jq '[.[] | select(.state=="PENDING" or .state=="QUEUED" or .state=="IN_PROGRESS" or .state=="WAITING" or .state=="REQUESTED")] | length' <<<"$checks_json")"
|
||||
total_count="$(jq 'length' <<<"$checks_json")"
|
||||
|
||||
fingerprint="$(jq -r 'sort_by(.name) | map("\(.name)=\(.state)") | join(";")' <<<"$checks_json")"
|
||||
if [[ "$fingerprint" == "$last_fingerprint" ]]; then
|
||||
stable_cycles="$((stable_cycles + 1))"
|
||||
else
|
||||
stable_cycles=0
|
||||
last_fingerprint="$fingerprint"
|
||||
fi
|
||||
|
||||
echo "[$(date -u '+%Y-%m-%dT%H:%M:%SZ')] elapsed ${elapsed_mm}m${elapsed_ss}s | total=${total_count} success=${success_count} skipped=${skipped_count} active=${active_count} failed=${failure_count} cancelled=${cancelled_count}"
|
||||
|
||||
if [[ "$failure_count" -gt 0 ]]; then
|
||||
echo "Failing checks:"
|
||||
jq -r '.[] | select(.state=="FAILURE" or .state=="ERROR" or .state=="STARTUP_FAILURE" or .state=="TIMED_OUT") | " - \(.name): \(.state) \(.link)"' <<<"$checks_json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$active_count" -eq 0 ]]; then
|
||||
if [[ "$cancelled_count" -gt 0 ]]; then
|
||||
echo "Checks ended with cancellations."
|
||||
jq -r '.[] | select(.state=="CANCELLED") | " - \(.name): \(.link)"' <<<"$checks_json"
|
||||
exit 1
|
||||
fi
|
||||
if [[ "$((success_count + skipped_count))" -eq "$total_count" ]]; then
|
||||
echo "All checks passed."
|
||||
exit 0
|
||||
fi
|
||||
echo "Checks finished with non-success states."
|
||||
jq -r '.[] | " - \(.name): \(.state) \(.link)"' <<<"$checks_json"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if (( elapsed < MIN_FAST_WINDOW_SECONDS )); then
|
||||
sleep "$FAST_INTERVAL_SECONDS"
|
||||
continue
|
||||
fi
|
||||
|
||||
if (( stable_cycles >= STABLE_CYCLES_FOR_SLOW )); then
|
||||
sleep "$SLOW_INTERVAL_SECONDS"
|
||||
else
|
||||
sleep "$FAST_INTERVAL_SECONDS"
|
||||
fi
|
||||
done
|
||||
538
runners-conversion/periodVault/run-emulator-tests.sh
Executable file
538
runners-conversion/periodVault/run-emulator-tests.sh
Executable file
@@ -0,0 +1,538 @@
|
||||
#!/usr/bin/env bash
# run-emulator-tests.sh — Run all emulator/simulator UI tests for PeriodVault
# Usage: ./scripts/run-emulator-tests.sh [android|ios|all]
# Logs to build/emulator-tests.log; script reads the log to detect adb errors (e.g. multiple devices).
#
# iOS watchdog env controls:
#   IOS_HEARTBEAT_SECONDS                  (default: 30)
#   IOS_STARTUP_PROGRESS_TIMEOUT_SECONDS   (default: 900)
#   IOS_TEST_STALL_TIMEOUT_SECONDS         (default: 480)
#   IOS_UNRESPONSIVE_STALL_TIMEOUT_SECONDS (default: 120)
#   IOS_HARD_TIMEOUT_SECONDS               (default: 10800)
#   IOS_ACTIVE_CPU_THRESHOLD               (default: 1.0)
set -euo pipefail

# Platform selector: android | ios | all (validated by the dispatch case below).
PLATFORM="${1:-all}"
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
# All subsequent relative paths (gradlew, iosApp/, build outputs) assume repo root.
cd "$PROJECT_ROOT"

# shellcheck source=scripts/lib.sh
# lib.sh provides ensure_log_file, ensure_android_emulator, run_and_log, LOG_FILE.
source "$SCRIPT_DIR/lib.sh"
ensure_log_file "emulator-tests.log"

# Start Android emulator headless for test runs (no GUI window needed)
export EMULATOR_HEADLESS=1

# ANSI colors for human-readable status output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Per-platform result flags; both 0 after a run means the platform was skipped.
ANDROID_PASS=0
IOS_PASS=0
ANDROID_FAIL=0
IOS_FAIL=0
||||
|
||||
# Run the Android instrumented UI test suite against a (headless) emulator.
# Sets ANDROID_PASS / ANDROID_FAIL; returns non-zero on any failure.
# A background watchdog emits progress heartbeats and kills Gradle on a hard
# timeout or when the emulator goes unresponsive with no test progress.
run_android() {
  echo -e "${YELLOW}=== Android Emulator Tests ===${NC}"

  if ! ensure_android_emulator; then
    echo -e "${RED}ERROR: Could not start or connect to Android emulator. See $LOG_FILE${NC}"
    ANDROID_FAIL=1
    return 1
  fi

  # Disable animations for stable UI tests
  run_and_log "adb_disable_animations" adb shell "settings put global window_animation_scale 0; settings put global transition_animation_scale 0; settings put global animator_duration_scale 0" || true

  # Pre-flight: verify emulator is responsive via adb shell
  echo "Verifying Android emulator is responsive..."
  if ! adb shell getprop sys.boot_completed 2>/dev/null | grep -q "1"; then
    echo -e "${RED}ERROR: Android emulator not responsive (sys.boot_completed != 1). Aborting.${NC}"
    ANDROID_FAIL=1
    return 1
  fi
  echo "Android emulator is responsive."

  # Uninstall the app to ensure a clean database for tests
  echo "Cleaning app data..."
  adb uninstall periodvault.androidApp 2>/dev/null || true
  adb uninstall periodvault.androidApp.test 2>/dev/null || true

  echo "Running Android instrumented tests..."
  local GRADLE_PID
  local GRADLE_EXIT=0
  local TOTAL_ANDROID_TESTS=0
  # FIX: '|| true' keeps `set -o pipefail` + `set -e` from aborting the script
  # when no @Test annotations are found (grep exits non-zero on zero matches).
  TOTAL_ANDROID_TESTS=$(find androidApp/src/androidTest -name '*.kt' -type f -exec grep -hE '@Test' {} + 2>/dev/null | wc -l | tr -d ' ' || true)
  if [[ -z "$TOTAL_ANDROID_TESTS" ]]; then
    TOTAL_ANDROID_TESTS=0
  fi
  ./gradlew androidApp:connectedDebugAndroidTest 2>&1 &
  GRADLE_PID=$!

  # Progress/liveness watchdog:
  # - emits heartbeat every 30s with completed Android test cases and emulator health
  # - kills early only if emulator is unresponsive and test progress is stalled for 10m
  # - retains a generous hard timeout as last-resort safety net
  local HEARTBEAT_SECONDS=30
  local UNRESPONSIVE_STALL_TIMEOUT_SECONDS=600
  local HARD_TIMEOUT_SECONDS=7200 # 2 hours
  (
    local start_ts now_ts elapsed
    local last_progress_ts
    local completed=0
    local last_completed=0
    local stale_seconds=0
    local emu_health=""

    start_ts=$(date +%s)
    last_progress_ts=$start_ts

    while kill -0 "$GRADLE_PID" 2>/dev/null; do
      sleep "$HEARTBEAT_SECONDS"
      now_ts=$(date +%s)
      elapsed=$((now_ts - start_ts))

      # Completed-test count from the connected-test result XML.
      # FIX: '|| true' so pipefail cannot silently kill this watchdog subshell
      # before any result XML exists (which would disable every timeout below).
      completed=$(find androidApp/build/outputs/androidTest-results/connected -name '*.xml' -type f -exec grep -ho "<testcase " {} + 2>/dev/null | wc -l | tr -d ' ' || true)
      if [[ -z "$completed" ]]; then
        completed=0
      fi

      if [[ "$completed" -gt "$last_completed" ]]; then
        last_progress_ts=$now_ts
        last_completed=$completed
      fi

      if adb shell getprop sys.boot_completed 2>/dev/null | grep -q "1"; then
        emu_health="responsive"
      else
        emu_health="UNRESPONSIVE"
      fi

      stale_seconds=$((now_ts - last_progress_ts))
      local elapsed_mm elapsed_ss
      elapsed_mm=$((elapsed / 60))
      elapsed_ss=$((elapsed % 60))

      if [[ "$TOTAL_ANDROID_TESTS" -gt 0 ]]; then
        echo "Android progress: ${completed}/${TOTAL_ANDROID_TESTS} tests complete | elapsed ${elapsed_mm}m${elapsed_ss}s | emulator ${emu_health}"
      else
        echo "Android progress: ${completed} tests complete | elapsed ${elapsed_mm}m${elapsed_ss}s | emulator ${emu_health}"
      fi

      if [[ "$elapsed" -ge "$HARD_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing Gradle (PID $GRADLE_PID) after hard timeout ${HARD_TIMEOUT_SECONDS}s"
        kill "$GRADLE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$GRADLE_PID" 2>/dev/null || true
        break
      fi

      if [[ "$emu_health" == "UNRESPONSIVE" ]] && [[ "$stale_seconds" -ge "$UNRESPONSIVE_STALL_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing Gradle (PID $GRADLE_PID) - emulator unresponsive and no progress for ${stale_seconds}s"
        kill "$GRADLE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$GRADLE_PID" 2>/dev/null || true
        break
      fi
    done
  ) &
  local WATCHDOG_PID=$!
  wait "$GRADLE_PID" 2>/dev/null || GRADLE_EXIT=$?
  kill "$WATCHDOG_PID" 2>/dev/null || true
  wait "$WATCHDOG_PID" 2>/dev/null || true

  # 137/143 = killed by SIGKILL/SIGTERM, i.e. the watchdog fired.
  if [[ $GRADLE_EXIT -eq 137 ]] || [[ $GRADLE_EXIT -eq 143 ]]; then
    echo -e "${RED}Android emulator tests terminated by watchdog${NC}"
    ANDROID_FAIL=1
    run_and_log "adb_restore_animations" adb shell "settings put global window_animation_scale 1; settings put global transition_animation_scale 1; settings put global animator_duration_scale 1" || true
    return 1
  elif [[ $GRADLE_EXIT -eq 0 ]]; then
    echo -e "${GREEN}Android emulator tests PASSED${NC}"
    ANDROID_PASS=1
    # Emit runtime evidence for CI tracking (counts scraped from the HTML report;
    # best-effort — empty values fall back to 0/unknown).
    local android_duration_s=""
    local android_test_count=""
    if [[ -f androidApp/build/reports/androidTests/connected/debug/index.html ]]; then
      android_test_count="$(grep -o '<div class="counter">[0-9]*</div>' androidApp/build/reports/androidTests/connected/debug/index.html | head -1 | grep -o '[0-9]*' || echo "")"
      android_duration_s="$(grep -o '<div class="counter">[0-9a-z.]*s</div>' androidApp/build/reports/androidTests/connected/debug/index.html | head -1 | grep -o '[0-9.]*' || echo "")"
    fi
    echo "RUNTIME_EVIDENCE: {\"suite\": \"android_ui\", \"tests\": ${android_test_count:-0}, \"duration\": \"${android_duration_s:-unknown}s\", \"timestamp\": \"$(date -u '+%Y-%m-%dT%H:%M:%SZ')\"}"
  else
    echo -e "${RED}Android emulator tests FAILED${NC}"
    ANDROID_FAIL=1
    echo "Test reports: androidApp/build/reports/androidTests/connected/debug/"
    run_and_log "adb_restore_animations" adb shell "settings put global window_animation_scale 1; settings put global transition_animation_scale 1; settings put global animator_duration_scale 1" || true
    return 1
  fi

  # Re-enable animations
  run_and_log "adb_restore_animations" adb shell "settings put global window_animation_scale 1; settings put global transition_animation_scale 1; settings put global animator_duration_scale 1" || true
}
||||
|
||||
# Run the iOS UI test suite on the first available simulator.
# Sets IOS_PASS / IOS_FAIL; returns non-zero on any failure.
# Phase 1 builds synchronously (fail-fast); phase 2 runs tests in the
# background under a watchdog that tracks test completion, log growth,
# xcodebuild CPU, and simulator/CoreSimulatorService health.
run_ios() {
  echo -e "${YELLOW}=== iOS Simulator Tests ===${NC}"

  # Find an available simulator: first available device under any iOS runtime.
  local SIM_ID
  SIM_ID=$(xcrun simctl list devices available -j 2>/dev/null | python3 -c "
import json, sys
data = json.load(sys.stdin)
for runtime, devices in data.get('devices', {}).items():
    if 'iOS' in runtime:
        for d in devices:
            if d.get('isAvailable'):
                print(d['udid'])
                sys.exit(0)
sys.exit(1)
" 2>/dev/null) || true

  if [[ -z "$SIM_ID" ]]; then
    echo -e "${RED}ERROR: No available iOS simulator found.${NC}"
    IOS_FAIL=1
    return 1
  fi

  # Human-readable name for logs. FIX: '|| true' keeps pipefail from aborting
  # the script if the grep unexpectedly matches nothing.
  local SIM_NAME
  SIM_NAME=$(xcrun simctl list devices available | grep "$SIM_ID" | sed 's/ (.*//' | xargs || true)
  echo "Using simulator: $SIM_NAME ($SIM_ID)"

  # Boot simulator if needed (no-op / harmless error if already booted)
  xcrun simctl boot "$SIM_ID" 2>/dev/null || true

  # Health check: verify simulator is actually responsive (not just "Booted" in simctl)
  echo "Verifying simulator is responsive..."
  local HEALTH_OK=false
  local i
  for i in 1 2 3 4 5; do
    if xcrun simctl spawn "$SIM_ID" launchctl print system >/dev/null 2>&1; then
      HEALTH_OK=true
      break
    fi
    echo " Attempt $i/5: simulator not responsive, waiting 5s..."
    sleep 5
  done
  if [[ "$HEALTH_OK" != "true" ]]; then
    echo -e "${RED}ERROR: Simulator $SIM_NAME ($SIM_ID) reports Booted but is not responsive.${NC}"
    echo "Attempting full restart..."
    xcrun simctl shutdown "$SIM_ID" 2>/dev/null || true
    sleep 3
    xcrun simctl boot "$SIM_ID" 2>/dev/null || true
    sleep 10
    if ! xcrun simctl spawn "$SIM_ID" launchctl print system >/dev/null 2>&1; then
      echo -e "${RED}ERROR: Simulator still unresponsive after restart. Aborting.${NC}"
      IOS_FAIL=1
      return 1
    fi
    echo "Simulator recovered after restart."
  fi
  echo "Simulator is responsive."

  # Generate Xcode project if needed
  if [[ ! -f iosApp/iosApp.xcodeproj/project.pbxproj ]]; then
    echo "Generating Xcode project..."
    (cd iosApp && xcodegen generate)
  fi

  # --- Phase 1: Build (synchronous, fail-fast) ---
  echo "Building iOS UI tests..."
  local BUILD_DIR
  BUILD_DIR=$(mktemp -d)
  local BUILD_LOG
  BUILD_LOG=$(mktemp)
  local BUILD_START
  BUILD_START=$(date +%s)

  # FIX: capture the exit status explicitly via '|| BUILD_EXIT=$?'. The
  # original relied on `$?` after a bare command, so under `set -e` a build
  # failure aborted the whole script before the failure report and the
  # temp-dir cleanup below could run.
  local BUILD_EXIT=0
  xcodebuild build-for-testing \
    -project iosApp/iosApp.xcodeproj \
    -scheme iosApp \
    -destination "platform=iOS Simulator,id=$SIM_ID" \
    -derivedDataPath "$BUILD_DIR" \
    > "$BUILD_LOG" 2>&1 || BUILD_EXIT=$?

  local BUILD_END
  BUILD_END=$(date +%s)
  echo "iOS build phase: $((BUILD_END - BUILD_START))s (exit=$BUILD_EXIT)"

  if [[ $BUILD_EXIT -ne 0 ]]; then
    echo -e "${RED}BUILD FAILED — last 30 lines:${NC}"
    tail -30 "$BUILD_LOG"
    rm -f "$BUILD_LOG"
    rm -rf "$BUILD_DIR"
    IOS_FAIL=1
    return 1
  fi
  rm -f "$BUILD_LOG"

  # Disable animations for stable, faster UI tests
  echo "Disabling simulator animations..."
  xcrun simctl spawn "$SIM_ID" defaults write com.apple.Accessibility ReduceMotionEnabled -bool YES 2>/dev/null || true

  # Uninstall the app to ensure a clean database for tests
  echo "Cleaning app data..."
  xcrun simctl uninstall "$SIM_ID" com.periodvault.app 2>/dev/null || true

  # --- Phase 2: Test (background with watchdog, parallel execution) ---
  echo "Running iOS UI tests (parallel enabled)..."
  local TEST_EXIT=0
  local TEST_LOG
  TEST_LOG=$(mktemp)
  local RESULT_BUNDLE_DIR
  RESULT_BUNDLE_DIR=$(mktemp -d)
  local RESULT_BUNDLE_PATH="$RESULT_BUNDLE_DIR/ios-ui-tests.xcresult"
  local TOTAL_IOS_TESTS=0
  # FIX: '|| true' keeps pipefail from killing the script when no test
  # functions are found (grep exits non-zero on zero matches).
  TOTAL_IOS_TESTS=$(find iosApp/iosAppUITests -name '*.swift' -print0 2>/dev/null | xargs -0 grep -hE '^[[:space:]]*func[[:space:]]+test' 2>/dev/null | wc -l | tr -d ' ' || true)
  if [[ -z "$TOTAL_IOS_TESTS" ]]; then
    TOTAL_IOS_TESTS=0
  fi
  local TEST_START
  TEST_START=$(date +%s)

  xcodebuild test-without-building \
    -project iosApp/iosApp.xcodeproj \
    -scheme iosApp \
    -destination "platform=iOS Simulator,id=$SIM_ID" \
    -only-testing:iosAppUITests \
    -derivedDataPath "$BUILD_DIR" \
    -resultBundlePath "$RESULT_BUNDLE_PATH" \
    -parallel-testing-enabled YES \
    > "$TEST_LOG" 2>&1 &
  local XCODE_PID=$!

  # Progress/liveness watchdog:
  # - emits heartbeat with completed test count and simulator health
  # - fails fast when CoreSimulatorService is unhealthy
  # - treats test completion, xcodebuild CPU, and log growth as activity
  # - fails when startup/test activity stalls beyond configured thresholds
  # - keeps a hard cap as a final safety net
  local HEARTBEAT_SECONDS="${IOS_HEARTBEAT_SECONDS:-30}"
  local STARTUP_PROGRESS_TIMEOUT_SECONDS="${IOS_STARTUP_PROGRESS_TIMEOUT_SECONDS:-900}"
  local TEST_STALL_TIMEOUT_SECONDS="${IOS_TEST_STALL_TIMEOUT_SECONDS:-480}"
  local UNRESPONSIVE_STALL_TIMEOUT_SECONDS="${IOS_UNRESPONSIVE_STALL_TIMEOUT_SECONDS:-120}"
  local HARD_TIMEOUT_SECONDS="${IOS_HARD_TIMEOUT_SECONDS:-10800}" # 3 hours
  local ACTIVE_CPU_THRESHOLD="${IOS_ACTIVE_CPU_THRESHOLD:-1.0}"

  echo "iOS watchdog: heartbeat=${HEARTBEAT_SECONDS}s startup_timeout=${STARTUP_PROGRESS_TIMEOUT_SECONDS}s test_stall_timeout=${TEST_STALL_TIMEOUT_SECONDS}s unresponsive_timeout=${UNRESPONSIVE_STALL_TIMEOUT_SECONDS}s hard_timeout=${HARD_TIMEOUT_SECONDS}s cpu_active_threshold=${ACTIVE_CPU_THRESHOLD}%"
  (
    local start_ts now_ts elapsed
    local last_test_progress_ts
    local last_activity_ts
    local completed=0
    local last_completed=0
    local stale_seconds=0
    local sim_health=""
    local first_test_seen=false
    local simctl_health_output=""
    local log_size=0
    local last_log_size=0
    local xcode_cpu="0.0"
    local xcode_cpu_raw=""

    start_ts=$(date +%s)
    last_test_progress_ts=$start_ts
    last_activity_ts=$start_ts

    while kill -0 "$XCODE_PID" 2>/dev/null; do
      sleep "$HEARTBEAT_SECONDS"
      now_ts=$(date +%s)
      elapsed=$((now_ts - start_ts))

      # Keep watchdog alive before first completed test appears; do not fail on zero matches.
      completed=$(grep -E -c "Test [Cc]ase .* (passed|failed)" "$TEST_LOG" 2>/dev/null || true)
      if [[ -z "$completed" ]]; then
        completed=0
      fi

      if [[ "$completed" -gt "$last_completed" ]]; then
        last_test_progress_ts=$now_ts
        last_activity_ts=$now_ts
        last_completed=$completed
        first_test_seen=true
      fi

      # xcodebuild output growth indicates ongoing work even when a test has not completed yet.
      log_size=$(wc -c < "$TEST_LOG" 2>/dev/null || echo 0)
      if [[ -n "$log_size" ]] && [[ "$log_size" -gt "$last_log_size" ]]; then
        last_log_size=$log_size
        last_activity_ts=$now_ts
      fi

      # CPU usage provides another liveness signal during long-running UI tests.
      xcode_cpu_raw=$(ps -p "$XCODE_PID" -o %cpu= 2>/dev/null | tr -d ' ' || true)
      if [[ -n "$xcode_cpu_raw" ]]; then
        xcode_cpu="$xcode_cpu_raw"
      else
        xcode_cpu="0.0"
      fi
      # FIX: pass values with -v instead of interpolating them into the awk
      # program text (safer, and robust to odd ps output).
      if awk -v cpu="$xcode_cpu" -v thr="$ACTIVE_CPU_THRESHOLD" 'BEGIN { exit !(cpu >= thr) }'; then
        last_activity_ts=$now_ts
      fi

      if simctl_health_output=$(xcrun simctl spawn "$SIM_ID" launchctl print system 2>&1); then
        sim_health="responsive"
      else
        sim_health="UNRESPONSIVE"

        # Fail fast when the simulator service itself is down. Waiting longer does not recover this state.
        if echo "$simctl_health_output" | grep -Eiq "CoreSimulatorService connection became invalid|not connected to CoreSimulatorService|Unable to locate device set|Connection refused|simdiskimaged.*(crashed|not responding)|Unable to discover any Simulator runtimes"; then
          echo "WATCHDOG: CoreSimulatorService unhealthy; killing xcodebuild (PID $XCODE_PID) immediately"
          echo "$simctl_health_output" | head -5 | sed 's/^/ simctl: /'
          kill "$XCODE_PID" 2>/dev/null || true
          sleep 5
          kill -9 "$XCODE_PID" 2>/dev/null || true
          break
        fi
      fi

      stale_seconds=$((now_ts - last_activity_ts))
      local elapsed_mm elapsed_ss
      elapsed_mm=$((elapsed / 60))
      elapsed_ss=$((elapsed % 60))

      if [[ "$TOTAL_IOS_TESTS" -gt 0 ]]; then
        echo "iOS progress: ${completed}/${TOTAL_IOS_TESTS} tests complete | elapsed ${elapsed_mm}m${elapsed_ss}s | simulator ${sim_health} | xcodebuild cpu ${xcode_cpu}%"
      else
        echo "iOS progress: ${completed} tests complete | elapsed ${elapsed_mm}m${elapsed_ss}s | simulator ${sim_health} | xcodebuild cpu ${xcode_cpu}%"
      fi

      if [[ "$elapsed" -ge "$HARD_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing xcodebuild (PID $XCODE_PID) after hard timeout ${HARD_TIMEOUT_SECONDS}s"
        kill "$XCODE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$XCODE_PID" 2>/dev/null || true
        break
      fi

      if [[ "$first_test_seen" != "true" ]] && [[ "$elapsed" -ge "$STARTUP_PROGRESS_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing xcodebuild (PID $XCODE_PID) - no completed iOS test observed within startup timeout (${STARTUP_PROGRESS_TIMEOUT_SECONDS}s)"
        kill "$XCODE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$XCODE_PID" 2>/dev/null || true
        break
      fi

      if [[ "$first_test_seen" == "true" ]] && [[ "$stale_seconds" -ge "$TEST_STALL_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing xcodebuild (PID $XCODE_PID) - no iOS test activity for ${stale_seconds}s"
        kill "$XCODE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$XCODE_PID" 2>/dev/null || true
        break
      fi

      if [[ "$sim_health" == "UNRESPONSIVE" ]] && [[ "$stale_seconds" -ge "$UNRESPONSIVE_STALL_TIMEOUT_SECONDS" ]]; then
        echo "WATCHDOG: killing xcodebuild (PID $XCODE_PID) - simulator unresponsive and no test activity for ${stale_seconds}s"
        kill "$XCODE_PID" 2>/dev/null || true
        sleep 5
        kill -9 "$XCODE_PID" 2>/dev/null || true
        break
      fi
    done
  ) &
  local WATCHDOG_PID=$!

  wait "$XCODE_PID" 2>/dev/null || TEST_EXIT=$?
  kill "$WATCHDOG_PID" 2>/dev/null || true
  wait "$WATCHDOG_PID" 2>/dev/null || true

  local TEST_END
  TEST_END=$(date +%s)
  echo "iOS test phase: $((TEST_END - TEST_START))s (exit=$TEST_EXIT)"
  echo "iOS total (build+test): $((TEST_END - BUILD_START))s"

  # Show test summary (passed/failed counts and any failures)
  echo "--- Test Results ---"
  grep -E "Test [Cc]ase .* (passed|failed)" "$TEST_LOG" || true
  echo ""
  echo "--- Failures ---"
  grep -E "(FAIL|error:|\*\* TEST FAILED)" "$TEST_LOG" || echo " (none)"
  echo ""
  echo "--- Last 20 lines ---"
  tail -20 "$TEST_LOG"
  rm -f "$TEST_LOG"

  # Gate: every skipped test must appear on the allowlist, else fail the run.
  if [[ $TEST_EXIT -eq 0 ]]; then
    local SKIP_ALLOWLIST="${IOS_SKIPPED_TESTS_ALLOWLIST:-audit/ios-skipped-tests-allowlist.txt}"
    if ! bash "$SCRIPT_DIR/validate-ios-skipped-tests.sh" "$RESULT_BUNDLE_PATH" "$SKIP_ALLOWLIST"; then
      echo -e "${RED}iOS skipped-test gate FAILED${NC}"
      TEST_EXIT=1
    fi
  fi

  rm -rf "$RESULT_BUNDLE_DIR"
  rm -rf "$BUILD_DIR"

  # Re-enable animations
  xcrun simctl spawn "$SIM_ID" defaults write com.apple.Accessibility ReduceMotionEnabled -bool NO 2>/dev/null || true

  # 137/143 = killed by SIGKILL/SIGTERM, i.e. the watchdog fired.
  if [[ $TEST_EXIT -eq 137 ]] || [[ $TEST_EXIT -eq 143 ]]; then
    echo -e "${RED}iOS simulator tests terminated by watchdog${NC}"
    IOS_FAIL=1
    return 1
  elif [[ $TEST_EXIT -eq 0 ]]; then
    echo -e "${GREEN}iOS simulator tests PASSED${NC}"
    IOS_PASS=1
    # Emit runtime evidence for CI tracking. (FIX: removed the unused
    # ios_elapsed_s computation — it depended on BSD-only `date -j` and
    # always evaluated to ~0, and its value was never emitted.)
    echo "RUNTIME_EVIDENCE: {\"suite\": \"ios_ui\", \"tests\": ${TOTAL_IOS_TESTS:-0}, \"timestamp\": \"$(date -u '+%Y-%m-%dT%H:%M:%SZ')\"}"
  else
    echo -e "${RED}iOS simulator tests FAILED${NC}"
    IOS_FAIL=1
    return 1
  fi
}
||||
|
||||
echo "================================================"
|
||||
echo " PeriodVault Emulator/Simulator Test Runner"
|
||||
echo "================================================"
|
||||
echo ""
|
||||
|
||||
case "$PLATFORM" in
|
||||
android)
|
||||
run_android
|
||||
;;
|
||||
ios)
|
||||
run_ios
|
||||
;;
|
||||
all)
|
||||
run_android || true
|
||||
echo ""
|
||||
run_ios || true
|
||||
;;
|
||||
*)
|
||||
echo "Usage: $0 [android|ios|all]"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
|
||||
echo ""
|
||||
echo "================================================"
|
||||
echo " Results Summary"
|
||||
echo "================================================"
|
||||
if [[ "$PLATFORM" == "all" || "$PLATFORM" == "android" ]]; then
|
||||
if [[ $ANDROID_PASS -eq 1 ]]; then
|
||||
echo -e " Android: ${GREEN}PASSED${NC}"
|
||||
elif [[ $ANDROID_FAIL -eq 1 ]]; then
|
||||
echo -e " Android: ${RED}FAILED${NC}"
|
||||
else
|
||||
echo -e " Android: ${YELLOW}SKIPPED${NC}"
|
||||
fi
|
||||
fi
|
||||
if [[ "$PLATFORM" == "all" || "$PLATFORM" == "ios" ]]; then
|
||||
if [[ $IOS_PASS -eq 1 ]]; then
|
||||
echo -e " iOS: ${GREEN}PASSED${NC}"
|
||||
elif [[ $IOS_FAIL -eq 1 ]]; then
|
||||
echo -e " iOS: ${RED}FAILED${NC}"
|
||||
else
|
||||
echo -e " iOS: ${YELLOW}SKIPPED${NC}"
|
||||
fi
|
||||
fi
|
||||
echo "================================================"
|
||||
|
||||
# Exit with failure if any platform failed
|
||||
if [[ $ANDROID_FAIL -eq 1 ]] || [[ $IOS_FAIL -eq 1 ]]; then
|
||||
exit 1
|
||||
fi
|
||||
730
runners-conversion/periodVault/runner.sh
Executable file
730
runners-conversion/periodVault/runner.sh
Executable file
@@ -0,0 +1,730 @@
|
||||
#!/usr/bin/env bash
# runner.sh — Setup, manage, and tear down a GitHub Actions self-hosted runner.
#
# Supports two platforms:
#   - macOS: Installs the runner agent natively, manages it as a launchd service.
#   - Linux: Delegates to Docker-based runner infrastructure in infra/runners/.
#
# Typical flow:
#   1) ./scripts/runner.sh --mode setup      # install/configure runner
#   2) ./scripts/runner.sh --mode status     # verify runner is online
#   3) (push/PR triggers CI on the self-hosted runner)
#   4) ./scripts/runner.sh --mode stop       # stop runner
#   5) ./scripts/runner.sh --mode uninstall  # deregister and clean up

set -euo pipefail

# CLI state, populated by parse_args(); empty string means "not provided".
MODE=""
RUNNER_DIR="${PERIODVAULT_RUNNER_DIR:-${HOME}/.periodvault-runner}"  # macOS install dir
RUNNER_LABELS="self-hosted,macOS,periodvault"
RUNNER_NAME=""
REPO_SLUG=""
REG_TOKEN=""
FORCE=false
FOREGROUND=false
PUSH_REGISTRY=""
BUILD_TARGET=""

# launchd service identity for the macOS runner agent.
PLIST_LABEL="com.periodvault.actions-runner"
PLIST_PATH="${HOME}/Library/LaunchAgents/${PLIST_LABEL}.plist"

# Resolved during Linux operations
INFRA_DIR=""
||||
# Print the full CLI help text (modes, per-platform options, examples).
usage() {
  cat <<'EOF'
Usage:
  ./scripts/runner.sh --mode <setup|start|stop|status|build-image|uninstall> [options]

Required:
  --mode MODE        One of: setup, start, stop, status, build-image, uninstall

Options (macOS):
  --runner-dir DIR   Installation directory (default: ~/.periodvault-runner)
  --labels LABELS    Comma-separated labels (default: self-hosted,macOS,periodvault)
  --name NAME        Runner name (default: periodvault-<hostname>)
  --repo OWNER/REPO  GitHub repository (default: auto-detected from git remote)
  --token TOKEN      Registration/removal token (prompted if not provided)
  --force            Force re-setup even if already configured
  --foreground       Start in foreground instead of launchd service

Options (Linux — Docker mode):
  On Linux, this script delegates to Docker Compose in infra/runners/.
  Configuration is managed via .env and envs/*.env files.
  See infra/runners/README.md for details.

Options (build-image):
  --target TARGET    Dockerfile target: slim or full (default: builds both)
  --push REGISTRY    Tag and push to a registry (e.g. localhost:5000)

Common:
  -h, --help         Show this help

Examples (macOS):
  ./scripts/runner.sh --mode setup
  ./scripts/runner.sh --mode setup --token ghp_xxxxx
  ./scripts/runner.sh --mode start
  ./scripts/runner.sh --mode start --foreground
  ./scripts/runner.sh --mode status
  ./scripts/runner.sh --mode stop
  ./scripts/runner.sh --mode uninstall

Examples (Linux):
  ./scripts/runner.sh --mode setup      # prompts for .env, starts runners
  ./scripts/runner.sh --mode start      # docker compose up -d
  ./scripts/runner.sh --mode stop       # docker compose down
  ./scripts/runner.sh --mode status     # docker compose ps + logs
  ./scripts/runner.sh --mode uninstall  # docker compose down -v --rmi local

Examples (build-image — works on any OS):
  ./scripts/runner.sh --mode build-image                        # build slim + full
  ./scripts/runner.sh --mode build-image --target slim          # build slim only
  ./scripts/runner.sh --mode build-image --push localhost:5000  # build + push to local registry

Environment overrides:
  PERIODVAULT_RUNNER_DIR   Runner installation directory (macOS only)
EOF
}
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Emit an informational message on stdout, tagged with the runner prefix.
log() {
  printf '%s\n' "[runner] $*"
}
||||
|
||||
# Emit a non-fatal warning on stderr, tagged with the runner prefix.
warn() {
  printf '%s\n' "[runner] WARNING: $*" >&2
}
||||
|
||||
# Print an error on stderr and abort the current shell with status 1.
die() {
  printf '%s\n' "[runner] ERROR: $*" >&2
  exit 1
}
||||
|
||||
# Abort via die() unless the given command resolves on PATH.
require_cmd() {
  local name="$1"
  if ! command -v "$name" >/dev/null 2>&1; then
    die "required command not found: $name"
  fi
}
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Platform detection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Print a lowercase platform tag ('darwin' or 'linux') for the current
# kernel; any other uname output is a fatal error via die().
detect_os() {
  local kernel
  kernel="$(uname -s)"
  case "$kernel" in
    Darwin) printf 'darwin' ;;
    Linux) printf 'linux' ;;
    *) die "Unsupported OS: ${kernel}. This script supports macOS and Linux." ;;
  esac
}
||||
|
||||
# Guard: abort via die() on any platform other than macOS.
ensure_macos() {
  if [[ "$(detect_os)" != "darwin" ]]; then
    die "This operation requires macOS."
  fi
}
||||
|
||||
# Locate infra/runners relative to this script and store its absolute path
# in the INFRA_DIR global. Dies if the directory or its docker-compose.yml
# cannot be found.
find_infra_dir() {
  local script_dir
  script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  local repo_root="${script_dir}/.."
  # '|| true' keeps `set -e` from aborting when the directory is missing;
  # the emptiness check below turns that into a friendly error instead.
  INFRA_DIR="$(cd "${repo_root}/infra/runners" 2>/dev/null && pwd)" || true

  if [[ -z "$INFRA_DIR" ]] || [[ ! -f "${INFRA_DIR}/docker-compose.yml" ]]; then
    die "Could not find infra/runners/docker-compose.yml. Ensure you are running from the periodvault repo."
  fi
}
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Argument parsing
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
parse_args() {
  # Parse CLI flags into the script's globals (MODE, RUNNER_DIR,
  # RUNNER_LABELS, RUNNER_NAME, REPO_SLUG, REG_TOKEN, BUILD_TARGET, FORCE,
  # FOREGROUND, PUSH_REGISTRY) and validate that --mode carries a supported
  # value. Unknown arguments and value-less options are fatal.
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --mode)
        shift; [[ $# -gt 0 ]] || die "--mode requires a value"
        MODE="$1"; shift ;;
      --runner-dir)
        shift; [[ $# -gt 0 ]] || die "--runner-dir requires a value"
        RUNNER_DIR="$1"; shift ;;
      --labels)
        shift; [[ $# -gt 0 ]] || die "--labels requires a value"
        RUNNER_LABELS="$1"; shift ;;
      --name)
        shift; [[ $# -gt 0 ]] || die "--name requires a value"
        RUNNER_NAME="$1"; shift ;;
      --repo)
        shift; [[ $# -gt 0 ]] || die "--repo requires a value"
        REPO_SLUG="$1"; shift ;;
      --token)
        shift; [[ $# -gt 0 ]] || die "--token requires a value"
        REG_TOKEN="$1"; shift ;;
      --target)
        shift; [[ $# -gt 0 ]] || die "--target requires a value (slim or full)"
        BUILD_TARGET="$1"; shift ;;
      --force)
        FORCE=true; shift ;;
      --foreground)
        FOREGROUND=true; shift ;;
      --push)
        shift; [[ $# -gt 0 ]] || die "--push requires a registry address (e.g. localhost:5000)"
        PUSH_REGISTRY="$1"; shift ;;
      -h|--help)
        usage; exit 0 ;;
      *)
        die "unknown argument: $1" ;;
    esac
  done

  # ${MODE:-} keeps this safe under `set -u` even if MODE was never
  # initialized before parsing (previously this was a hard unbound-variable
  # error instead of the intended diagnostic).
  [[ -n "${MODE:-}" ]] || die "--mode is required (setup|start|stop|status|build-image|uninstall)"
  case "$MODE" in
    setup|start|stop|status|build-image|uninstall) ;;
    *) die "invalid --mode: $MODE (expected setup|start|stop|status|build-image|uninstall)" ;;
  esac
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Repo detection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
detect_repo() {
  # Resolve REPO_SLUG (OWNER/REPO). An explicit --repo value wins; otherwise
  # parse the origin remote URL (https or ssh form). Dies when no remote is
  # available or the URL cannot be reduced to OWNER/REPO.
  # ${REPO_SLUG:-} keeps the check safe under `set -u` even when the
  # variable was never initialized.
  if [[ -n "${REPO_SLUG:-}" ]]; then
    return
  fi

  local remote_url=""
  remote_url="$(git remote get-url origin 2>/dev/null || true)"
  if [[ -z "$remote_url" ]]; then
    die "Could not detect repository from git remote. Use --repo OWNER/REPO."
  fi

  # Strip the github.com prefix (https or ssh syntax) and a trailing .git.
  REPO_SLUG="$(printf '%s' "$remote_url" \
    | sed -E 's#^(https?://github\.com/|git@github\.com:)##' \
    | sed -E 's/\.git$//')"

  if [[ -z "$REPO_SLUG" ]] || ! printf '%s' "$REPO_SLUG" | grep -qE '^[^/]+/[^/]+$'; then
    die "Could not parse OWNER/REPO from remote URL: $remote_url. Use --repo OWNER/REPO."
  fi

  log "Auto-detected repository: $REPO_SLUG"
}
|
||||
|
||||
# ===========================================================================
|
||||
# macOS: Native runner agent + launchd service
|
||||
# ===========================================================================
|
||||
|
||||
detect_arch() {
  # Translate `uname -m` into GitHub's runner arch token (arm64 | x64).
  local machine
  machine="$(uname -m)"
  case "$machine" in
    arm64 | aarch64) printf 'arm64' ;;
    x86_64)          printf 'x64' ;;
    *)               die "Unsupported architecture: $machine" ;;
  esac
}
|
||||
|
||||
download_runner() {
  # Download, checksum-verify, and extract the latest GitHub Actions runner
  # tarball for macOS into RUNNER_DIR. Network access required.
  require_cmd curl
  require_cmd shasum
  require_cmd tar

  local arch
  arch="$(detect_arch)"

  log "Fetching latest runner release metadata..."
  local release_json
  release_json="$(curl -fsSL "https://api.github.com/repos/actions/runner/releases/latest")"

  # tag_name is of the form "v2.xxx.y"; strip the leading "v".
  local version
  version="$(printf '%s' "$release_json" | grep '"tag_name"' | sed -E 's/.*"v([^"]+)".*/\1/')"
  if [[ -z "$version" ]]; then
    die "Could not determine latest runner version from GitHub API."
  fi
  log "Latest runner version: $version"

  local tarball="actions-runner-osx-${arch}-${version}.tar.gz"
  local download_url="https://github.com/actions/runner/releases/download/v${version}/${tarball}"

  # The release body embeds per-asset SHA256 values between HTML comment
  # markers. NOTE(review): assumes the markers are literally
  # "<!-- BEGIN SHA osx-<arch> -->"; confirm against an actual release body,
  # otherwise verification is silently skipped via the warn branch below.
  # The heredoc is intentionally unquoted so ${sha_marker} expands into the
  # python regex; python code must stay flush-left (it is a -c string).
  local sha_marker="osx-${arch}"
  local expected_sha=""
  expected_sha="$(printf '%s' "$release_json" \
    | python3 -c "
import json,sys,re
body = json.load(sys.stdin).get('body','')
m = re.search(r'<!-- BEGIN SHA ${sha_marker} -->([0-9a-f]{64})<!-- END SHA ${sha_marker} -->', body)
print(m.group(1) if m else '')
" 2>/dev/null || true)"

  mkdir -p "$RUNNER_DIR"
  local dest="${RUNNER_DIR}/${tarball}"

  # Re-use a previously downloaded tarball (it is checksum-verified below).
  if [[ -f "$dest" ]]; then
    log "Tarball already exists: $dest"
  else
    log "Downloading: $download_url"
    curl -fSL -o "$dest" "$download_url"
  fi

  if [[ -n "$expected_sha" ]]; then
    log "Verifying SHA256 checksum..."
    local actual_sha
    actual_sha="$(shasum -a 256 "$dest" | awk '{print $1}')"
    if [[ "$actual_sha" != "$expected_sha" ]]; then
      # Remove the corrupt download so a re-run fetches a fresh copy.
      rm -f "$dest"
      die "Checksum mismatch. Expected: $expected_sha, Got: $actual_sha"
    fi
    log "Checksum verified."
  else
    warn "Could not extract expected SHA256 from release metadata; skipping verification."
  fi

  log "Extracting runner into $RUNNER_DIR..."
  tar -xzf "$dest" -C "$RUNNER_DIR"
  rm -f "$dest"

  log "Runner extracted (version $version)."
}
|
||||
|
||||
prompt_token() {
  # Ensure REG_TOKEN is populated; when empty, explain where to obtain a
  # registration token and read one interactively from stdin.
  if [[ -n "$REG_TOKEN" ]]; then
    return
  fi

  log ""
  log "A registration token is required."
  log "Obtain one from: https://github.com/${REPO_SLUG}/settings/actions/runners/new"
  log "Or via the API:"
  log "  curl -X POST -H 'Authorization: token YOUR_PAT' \\"
  log "    https://api.github.com/repos/${REPO_SLUG}/actions/runners/registration-token"
  log ""
  printf '[runner] Enter registration token: '
  read -r REG_TOKEN
  [[ -n "$REG_TOKEN" ]] || die "No token provided."
}
|
||||
|
||||
register_runner() {
  # Register this machine with GitHub via the runner's config.sh.
  # Reads globals: RUNNER_DIR, RUNNER_NAME, RUNNER_LABELS, REPO_SLUG,
  # REG_TOKEN, FORCE. An empty RUNNER_NAME gets a hostname-based default.
  if [[ -z "$RUNNER_NAME" ]]; then
    RUNNER_NAME="periodvault-$(hostname -s)"
  fi

  log "Registering runner '${RUNNER_NAME}' with labels '${RUNNER_LABELS}'..."

  local -a cfg=(
    --url "https://github.com/${REPO_SLUG}"
    --token "$REG_TOKEN"
    --name "$RUNNER_NAME"
    --labels "$RUNNER_LABELS"
    --work "${RUNNER_DIR}/_work"
    --unattended
  )

  # --replace lets a re-run take over an existing registration.
  if [[ "$FORCE" == "true" ]]; then
    cfg+=(--replace)
  fi

  "${RUNNER_DIR}/config.sh" "${cfg[@]}"
  log "Runner registered."
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# launchd service management (macOS)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
create_plist() {
  # Write the launchd job definition for the runner. The heredoc delimiter
  # is intentionally unquoted: RUNNER_DIR, PLIST_LABEL and HOME expand at
  # write time. KeepAlive+RunAtLoad make launchd restart run.sh on crash.
  mkdir -p "${RUNNER_DIR}/logs"
  mkdir -p "$(dirname "$PLIST_PATH")"

  cat > "$PLIST_PATH" <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>${PLIST_LABEL}</string>
<key>ProgramArguments</key>
<array>
<string>${RUNNER_DIR}/run.sh</string>
</array>
<key>WorkingDirectory</key>
<string>${RUNNER_DIR}</string>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
<key>StandardOutPath</key>
<string>${RUNNER_DIR}/logs/stdout.log</string>
<key>StandardErrorPath</key>
<string>${RUNNER_DIR}/logs/stderr.log</string>
<key>EnvironmentVariables</key>
<dict>
<key>PATH</key>
<string>/opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>
<key>HOME</key>
<string>${HOME}</string>
</dict>
</dict>
</plist>
EOF

  log "Launchd plist created: $PLIST_PATH"
}
|
||||
|
||||
load_service() {
  # Load the launchd job, unloading any stale instance first so that
  # `launchctl load` does not fail on an already-registered label.
  # Consistency: reuse service_is_running instead of duplicating its
  # launchctl|grep pipeline inline.
  if service_is_running; then
    log "Service already loaded; unloading first..."
    launchctl unload "$PLIST_PATH" 2>/dev/null || true
  fi

  launchctl load "$PLIST_PATH"
  log "Service loaded."
}
|
||||
|
||||
unload_service() {
  # Unload the launchd job when present; otherwise report a no-op.
  # Consistency: reuse service_is_running instead of duplicating its
  # launchctl|grep pipeline inline.
  if service_is_running; then
    launchctl unload "$PLIST_PATH" 2>/dev/null || true
    log "Service unloaded."
  else
    log "Service is not loaded."
  fi
}
|
||||
|
||||
service_is_running() {
  # True when the launchd job label appears in `launchctl list`.
  # Capture the listing first instead of piping straight into `grep -q`:
  # under `set -o pipefail`, grep exiting early on a match can deliver
  # SIGPIPE to launchctl and turn a genuine match into a pipeline failure.
  local listing
  listing="$(launchctl list 2>/dev/null || true)"
  grep -q "$PLIST_LABEL" <<<"$listing"
}
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# macOS mode implementations
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
do_setup_darwin() {
  # Full macOS setup: download + register the runner agent, then install and
  # load the launchd service. Idempotent — a configured runner short-circuits
  # to status unless --force is given.
  detect_repo

  # .runner is written by config.sh on successful registration.
  if [[ -f "${RUNNER_DIR}/.runner" ]] && [[ "$FORCE" != "true" ]]; then
    log "Runner already configured at $RUNNER_DIR."
    log "Use --force to re-setup."
    do_status_darwin
    return
  fi

  download_runner
  prompt_token
  register_runner
  create_plist
  load_service

  log ""
  log "Setup complete. Runner is registered and running."
  log ""
  log "To activate self-hosted CI, set these repository variables:"
  log '  CI_RUNS_ON_MACOS: ["self-hosted", "macOS", "periodvault"]'
  log ""
  log "Via CLI:"
  log '  gh variable set CI_RUNS_ON_MACOS --body '"'"'["self-hosted","macOS","periodvault"]'"'"
  log ""
  log "Energy saver: ensure your Mac does not sleep while the runner is active."
  log "  System Settings > Energy Saver > Prevent automatic sleeping"
}
|
||||
|
||||
do_start_darwin() {
  # Start the configured runner, either blocking in the foreground
  # (--foreground) or via the launchd service (default).
  [[ -f "${RUNNER_DIR}/.runner" ]] || die "Runner not configured. Run --mode setup first."

  if [[ "$FOREGROUND" == "true" ]]; then
    log "Starting runner in foreground (Ctrl-C to stop)..."
    # exec replaces this shell; nothing after this line runs.
    exec "${RUNNER_DIR}/run.sh"
  fi

  if service_is_running; then
    log "Runner service is already running."
    return
  fi

  # The plist can disappear (manual cleanup, migration); rebuild on demand.
  if [[ ! -f "$PLIST_PATH" ]]; then
    log "Plist not found; recreating..."
    create_plist
  fi

  load_service
  log "Runner started."
}
|
||||
|
||||
do_stop_darwin() {
  # Stop the macOS runner by unloading its launchd job.
  unload_service
  log "Runner stopped."
}
|
||||
|
||||
do_status_darwin() {
  # Report configuration, service, and process state for the macOS runner,
  # plus tails of the most recent logs/diagnostics.
  log "Runner directory: $RUNNER_DIR"

  if [[ ! -f "${RUNNER_DIR}/.runner" ]]; then
    log "Status: NOT CONFIGURED"
    log "Run --mode setup to install and register the runner."
    return
  fi

  # .runner is JSON written by config.sh; agentName is the registered name.
  local runner_name=""
  if command -v python3 >/dev/null 2>&1; then
    runner_name="$(python3 -c "import json,sys; d=json.load(open(sys.argv[1])); print(d.get('agentName',''))" "${RUNNER_DIR}/.runner" 2>/dev/null || true)"
  fi
  if [[ -z "$runner_name" ]]; then
    runner_name="(could not parse)"
  fi

  log "Runner name: $runner_name"

  if service_is_running; then
    log "Service: RUNNING"
  else
    log "Service: STOPPED"
  fi

  # The launchd job may be loaded while the listener process is dead (or
  # vice versa), so both states are reported independently.
  if pgrep -f "Runner.Listener" >/dev/null 2>&1; then
    log "Process: ACTIVE (Runner.Listener found)"
  else
    log "Process: INACTIVE"
  fi

  local log_file="${RUNNER_DIR}/logs/stdout.log"
  if [[ -f "$log_file" ]]; then
    log ""
    log "Recent log output (last 10 lines):"
    tail -n 10 "$log_file" 2>/dev/null || true
  fi

  # The runner agent writes timestamped diagnostics into _diag/.
  local diag_dir="${RUNNER_DIR}/_diag"
  if [[ -d "$diag_dir" ]]; then
    local latest_diag
    latest_diag="$(ls -t "${diag_dir}"/Runner_*.log 2>/dev/null | head -n1 || true)"
    if [[ -n "$latest_diag" ]]; then
      log ""
      log "Latest runner diagnostic (last 5 lines):"
      tail -n 5 "$latest_diag" 2>/dev/null || true
    fi
  fi
}
|
||||
|
||||
do_uninstall_darwin() {
  # Tear down the macOS runner: stop the service, delete the plist,
  # deregister from GitHub (requires a removal token), remove RUNNER_DIR.
  log "Uninstalling self-hosted runner..."

  unload_service

  if [[ -f "$PLIST_PATH" ]]; then
    rm -f "$PLIST_PATH"
    log "Removed plist: $PLIST_PATH"
  fi

  if [[ -f "${RUNNER_DIR}/config.sh" ]]; then
    if [[ -z "$REG_TOKEN" ]]; then
      detect_repo
      log ""
      log "A removal token is required to deregister the runner."
      log "Obtain one from: https://github.com/${REPO_SLUG}/settings/actions/runners"
      log "Or via the API:"
      log "  curl -X POST -H 'Authorization: token YOUR_PAT' \\"
      log "    https://api.github.com/repos/${REPO_SLUG}/actions/runners/remove-token"
      log ""
      printf '[runner] Enter removal token (or press Enter to skip deregistration): '
      read -r REG_TOKEN
    fi

    if [[ -n "$REG_TOKEN" ]]; then
      # Fix: only report success when `config.sh remove` actually succeeds.
      # Previously "deregistered" was logged unconditionally, even after the
      # failure branch had already warned.
      if "${RUNNER_DIR}/config.sh" remove --token "$REG_TOKEN"; then
        log "Runner deregistered from GitHub."
      else
        warn "Deregistration failed; you may need to remove the runner manually from GitHub settings."
      fi
    else
      warn "Skipping deregistration. Remove the runner manually from GitHub settings."
    fi
  fi

  if [[ -d "$RUNNER_DIR" ]]; then
    log "Removing runner directory: $RUNNER_DIR"
    # ${RUNNER_DIR:?} guards the recursive delete against an empty variable.
    rm -rf -- "${RUNNER_DIR:?}"
    log "Runner directory removed."
  fi

  log "Uninstall complete."
}
|
||||
|
||||
# ===========================================================================
|
||||
# Linux: Docker-based runner via infra/runners/
|
||||
# ===========================================================================
|
||||
|
||||
ensure_docker() {
  # Verify docker exists plus a usable Compose implementation. Compose v2
  # (the `docker compose` plugin) is preferred; standalone docker-compose
  # is tolerated with a warning; neither present is fatal.
  require_cmd docker

  if ! docker compose version >/dev/null 2>&1; then
    if command -v docker-compose >/dev/null 2>&1; then
      warn "Found docker-compose (standalone). docker compose v2 plugin is recommended."
    else
      die "docker compose is required. Install Docker Compose v2: https://docs.docker.com/compose/install/"
    fi
  fi
}
|
||||
|
||||
compose() {
  # Thin wrapper: run `docker compose` against the infra compose file,
  # forwarding all arguments.
  local -a base=(compose -f "${INFRA_DIR}/docker-compose.yml")
  docker "${base[@]}" "$@"
}
|
||||
|
||||
do_build_image() {
  # Build the runner Docker image(s) for linux/amd64 and optionally push.
  # --target restricts the build to one stage; the default builds both
  # "slim" and "full". --push prefixes the tag with a registry and pushes.
  find_infra_dir
  ensure_docker

  local targets=()
  if [[ -n "$BUILD_TARGET" ]]; then
    targets+=("$BUILD_TARGET")
  else
    targets+=("slim" "full")
  fi

  for target in "${targets[@]}"; do
    local image_tag="periodvault-runner:${target}"
    if [[ -n "$PUSH_REGISTRY" ]]; then
      image_tag="${PUSH_REGISTRY}/periodvault-runner:${target}"
    fi

    # --pull refreshes base layers; platform is pinned so Apple Silicon
    # hosts still produce amd64 images for the Linux runners.
    log "Building runner image: ${image_tag} (target: ${target}, platform: linux/amd64)"
    DOCKER_BUILDKIT=1 docker build --platform linux/amd64 --pull \
      --target "$target" \
      -t "$image_tag" \
      "$INFRA_DIR"

    if [[ -n "$PUSH_REGISTRY" ]]; then
      log "Pushing ${image_tag}..."
      docker push "$image_tag"
      log "Image pushed: ${image_tag}"
    else
      log "Image built locally: ${image_tag}"
    fi
  done

  if [[ -z "$PUSH_REGISTRY" ]]; then
    log ""
    log "Use --push <registry> to push to a registry."
    log "Example: ./scripts/runner.sh --mode build-image --push localhost:5000"
  fi
}
|
||||
|
||||
do_setup_linux() {
  # Linux setup: create the compose .env files from their templates
  # (prompting for the GitHub PAT), then start the Docker-based runners.
  find_infra_dir
  ensure_docker

  log "Docker-based runner setup (infra/runners/)"
  log ""

  if [[ ! -f "${INFRA_DIR}/.env" ]]; then
    if [[ -f "${INFRA_DIR}/.env.example" ]]; then
      cp "${INFRA_DIR}/.env.example" "${INFRA_DIR}/.env"
      log "Created ${INFRA_DIR}/.env from template."
      log "Edit this file to set your GITHUB_PAT."
      log ""
      printf '[runner] Enter your GitHub PAT (or press Enter to edit .env manually later): '
      local pat_input
      read -r pat_input
      if [[ -n "$pat_input" ]]; then
        # Fix: rewrite the GITHUB_PAT line without sed. A token containing
        # sed-special characters (/, &, \) previously corrupted the file,
        # and the secret no longer appears in another process's argv.
        local env_file="${INFRA_DIR}/.env" tmp_env
        tmp_env="$(mktemp)"
        grep -v '^GITHUB_PAT=' "$env_file" > "$tmp_env" || true
        printf 'GITHUB_PAT=%s\n' "$pat_input" >> "$tmp_env"
        mv "$tmp_env" "$env_file"
        log "GITHUB_PAT set in .env"
      fi
    else
      die "Missing .env.example template in ${INFRA_DIR}"
    fi
  else
    log ".env already exists; skipping."
  fi

  if [[ ! -f "${INFRA_DIR}/envs/periodvault.env" ]]; then
    if [[ -f "${INFRA_DIR}/envs/periodvault.env.example" ]]; then
      cp "${INFRA_DIR}/envs/periodvault.env.example" "${INFRA_DIR}/envs/periodvault.env"
      log "Created ${INFRA_DIR}/envs/periodvault.env from template."
      log "Edit this file to configure REPO_URL, RUNNER_NAME, and resource limits."
    else
      die "Missing envs/periodvault.env.example template in ${INFRA_DIR}"
    fi
  else
    log "envs/periodvault.env already exists; skipping."
  fi

  log ""
  log "Starting runners..."
  compose up -d

  log ""
  log "Setup complete. Verify with: ./scripts/runner.sh --mode status"
  log ""
  log "To activate self-hosted CI, set these repository variables:"
  log '  gh variable set CI_RUNS_ON --body '"'"'["self-hosted","Linux","X64"]'"'"
  log '  gh variable set CI_RUNS_ON_ANDROID --body '"'"'["self-hosted","Linux","X64","android-emulator"]'"'"
}
|
||||
|
||||
do_start_linux() {
  # Start (or restart) the Docker-based runners in the background.
  find_infra_dir
  ensure_docker

  log "Starting Docker runners..."
  compose up -d
  log "Runners started."
}
|
||||
|
||||
do_stop_linux() {
  # Stop and remove the runner containers (volumes/images are kept;
  # see do_uninstall_linux for a full teardown).
  find_infra_dir
  ensure_docker

  log "Stopping Docker runners..."
  compose down
  log "Runners stopped."
}
|
||||
|
||||
do_status_linux() {
  # Show container status and a short tail of recent logs.
  find_infra_dir
  ensure_docker

  log "Docker runner status (infra/runners/):"
  log ""
  compose ps
  log ""
  log "Recent logs (last 20 lines):"
  compose logs --tail 20 2>/dev/null || true
}
|
||||
|
||||
do_uninstall_linux() {
  # Full teardown: containers, volumes, and locally-built images.
  find_infra_dir
  ensure_docker

  log "Uninstalling Docker runners..."
  # Older compose versions may not support --rmi; fall back to volumes-only.
  compose down -v --rmi local 2>/dev/null || compose down -v
  log "Docker runners removed (containers, volumes, local images)."
  log ""
  log "Note: Runners should auto-deregister from GitHub (ephemeral mode)."
  log "If stale runners remain, remove them manually:"
  log "  gh api -X DELETE repos/OWNER/REPO/actions/runners/RUNNER_ID"
}
|
||||
|
||||
# ===========================================================================
|
||||
# Entry point
|
||||
# ===========================================================================
|
||||
|
||||
main() {
  # Parse CLI flags, detect the platform, and dispatch MODE to the matching
  # darwin (native/launchd) or linux (Docker compose) implementation.
  parse_args "$@"

  local os
  os="$(detect_os)"

  # build-image is platform-independent; everything else is per-OS.
  if [[ "$MODE" == "build-image" ]]; then
    do_build_image
    return
  fi

  local suffix
  if [[ "$os" == "darwin" ]]; then
    suffix="darwin"
  else
    suffix="linux"
  fi

  case "$MODE" in
    setup)     "do_setup_${suffix}" ;;
    start)     "do_start_${suffix}" ;;
    stop)      "do_stop_${suffix}" ;;
    status)    "do_status_${suffix}" ;;
    uninstall) "do_uninstall_${suffix}" ;;
    *)         die "unexpected mode: $MODE" ;;
  esac
}
|
||||
|
||||
# Script entry point: forward all CLI arguments to main.
main "$@"
|
||||
280
runners-conversion/periodVault/setup-dev-environment.sh
Executable file
280
runners-conversion/periodVault/setup-dev-environment.sh
Executable file
@@ -0,0 +1,280 @@
|
||||
#!/usr/bin/env bash
# setup-dev-environment.sh
# Idempotent bootstrap for local Period Vault development.
set -euo pipefail

# Resolve the script and repo-root paths once, independent of caller's cwd.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Flags toggled by the CLI options parsed below (see usage).
INSTALL_MISSING=0
RUN_CHECKS=0

# ANSI colors for the log helpers. The escapes are stored literally
# (single quotes) and interpreted by printf's format string at call time.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
|
||||
usage() {
  # Print CLI help. The heredoc delimiter is quoted so nothing expands.
  cat <<'EOF'
Usage: ./scripts/setup-dev-environment.sh [--install] [--verify] [--help]

Options:
  --install   Attempt safe auto-install for supported tools (Homebrew on macOS).
  --verify    Run post-setup verification commands.
  --help      Show this help.

Notes:
  - Script is idempotent and safe to re-run.
  - Without --install, the script reports actionable install commands.
  - It never writes credentials/tokens and does not run privileged commands automatically.
EOF
}
|
||||
|
||||
# Colored log helpers. %b interprets the escape sequences stored in the
# color variables; fail() reports on stderr, the rest on stdout.
log()  { printf '%b[%s]%b %s\n' "$BLUE" "setup" "$NC" "$*"; }
ok()   { printf '%b[%s]%b %s\n' "$GREEN" "ok" "$NC" "$*"; }
warn() { printf '%b[%s]%b %s\n' "$YELLOW" "warn" "$NC" "$*"; }
fail() { printf '%b[%s]%b %s\n' "$RED" "error" "$NC" "$*" >&2; }
|
||||
|
||||
# Parse CLI options; any unknown option prints help and exits non-zero.
for arg in "$@"; do
  case "$arg" in
    --install) INSTALL_MISSING=1 ;;
    --verify) RUN_CHECKS=1 ;;
    --help|-h) usage; exit 0 ;;
    *)
      fail "Unknown option: $arg"
      usage
      exit 1
      ;;
  esac
done
|
||||
|
||||
# The Gradle wrapper is the canonical build entrypoint; bail out early if
# the checkout is incomplete or the executable bit was lost.
if [[ ! -x "$PROJECT_ROOT/gradlew" ]]; then
  fail "Missing executable Gradle wrapper at ./gradlew."
  exit 1
fi
|
||||
|
||||
# Platform flags drive which tools are required and whether auto-install
# (Homebrew) is available.
OS="$(uname -s)"
IS_MAC=0
IS_LINUX=0
case "$OS" in
  Darwin) IS_MAC=1 ;;
  Linux) IS_LINUX=1 ;;
  *)
    # Unknown platforms still run the checks but get no auto-install.
    warn "Unsupported OS: $OS. Script will run checks but skip auto-install."
    ;;
esac
|
||||
|
||||
# Tool inventories and result accumulators for the summary section.
# NOTE(review): with `set -u`, expanding a declared-but-empty array via
# ${arr[@]} / ${#arr[@]} is an "unbound variable" error on bash < 4.4 —
# confirm the target bash version, or initialize these to ().
declare -a REQUIRED_TOOLS
declare -a OPTIONAL_TOOLS
declare -a MISSING_REQUIRED
declare -a MISSING_OPTIONAL
declare -a REMEDIATION_HINTS

REQUIRED_TOOLS=(git java)
OPTIONAL_TOOLS=(gh act adb emulator avdmanager sdkmanager)

# Xcode tooling is only required when developing on macOS.
if [[ $IS_MAC -eq 1 ]]; then
  REQUIRED_TOOLS+=(xcodebuild xcrun)
fi
|
||||
|
||||
have_cmd() {
  # Predicate: is the named command resolvable on PATH?
  local name="$1"
  command -v "$name" >/dev/null 2>&1
}
|
||||
|
||||
append_unique_hint() {
  # Append hint $1 to REMEDIATION_HINTS, skipping exact duplicates.
  # The :- fallback keeps the loop safe under `set -u` when the array
  # is still empty.
  local hint="$1"
  local seen
  for seen in "${REMEDIATION_HINTS[@]:-}"; do
    if [[ "$seen" == "$hint" ]]; then
      return 0
    fi
  done
  REMEDIATION_HINTS+=("$hint")
}
|
||||
|
||||
detect_java_major() {
  # Echo the Java major version as an integer, or "0" when it cannot be
  # determined. Handles both modern ("17.0.2") and legacy ("1.8.0_292")
  # version strings — previously 1.8 parsed as major 1, so a valid JDK 8
  # was reported as major 1, and unparseable output echoed the raw line.
  local raw version major
  raw="$(java -version 2>&1 | head -n 1 || true)"

  # Extract the quoted version string, e.g. `... version "17.0.2" ...`.
  version="${raw#*\"}"
  if [[ "$version" == "$raw" ]]; then
    # No quote found: unrecognized output.
    echo "0"
    return 0
  fi
  version="${version%%\"*}"

  # Legacy scheme: "1.x.y" means major version x.
  if [[ "$version" == 1.* ]]; then
    version="${version#1.}"
  fi

  # First dotted component, trimmed to its leading digits (handles "18-ea").
  major="${version%%.*}"
  major="${major%%[^0-9]*}"
  if [[ -z "$major" ]]; then
    echo "0"
    return 0
  fi
  echo "$major"
}
|
||||
|
||||
install_with_brew() {
  # Install a Homebrew formula when not already present. Returns non-zero
  # when brew is unavailable (recording a remediation hint) or the install
  # itself fails.
  local formula="$1"

  if ! have_cmd brew; then
    append_unique_hint "Install Homebrew first: https://brew.sh/"
    return 1
  fi

  if brew list --formula "$formula" >/dev/null 2>&1; then
    ok "brew formula '$formula' already installed"
    return 0
  fi

  log "Installing '$formula' via Homebrew..."
  if ! brew install "$formula"; then
    return 1
  fi
  ok "Installed '$formula'"
  return 0
}
|
||||
|
||||
try_install_tool() {
  # Best-effort auto-install of a missing tool. No-op (returns 1) unless
  # --install was given. macOS delegates to Homebrew for supported tools;
  # Linux only records a remediation hint. Returns 0 on successful install.
  local tool="$1"
  if [[ $INSTALL_MISSING -ne 1 ]]; then
    return 1
  fi

  if [[ $IS_MAC -eq 1 ]]; then
    case "$tool" in
      git) install_with_brew git ;;
      gh) install_with_brew gh ;;
      act) install_with_brew act ;;
      java)
        install_with_brew openjdk@17
        # NOTE(review): for the java arm, $? below reflects this
        # append_unique_hint call rather than the brew install — confirm
        # that masking an install failure here is intended.
        append_unique_hint "If needed, configure JAVA_HOME for JDK 17+: export JAVA_HOME=\$(/usr/libexec/java_home -v 17)"
        ;;
      *)
        return 1
        ;;
    esac
    # Propagate the case arm's exit status to the caller.
    return $?
  fi

  if [[ $IS_LINUX -eq 1 ]]; then
    append_unique_hint "Install '$tool' using your distro package manager and re-run this script."
  fi
  return 1
}
|
||||
|
||||
tool_hint() {
  # Echo a one-line, platform-appropriate install hint for the named tool.
  # Always succeeds; the generic fallback covers unknown platforms/tools.
  local tool="$1"
  if [[ $IS_MAC -eq 1 ]]; then
    case "$tool" in
      git|gh|act) echo "brew install $tool" ;;
      java) echo "brew install openjdk@17 && export JAVA_HOME=\$(/usr/libexec/java_home -v 17)" ;;
      xcodebuild|xcrun) echo "Install Xcode from the App Store and run: sudo xcodebuild -runFirstLaunch" ;;
      adb|emulator|avdmanager|sdkmanager) echo "Install Android Studio + Android SDK command-line tools, then add platform-tools/emulator/cmdline-tools/latest/bin to PATH." ;;
      *) echo "Install '$tool' and ensure it is on PATH." ;;
    esac
    return 0
  fi

  if [[ $IS_LINUX -eq 1 ]]; then
    case "$tool" in
      git) echo "sudo apt-get install -y git" ;;
      java) echo "sudo apt-get install -y openjdk-17-jdk" ;;
      gh) echo "Install GitHub CLI from https://cli.github.com/" ;;
      act) echo "Install act from https://github.com/nektos/act" ;;
      *) echo "Install '$tool' using your package manager and add it to PATH." ;;
    esac
    return 0
  fi

  echo "Install '$tool' and ensure it is on PATH."
}
|
||||
|
||||
log "Checking local development prerequisites..."

# Required tools: report, optionally auto-install, and record misses plus a
# remediation hint for the summary.
for tool in "${REQUIRED_TOOLS[@]}"; do
  if have_cmd "$tool"; then
    ok "Found required tool: $tool"
  else
    warn "Missing required tool: $tool"
    if try_install_tool "$tool" && have_cmd "$tool"; then
      ok "Auto-installed required tool: $tool"
    else
      MISSING_REQUIRED+=("$tool")
      append_unique_hint "$(tool_hint "$tool")"
    fi
  fi
done

# Optional tools: same flow, but misses are non-fatal.
for tool in "${OPTIONAL_TOOLS[@]}"; do
  if have_cmd "$tool"; then
    ok "Found optional tool: $tool"
  else
    warn "Missing optional tool: $tool"
    if try_install_tool "$tool" && have_cmd "$tool"; then
      ok "Auto-installed optional tool: $tool"
    else
      MISSING_OPTIONAL+=("$tool")
      append_unique_hint "$(tool_hint "$tool")"
    fi
  fi
done

# Even when java exists, the major version must be 17+ for this project.
if have_cmd java; then
  JAVA_MAJOR="$(detect_java_major)"
  if [[ "$JAVA_MAJOR" =~ ^[0-9]+$ ]] && [[ "$JAVA_MAJOR" -ge 17 ]]; then
    ok "Java version is compatible (major=$JAVA_MAJOR)"
  else
    fail "Java 17+ is required (detected major=$JAVA_MAJOR)."
    append_unique_hint "$(tool_hint "java")"
    # Avoid double-counting java if the presence check already recorded it.
    if [[ ! " ${MISSING_REQUIRED[*]} " =~ " java " ]]; then
      MISSING_REQUIRED+=("java")
    fi
  fi
fi

log "Installing git hooks (idempotent)..."
"$SCRIPT_DIR/install-hooks.sh"
ok "Git hooks configured"

echo ""
echo "================================================"
echo "Setup Summary"
echo "================================================"
if [[ ${#MISSING_REQUIRED[@]} -eq 0 ]]; then
  ok "All required prerequisites are available."
else
  fail "Missing required prerequisites: ${MISSING_REQUIRED[*]}"
fi

if [[ ${#MISSING_OPTIONAL[@]} -eq 0 ]]; then
  ok "All optional developer tools are available."
else
  warn "Missing optional tools: ${MISSING_OPTIONAL[*]}"
fi

if [[ ${#REMEDIATION_HINTS[@]} -gt 0 ]]; then
  echo ""
  echo "Suggested remediation:"
  for hint in "${REMEDIATION_HINTS[@]}"; do
    echo "  - $hint"
  done
fi

echo ""
echo "Verification commands:"
echo "  - ./gradlew shared:jvmTest"
echo "  - ./scripts/run-emulator-tests.sh android"
echo "  - ./scripts/run-emulator-tests.sh ios"
echo "  - ./scripts/verify.sh"

# --verify: cheap smoke checks only; under set -e any failure aborts here.
if [[ $RUN_CHECKS -eq 1 ]]; then
  echo ""
  log "Running lightweight verification commands..."
  "$PROJECT_ROOT/gradlew" --version >/dev/null
  ok "Gradle wrapper check passed"
  if have_cmd gh; then
    gh --version >/dev/null
    ok "GitHub CLI check passed"
  fi
  if have_cmd xcrun; then
    xcrun simctl list devices available >/dev/null
    ok "iOS simulator listing check passed"
  fi
fi

# Exit non-zero when any required prerequisite is still missing so callers
# (and CI) can gate on this script.
if [[ ${#MISSING_REQUIRED[@]} -gt 0 ]]; then
  exit 1
fi

ok "Developer environment bootstrap completed."
|
||||
72
runners-conversion/periodVault/setup.sh
Executable file
72
runners-conversion/periodVault/setup.sh
Executable file
@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env bash
# setup.sh — Cross-platform developer environment setup entrypoint.
#
# macOS: Dispatches to scripts/setup-dev-environment.sh (full bootstrap).
# Linux: Minimal bootstrap (JDK check, git hooks, Gradle dependencies).
#
# Usage: ./scripts/setup.sh [--install] [--verify]

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
OS="$(uname -s)"

if [[ "$OS" == "Darwin" ]]; then
  # exec hands the process over; nothing below runs on macOS.
  exec "${SCRIPT_DIR}/setup-dev-environment.sh" "$@"
fi

if [[ "$OS" != "Linux" ]]; then
  echo "Unsupported OS: $OS. This script supports macOS and Linux."
  exit 1
fi

# --- Linux bootstrap ---

echo "=== periodvault development setup (Linux) ==="
echo ""

# Check JDK
if command -v java >/dev/null 2>&1; then
  # Fix: normalize the version string to a numeric major. The previous
  # one-liner reported legacy "1.8.0_x" as major 1, and left a non-numeric
  # string in JAVA_MAJOR when sed did not match — which made the -lt
  # comparison below a fatal arithmetic error under set -e.
  JAVA_VERSION="$(java -version 2>&1 | head -1 | sed -E 's/.*"([0-9]+(\.[0-9]+)*)[^"]*".*/\1/')"
  case "$JAVA_VERSION" in
    1.*) JAVA_MAJOR="${JAVA_VERSION#1.}"; JAVA_MAJOR="${JAVA_MAJOR%%.*}" ;;
    *)   JAVA_MAJOR="${JAVA_VERSION%%.*}" ;;
  esac
  if ! [[ "$JAVA_MAJOR" =~ ^[0-9]+$ ]]; then
    JAVA_MAJOR=0
  fi
  echo "[ok] Java is installed (major version: $JAVA_MAJOR)"
  if [[ "$JAVA_MAJOR" -lt 17 ]]; then
    echo "[warn] JDK 17+ is required. Found major version $JAVA_MAJOR."
    echo "       Install: sudo apt-get install -y openjdk-17-jdk-headless"
  fi
else
  echo "[error] Java not found."
  echo "        Install: sudo apt-get install -y openjdk-17-jdk-headless"
  exit 1
fi

# Check Android SDK
if [[ -n "${ANDROID_HOME:-}" ]]; then
  echo "[ok] ANDROID_HOME is set: $ANDROID_HOME"
else
  echo "[warn] ANDROID_HOME not set. Android SDK may not be available."
  echo "       Set ANDROID_HOME to your Android SDK path for Android builds."
fi

# Install git hooks
if [[ -x "$SCRIPT_DIR/install-hooks.sh" ]]; then
  echo ""
  echo "Installing git hooks..."
  "$SCRIPT_DIR/install-hooks.sh"
  echo "[ok] Git hooks configured"
fi

# Download Gradle dependencies (best-effort warm-up; failures are tolerated)
if [[ -x "$PROJECT_ROOT/gradlew" ]]; then
  echo ""
  echo "Downloading Gradle dependencies..."
  "$PROJECT_ROOT/gradlew" --no-daemon dependencies > /dev/null 2>&1 || true
  echo "[ok] Gradle dependencies downloaded"
fi

echo ""
echo "=== Setup complete (Linux) ==="
echo ""
echo "Verification commands:"
echo "  ./gradlew shared:jvmTest"
echo "  ./gradlew androidApp:testDebugUnitTest"
||||
31
runners-conversion/periodVault/test-audit-enforcement.sh
Executable file
31
runners-conversion/periodVault/test-audit-enforcement.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env bash
# test-audit-enforcement.sh
# Smoke checks for process/audit enforcement scripts.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"

# Each validator exits non-zero on failure; set -e aborts the whole run.
"$SCRIPT_DIR/check-process.sh" HEAD~1
"$SCRIPT_DIR/validate-sdd.sh" HEAD~1
FORCE_AUDIT_GATES=1 "$SCRIPT_DIR/validate-tdd.sh" HEAD~1

# Feed a minimal, well-formed CODEX report through the report validator.
# NOTE(review): no trap — the temp file leaks if a validator above or the
# one below fails; consider `trap 'rm -f "$TMP_REPORT"' EXIT`.
TMP_REPORT="$(mktemp)"
cat >"$TMP_REPORT" <<'MD'
# CODEX Report
## Requirements Mapping
- sample
## Constitution Compliance Matrix
| Principle | Status | Notes |
|-----------|--------|-------|
| I | pass | sample |
## Evidence
- sample
## Risks
- sample
MD
"$SCRIPT_DIR/validate-audit-report.sh" "$TMP_REPORT"
rm -f "$TMP_REPORT"

echo "[test-audit-enforcement] PASS"
|
||||
476
runners-conversion/periodVault/test-infra-runners.sh
Executable file
476
runners-conversion/periodVault/test-infra-runners.sh
Executable file
@@ -0,0 +1,476 @@
|
||||
#!/usr/bin/env bash
# test-infra-runners.sh — Integration tests for self-hosted CI runner infrastructure.
#
# Tests cover:
#  1. Shell script syntax (bash -n) for all infrastructure scripts
#  2. runner.sh argument parsing and help output
#  3. setup.sh cross-platform dispatch logic
#  4. Docker image builds (slim + full) with content verification
#  5. Docker Compose configuration validation
#  6. ci.yml runner variable expression syntax
#  7. lib.sh headless emulator function structure
#  8. entrypoint.sh env validation logic
#
# Usage: ./scripts/test-infra-runners.sh [--skip-docker]
#
#  --skip-docker   Skip Docker image build tests (useful in CI without Docker)

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Global result counters, mutated by pass()/fail()/skip() below and
# reported in the final summary.
PASS_COUNT=0
FAIL_COUNT=0
SKIP_COUNT=0
# Set to true by the --skip-docker flag.
SKIP_DOCKER=false
|
||||
|
||||
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

# Print a message prefixed with the test-suite tag.
log() {
  printf '%s\n' "[test-infra] $*"
}

# Record a passing check and log it.
pass() {
  PASS_COUNT=$((PASS_COUNT + 1))
  log "PASS: $*"
}

# Record a failing check and log it (the final summary decides the exit code).
fail() {
  FAIL_COUNT=$((FAIL_COUNT + 1))
  log "FAIL: $*"
}

# Record a skipped check and log it.
skip() {
  SKIP_COUNT=$((SKIP_COUNT + 1))
  log "SKIP: $*"
}
|
||||
|
||||
# Assert that $1 is a regular file; record pass/fail under label $2.
assert_file_exists() {
  local target="$1" description="$2"
  if [[ ! -f "$target" ]]; then
    fail "$description — file not found: $target"
    return 0
  fi
  pass "$description"
}
|
||||
|
||||
# Assert that $1 is executable; record pass/fail under label $2.
assert_file_executable() {
  local target="$1" description="$2"
  if [[ ! -x "$target" ]]; then
    fail "$description — not executable: $target"
    return 0
  fi
  pass "$description"
}
|
||||
|
||||
# Assert that the fixed string $2 occurs somewhere in $1 (label $3).
# -F = literal match, -- protects needles that start with a dash.
assert_contains() {
  local text="$1" fragment="$2" label="$3"
  if grep -qF -- "$fragment" <<<"$text"; then
    pass "$label"
  else
    fail "$label — expected to contain: $fragment"
  fi
}
|
||||
|
||||
# Assert that the fixed string $2 does NOT occur in $1 (label $3).
assert_not_contains() {
  local text="$1" fragment="$2" label="$3"
  if grep -qF -- "$fragment" <<<"$text"; then
    fail "$label — should NOT contain: $fragment"
  else
    pass "$label"
  fi
}
|
||||
|
||||
# Assert that running "$3..." exits with status $1 (label $2).
# Output is discarded; errexit is suspended around the probe so a
# non-zero status can be captured instead of aborting the suite.
assert_exit_code() {
  local want="$1" label="$2"
  shift 2
  local got
  set +e
  "$@" >/dev/null 2>&1
  got=$?
  set -e
  if [[ "$got" -ne "$want" ]]; then
    fail "$label — expected exit $want, got $got"
  else
    pass "$label"
  fi
}
|
||||
|
||||
# ---------------------------------------------------------------------------
# Parse args
# ---------------------------------------------------------------------------

# Only --skip-docker is recognized; anything else aborts with usage hint.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --skip-docker) SKIP_DOCKER=true; shift ;;
    *) echo "Unknown arg: $1"; exit 1 ;;
  esac
done

# ===========================================================================
# Section 1: File existence and permissions
# ===========================================================================

log ""
log "=== Section 1: File existence and permissions ==="

assert_file_exists "$PROJECT_ROOT/infra/runners/Dockerfile" "Dockerfile exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/docker-compose.yml" "docker-compose.yml exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/entrypoint.sh" "entrypoint.sh exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/.env.example" "env.example exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/envs/periodvault.env.example" "periodvault.env.example exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/.gitignore" ".gitignore exists"
assert_file_exists "$PROJECT_ROOT/infra/runners/README.md" "runners README exists"
assert_file_exists "$PROJECT_ROOT/scripts/runner.sh" "runner.sh exists"
assert_file_exists "$PROJECT_ROOT/scripts/setup.sh" "setup.sh exists"
assert_file_exists "$PROJECT_ROOT/.github/workflows/build-runner-image.yml" "build-runner-image workflow exists"

assert_file_executable "$PROJECT_ROOT/infra/runners/entrypoint.sh" "entrypoint.sh is executable"
assert_file_executable "$PROJECT_ROOT/scripts/runner.sh" "runner.sh is executable"
assert_file_executable "$PROJECT_ROOT/scripts/setup.sh" "setup.sh is executable"

# ===========================================================================
# Section 2: Shell script syntax validation (bash -n)
# ===========================================================================

log ""
log "=== Section 2: Shell script syntax ==="

# bash -n parses without executing; catches syntax errors only.
for script in \
  "$PROJECT_ROOT/scripts/runner.sh" \
  "$PROJECT_ROOT/scripts/setup.sh" \
  "$PROJECT_ROOT/infra/runners/entrypoint.sh"; do
  name="$(basename "$script")"
  if bash -n "$script" 2>/dev/null; then
    pass "bash -n $name"
  else
    fail "bash -n $name — syntax error"
  fi
done

# ===========================================================================
# Section 3: runner.sh argument parsing
# ===========================================================================

log ""
log "=== Section 3: runner.sh argument parsing ==="

# --help should exit 0 and print usage
HELP_OUT="$("$PROJECT_ROOT/scripts/runner.sh" --help 2>&1)" || true
assert_contains "$HELP_OUT" "Usage:" "runner.sh --help shows usage"
assert_contains "$HELP_OUT" "--mode" "runner.sh --help mentions --mode"
assert_contains "$HELP_OUT" "build-image" "runner.sh --help mentions build-image"
assert_exit_code 0 "runner.sh --help exits 0" "$PROJECT_ROOT/scripts/runner.sh" --help

# Missing --mode should fail
assert_exit_code 1 "runner.sh without --mode exits 1" "$PROJECT_ROOT/scripts/runner.sh"

# Invalid mode should fail
assert_exit_code 1 "runner.sh --mode invalid exits 1" "$PROJECT_ROOT/scripts/runner.sh" --mode invalid
|
||||
|
||||
# ===========================================================================
# Section 4: setup.sh platform dispatch
# ===========================================================================

log ""
log "=== Section 4: setup.sh structure ==="

# Static content checks: setup.sh must mention both OS branches and the
# script it dispatches to.
SETUP_CONTENT="$(cat "$PROJECT_ROOT/scripts/setup.sh")"
assert_contains "$SETUP_CONTENT" "Darwin" "setup.sh handles macOS"
assert_contains "$SETUP_CONTENT" "Linux" "setup.sh handles Linux"
assert_contains "$SETUP_CONTENT" "setup-dev-environment.sh" "setup.sh dispatches to setup-dev-environment.sh"

# ===========================================================================
# Section 5: entrypoint.sh validation logic
# ===========================================================================

log ""
log "=== Section 5: entrypoint.sh structure ==="

ENTRY_CONTENT="$(cat "$PROJECT_ROOT/infra/runners/entrypoint.sh")"
assert_contains "$ENTRY_CONTENT" "GITHUB_PAT" "entrypoint.sh validates GITHUB_PAT"
assert_contains "$ENTRY_CONTENT" "REPO_URL" "entrypoint.sh validates REPO_URL"
assert_contains "$ENTRY_CONTENT" "RUNNER_NAME" "entrypoint.sh validates RUNNER_NAME"
assert_contains "$ENTRY_CONTENT" "--ephemeral" "entrypoint.sh uses ephemeral mode"
assert_contains "$ENTRY_CONTENT" "trap cleanup" "entrypoint.sh traps for cleanup"
assert_contains "$ENTRY_CONTENT" "registration-token" "entrypoint.sh generates registration token"
assert_contains "$ENTRY_CONTENT" "remove-token" "entrypoint.sh handles removal token"

# ===========================================================================
# Section 6: Dockerfile structure
# ===========================================================================

log ""
log "=== Section 6: Dockerfile structure ==="

DOCKERFILE="$(cat "$PROJECT_ROOT/infra/runners/Dockerfile")"
assert_contains "$DOCKERFILE" "FROM ubuntu:24.04 AS base" "Dockerfile has base stage"
assert_contains "$DOCKERFILE" "FROM base AS slim" "Dockerfile has slim stage"
assert_contains "$DOCKERFILE" "FROM slim AS full" "Dockerfile has full stage"
assert_contains "$DOCKERFILE" "openjdk-17-jdk-headless" "Dockerfile installs JDK 17"
assert_contains "$DOCKERFILE" "platforms;android-34" "Dockerfile installs Android SDK 34"
assert_contains "$DOCKERFILE" "build-tools;34.0.0" "Dockerfile installs build-tools 34"
assert_contains "$DOCKERFILE" "system-images;android-34;google_apis;x86_64" "Full stage includes system images"
assert_contains "$DOCKERFILE" "avdmanager create avd" "Full stage pre-creates AVD"
assert_contains "$DOCKERFILE" "kvm" "Full stage sets up KVM group"
assert_contains "$DOCKERFILE" "HEALTHCHECK" "Dockerfile has HEALTHCHECK"
assert_contains "$DOCKERFILE" "ENTRYPOINT" "Dockerfile has ENTRYPOINT"
assert_contains "$DOCKERFILE" 'userdel -r ubuntu' "Dockerfile removes ubuntu user (GID 1000 conflict fix)"

# ===========================================================================
# Section 7: docker-compose.yml structure
# ===========================================================================

log ""
log "=== Section 7: docker-compose.yml structure ==="

COMPOSE="$(cat "$PROJECT_ROOT/infra/runners/docker-compose.yml")"
assert_contains "$COMPOSE" "registry:" "Compose has registry service"
assert_contains "$COMPOSE" "runner-slim-1:" "Compose has runner-slim-1"
assert_contains "$COMPOSE" "runner-slim-2:" "Compose has runner-slim-2"
assert_contains "$COMPOSE" "runner-emulator:" "Compose has runner-emulator"
assert_contains "$COMPOSE" "registry:2" "Registry uses official image"
assert_contains "$COMPOSE" "/dev/kvm" "Emulator gets KVM device"
assert_contains "$COMPOSE" "no-new-privileges" "Security: no-new-privileges"
assert_contains "$COMPOSE" "init: true" "Uses tini (init: true)"
assert_contains "$COMPOSE" "stop_grace_period" "Emulator has stop grace period"
assert_contains "$COMPOSE" "android-emulator" "Emulator runner has android-emulator label"

# ===========================================================================
# Section 8: ci.yml runner variable expressions
# ===========================================================================

log ""
log "=== Section 8: ci.yml runner variable expressions ==="

CI_YML="$(cat "$PROJECT_ROOT/.github/workflows/ci.yml")"
assert_contains "$CI_YML" 'vars.CI_RUNS_ON_MACOS' "ci.yml uses CI_RUNS_ON_MACOS variable"
assert_contains "$CI_YML" 'vars.CI_RUNS_ON_ANDROID' "ci.yml uses CI_RUNS_ON_ANDROID variable"
assert_contains "$CI_YML" 'vars.CI_RUNS_ON ' "ci.yml uses CI_RUNS_ON variable"
assert_contains "$CI_YML" 'fromJSON(' "ci.yml uses fromJSON() for runner targeting"

# Verify fallback values are present (safe default = current macOS runner)
assert_contains "$CI_YML" '"self-hosted","macOS","periodvault"' "ci.yml has macOS fallback"

# Verify parallelism: test-ios-simulator should NOT depend on test-android-emulator
# Extract test-ios-simulator needs line
IOS_SECTION="$(awk '/test-ios-simulator:/,/runs-on:/' "$PROJECT_ROOT/.github/workflows/ci.yml")"
assert_not_contains "$IOS_SECTION" "test-android-emulator" "test-ios-simulator does NOT depend on test-android-emulator (parallel)"
assert_contains "$IOS_SECTION" "test-shared" "test-ios-simulator depends on test-shared"

# Verify audit-quality-gate waits for both platform tests
AUDIT_SECTION="$(awk '/audit-quality-gate:/,/runs-on:/' "$PROJECT_ROOT/.github/workflows/ci.yml")"
assert_contains "$AUDIT_SECTION" "test-android-emulator" "audit-quality-gate waits for android emulator"
assert_contains "$AUDIT_SECTION" "test-ios-simulator" "audit-quality-gate waits for ios simulator"
|
||||
|
||||
# ===========================================================================
# Section 9: lib.sh headless emulator support
# ===========================================================================

log ""
log "=== Section 9: lib.sh headless emulator support ==="

LIB_SH="$(cat "$PROJECT_ROOT/scripts/lib.sh")"
assert_contains "$LIB_SH" "start_emulator_headless()" "lib.sh defines start_emulator_headless()"
assert_contains "$LIB_SH" "-no-window" "Headless emulator uses -no-window"
assert_contains "$LIB_SH" "-no-audio" "Headless emulator uses -no-audio"
assert_contains "$LIB_SH" "swiftshader_indirect" "Headless emulator uses swiftshader GPU"

# Verify OS-aware dispatch in ensure_android_emulator
assert_contains "$LIB_SH" '"$(uname -s)" == "Linux"' "ensure_android_emulator detects Linux"
assert_contains "$LIB_SH" 'start_emulator_headless' "ensure_android_emulator calls headless on Linux"
assert_contains "$LIB_SH" 'start_emulator_windowed' "ensure_android_emulator calls windowed on macOS"

# Verify headless zombie kill is macOS-only: look for a line that mentions
# both is_emulator_headless and Darwin.
ZOMBIE_LINE="$(grep -n 'is_emulator_headless' "$PROJECT_ROOT/scripts/lib.sh" | grep 'Darwin' || true)"
if [[ -n "$ZOMBIE_LINE" ]]; then
  pass "Headless zombie kill is guarded by Darwin check"
else
  fail "Headless zombie kill should be macOS-only (Darwin guard)"
fi

# ===========================================================================
# Section 10: .gitignore protects secrets
# ===========================================================================

log ""
log "=== Section 10: .gitignore protects secrets ==="

GITIGNORE="$(cat "$PROJECT_ROOT/infra/runners/.gitignore")"
assert_contains "$GITIGNORE" ".env" ".gitignore excludes .env"
assert_contains "$GITIGNORE" "!.env.example" ".gitignore keeps .example files"

# ===========================================================================
# Section 11: Docker image builds (requires Docker)
# ===========================================================================

log ""
log "=== Section 11: Docker image builds ==="

# Skipped entirely when --skip-docker is given, docker is missing, or the
# daemon is not running; otherwise builds both targets and inspects them.
if $SKIP_DOCKER; then
  skip "Docker image build tests (--skip-docker)"
elif ! command -v docker &>/dev/null; then
  skip "Docker image build tests (docker not found)"
elif ! docker info >/dev/null 2>&1; then
  skip "Docker image build tests (docker daemon not running)"
else
  DOCKER_PLATFORM="linux/amd64"

  # --- Build slim ---
  log "Building slim image (this may take a few minutes)..."
  if docker build --platform "$DOCKER_PLATFORM" --target slim \
    -t periodvault-runner-test:slim "$PROJECT_ROOT/infra/runners/" >/dev/null 2>&1; then
    pass "Docker build: slim target succeeds"

    # Verify slim image contents
    SLIM_JAVA="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      java -version 2>&1 | head -1)" || true
    if echo "$SLIM_JAVA" | grep -q "17"; then
      pass "Slim image: Java 17 is installed"
    else
      fail "Slim image: Java 17 not found — got: $SLIM_JAVA"
    fi

    SLIM_SDK="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      bash -c 'ls $ANDROID_HOME/platforms/' 2>&1)" || true
    if echo "$SLIM_SDK" | grep -q "android-34"; then
      pass "Slim image: Android SDK 34 is installed"
    else
      fail "Slim image: Android SDK 34 not found — got: $SLIM_SDK"
    fi

    SLIM_RUNNER="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      bash -c 'ls /home/runner/actions-runner/run.sh' 2>&1)" || true
    if echo "$SLIM_RUNNER" | grep -q "run.sh"; then
      pass "Slim image: GitHub Actions runner agent is installed"
    else
      fail "Slim image: runner agent not found"
    fi

    SLIM_USER="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      whoami 2>&1)" || true
    if [[ "$SLIM_USER" == "runner" ]]; then
      pass "Slim image: runs as 'runner' user"
    else
      fail "Slim image: expected user 'runner', got '$SLIM_USER'"
    fi

    SLIM_ENTRY="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      bash -c 'test -x /home/runner/entrypoint.sh && echo ok' 2>&1)" || true
    if [[ "$SLIM_ENTRY" == "ok" ]]; then
      pass "Slim image: entrypoint.sh is present and executable"
    else
      fail "Slim image: entrypoint.sh not executable"
    fi

    # Verify slim does NOT have emulator
    SLIM_EMU="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:slim \
      bash -c 'command -v emulator || echo not-found' 2>&1)" || true
    if echo "$SLIM_EMU" | grep -q "not-found"; then
      pass "Slim image: does NOT include emulator (expected)"
    else
      fail "Slim image: unexpectedly contains emulator"
    fi
  else
    fail "Docker build: slim target failed"
  fi

  # --- Build full ---
  log "Building full image (this may take several minutes)..."
  if docker build --platform "$DOCKER_PLATFORM" --target full \
    -t periodvault-runner-test:full "$PROJECT_ROOT/infra/runners/" >/dev/null 2>&1; then
    pass "Docker build: full target succeeds"

    # Verify full image has emulator
    FULL_EMU="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:full \
      bash -c 'command -v emulator && echo found' 2>&1)" || true
    if echo "$FULL_EMU" | grep -q "found"; then
      pass "Full image: emulator is installed"
    else
      fail "Full image: emulator not found"
    fi

    # Verify full image has AVD pre-created
    FULL_AVD="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:full \
      bash -c '${ANDROID_HOME}/cmdline-tools/latest/bin/avdmanager list avd 2>/dev/null | grep "Name:" || echo none' 2>&1)" || true
    if echo "$FULL_AVD" | grep -q "phone"; then
      pass "Full image: AVD 'phone' is pre-created"
    else
      fail "Full image: AVD 'phone' not found — got: $FULL_AVD"
    fi

    # Verify full image has system images
    FULL_SYSIMG="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:full \
      bash -c 'ls $ANDROID_HOME/system-images/android-34/google_apis/x86_64/ 2>/dev/null | head -1 || echo none' 2>&1)" || true
    if [[ "$FULL_SYSIMG" != "none" ]]; then
      pass "Full image: system-images;android-34;google_apis;x86_64 installed"
    else
      fail "Full image: system images not found"
    fi

    # Verify full image has xvfb
    FULL_XVFB="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:full \
      bash -c 'command -v Xvfb && echo found || echo not-found' 2>&1)" || true
    if echo "$FULL_XVFB" | grep -q "found"; then
      pass "Full image: Xvfb is installed"
    else
      fail "Full image: Xvfb not found"
    fi

    # Verify kvm group exists and runner is a member
    FULL_KVM="$(docker run --rm --platform "$DOCKER_PLATFORM" periodvault-runner-test:full \
      bash -c 'id runner 2>/dev/null' 2>&1)" || true
    if echo "$FULL_KVM" | grep -q "kvm"; then
      pass "Full image: runner user is in kvm group"
    else
      fail "Full image: runner not in kvm group — got: $FULL_KVM"
    fi
  else
    fail "Docker build: full target failed"
  fi

  # --- Docker Compose validation ---
  log "Validating docker-compose.yml..."
  # Create temp env files for validation
  cp "$PROJECT_ROOT/infra/runners/.env.example" "$PROJECT_ROOT/infra/runners/.env"
  cp "$PROJECT_ROOT/infra/runners/envs/periodvault.env.example" "$PROJECT_ROOT/infra/runners/envs/periodvault.env"

  if docker compose -f "$PROJECT_ROOT/infra/runners/docker-compose.yml" config --quiet 2>/dev/null; then
    pass "docker compose config validates"
  else
    fail "docker compose config failed"
  fi

  # Verify compose defines expected services
  COMPOSE_SERVICES="$(docker compose -f "$PROJECT_ROOT/infra/runners/docker-compose.yml" config --services 2>/dev/null)"
  assert_contains "$COMPOSE_SERVICES" "registry" "Compose service: registry"
  assert_contains "$COMPOSE_SERVICES" "runner-slim-1" "Compose service: runner-slim-1"
  assert_contains "$COMPOSE_SERVICES" "runner-slim-2" "Compose service: runner-slim-2"
  assert_contains "$COMPOSE_SERVICES" "runner-emulator" "Compose service: runner-emulator"

  # Clean up temp env files
  rm -f "$PROJECT_ROOT/infra/runners/.env" "$PROJECT_ROOT/infra/runners/envs/periodvault.env"

  # --- Cleanup test images ---
  docker rmi periodvault-runner-test:slim periodvault-runner-test:full 2>/dev/null || true
fi
|
||||
|
||||
# ===========================================================================
# Section 12: build-runner-image.yml workflow structure
# ===========================================================================

log ""
log "=== Section 12: build-runner-image.yml structure ==="

BUILD_WF="$(cat "$PROJECT_ROOT/.github/workflows/build-runner-image.yml")"
assert_contains "$BUILD_WF" "slim" "Build workflow includes slim target"
assert_contains "$BUILD_WF" "full" "Build workflow includes full target"
assert_contains "$BUILD_WF" "matrix" "Build workflow uses matrix strategy"
assert_contains "$BUILD_WF" "ghcr.io" "Build workflow pushes to GHCR"

# ===========================================================================
# Results
# ===========================================================================

log ""
log "=============================="
TOTAL=$((PASS_COUNT + FAIL_COUNT + SKIP_COUNT))
log "Results: $PASS_COUNT passed, $FAIL_COUNT failed, $SKIP_COUNT skipped (total: $TOTAL)"
log "=============================="

# Any failed check fails the whole suite; skips do not.
if [[ $FAIL_COUNT -gt 0 ]]; then
  log "FAILED"
  exit 1
fi

log "ALL PASSED"
exit 0
|
||||
239
runners-conversion/periodVault/test-test-quality-gate.sh
Executable file
239
runners-conversion/periodVault/test-test-quality-gate.sh
Executable file
@@ -0,0 +1,239 @@
|
||||
#!/usr/bin/env bash
# test-test-quality-gate.sh — Integration-style tests for validate-test-quality.sh.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"

# Result counters mutated by pass()/fail(); reported by main().
PASS_COUNT=0
FAIL_COUNT=0
# Temp fixture repos to remove in the EXIT-trap cleanup().
declare -a TMP_REPOS=()
|
||||
|
||||
# Print a message prefixed with this suite's tag.
log() { printf '%s\n' "[test-quality-test] $*"; }

# Record a passing check and log it.
pass() {
  PASS_COUNT=$((PASS_COUNT + 1))
  log "PASS: $*"
}

# Record a failing check and log it (main() turns failures into exit 1).
fail() {
  FAIL_COUNT=$((FAIL_COUNT + 1))
  log "FAIL: $*"
}
|
||||
|
||||
# Abort the suite (exit 1) unless command $1 is available on PATH.
require_cmd() {
  local cmd="$1"
  command -v "$cmd" >/dev/null 2>&1 && return 0
  echo "Missing required command: $cmd"
  exit 1
}
|
||||
|
||||
# Run "$2..." expecting success; record pass/fail under label $1.
# Command output is captured to a scratch file and dumped on failure.
run_expect_success() {
  local label="$1"
  shift
  local capture=/tmp/test-quality-gate.out
  if ! "$@" >"$capture" 2>&1; then
    fail "$label"
    cat "$capture"
    return 0
  fi
  pass "$label"
}
|
||||
|
||||
# Run "$2..." expecting a non-zero exit; record pass/fail under label $1.
# On an unexpected success the captured output is dumped for debugging.
run_expect_failure() {
  local label="$1"
  shift
  local capture=/tmp/test-quality-gate.out
  if ! "$@" >"$capture" 2>&1; then
    pass "$label"
    return 0
  fi
  fail "$label (expected failure but command succeeded)"
  cat "$capture"
}
|
||||
|
||||
# Build a throwaway repo containing validate-test-quality.sh, one Android
# and one iOS test fixture that each deliberately contain exactly one of
# every tracked anti-pattern, and a baseline JSON whose counts match.
# Prints the repo path on stdout.
create_fixture_repo() {
  local repo
  repo="$(mktemp -d)"

  mkdir -p "$repo/scripts" \
    "$repo/audit" \
    "$repo/androidApp/src/androidTest/kotlin/example" \
    "$repo/iosApp/iosAppUITests"

  cp "$PROJECT_ROOT/scripts/validate-test-quality.sh" "$repo/scripts/"
  chmod +x "$repo/scripts/validate-test-quality.sh"

  # Android fixture: one Thread.sleep and one catch(AssertionError),
  # matching the baseline counts below.
  cat > "$repo/androidApp/src/androidTest/kotlin/example/ExampleUiTest.kt" <<'EOF'
package example

import org.junit.Test

class ExampleUiTest {
    @Test
    fun usesAntiPatternsForFixture() {
        Thread.sleep(5)
        try {
            // fixture-only
        } catch (e: AssertionError) {
            // fixture-only
        }
    }
}
EOF

  # iOS fixture: one sleep(), one conditional .exists guard, one no-op
  # XCTAssertTrue(true) — again matching the baseline.
  cat > "$repo/iosApp/iosAppUITests/ExampleUiTests.swift" <<'EOF'
import XCTest

final class ExampleUiTests: XCTestCase {
    func testFixtureUsesAntiPatterns() {
        sleep(1)
        if XCUIApplication().buttons["Example"].exists {
            XCTAssertTrue(true)
        }
    }
}
EOF

  # Baseline: each metric's "baseline" equals the count planted in the
  # fixtures above, with zero allowed growth.
  cat > "$repo/audit/test-quality-baseline.json" <<'EOF'
{
  "version": 1,
  "generated_at": "2026-02-20T16:00:00Z",
  "metrics": [
    {
      "id": "android_thread_sleep_calls",
      "description": "Android Thread.sleep",
      "mode": "rg",
      "root": "androidApp/src/androidTest",
      "glob": "*.kt",
      "pattern": "Thread\\.sleep\\(",
      "baseline": 1,
      "allowed_growth": 0
    },
    {
      "id": "android_assertionerror_catches",
      "description": "Android AssertionError catches",
      "mode": "rg",
      "root": "androidApp/src/androidTest",
      "glob": "*.kt",
      "pattern": "catch \\([^\\)]*AssertionError",
      "baseline": 1,
      "allowed_growth": 0
    },
    {
      "id": "ios_sleep_calls",
      "description": "iOS sleep calls",
      "mode": "rg",
      "root": "iosApp/iosAppUITests",
      "glob": "*.swift",
      "pattern": "\\bsleep\\(",
      "baseline": 1,
      "allowed_growth": 0
    },
    {
      "id": "ios_conditional_exists_guards_in_test_bodies",
      "description": "iOS conditional exists checks in test bodies",
      "mode": "swift_test_body_pattern",
      "root": "iosApp/iosAppUITests",
      "glob": "*.swift",
      "pattern": "if[[:space:]]+[^\\n]*\\.exists",
      "baseline": 1,
      "allowed_growth": 0
    },
    {
      "id": "ios_noop_assert_true",
      "description": "iOS no-op assertTrue(true) in test bodies",
      "mode": "swift_test_body_pattern",
      "root": "iosApp/iosAppUITests",
      "glob": "*.swift",
      "pattern": "XCTAssertTrue\\(true\\)",
      "baseline": 1,
      "allowed_growth": 0
    },
    {
      "id": "ios_empty_test_bodies",
      "description": "iOS empty or comment-only test bodies",
      "mode": "rg_multiline",
      "root": "iosApp/iosAppUITests",
      "glob": "*.swift",
      "pattern": "(?s)func\\s+test[[:alnum:]_]+\\s*\\([^)]*\\)\\s*(?:throws\\s*)?\\{\\s*(?:(?://[^\\n]*\\n)\\s*)*\\}",
      "baseline": 0,
      "allowed_growth": 0
    },
    {
      "id": "ios_placeholder_test_markers",
      "description": "iOS placeholder markers in test bodies",
      "mode": "swift_test_body_pattern",
      "root": "iosApp/iosAppUITests",
      "glob": "*.swift",
      "pattern": "(TODO|FIXME|placeholder|no-op)",
      "baseline": 0,
      "allowed_growth": 0
    }
  ]
}
EOF

  # NOTE(review): callers invoke this via repo="$(create_fixture_repo)",
  # so this append runs in a command-substitution subshell and is lost in
  # the parent shell — the EXIT-trap cleanup never sees these entries.
  # Callers should register the returned path themselves.
  TMP_REPOS+=("$repo")
  echo "$repo"
}
|
||||
|
||||
# Gate must pass when the fixture's metric counts exactly match the baseline.
test_baseline_pass() {
  local repo
  repo="$(create_fixture_repo)"
  # create_fixture_repo's own TMP_REPOS+= runs inside the $( ) subshell and
  # is lost here, so the EXIT-trap cleanup would leak this directory.
  # Register it in the parent shell explicitly.
  TMP_REPOS+=("$repo")
  run_expect_success "validate-test-quality passes when metrics match baseline" \
    bash -lc "cd '$repo' && scripts/validate-test-quality.sh"
}
|
||||
|
||||
# Gate must fail when a tracked anti-pattern count grows past the baseline.
test_growth_fails() {
  local repo
  repo="$(create_fixture_repo)"
  # Register in the parent shell: the append inside create_fixture_repo
  # happens in the $( ) subshell, so cleanup would otherwise leak the dir.
  TMP_REPOS+=("$repo")
  # Add a second Thread.sleep — baseline is 1 with allowed_growth 0.
  echo "Thread.sleep(10)" >> "$repo/androidApp/src/androidTest/kotlin/example/ExampleUiTest.kt"
  run_expect_failure "validate-test-quality fails when metric grows past threshold" \
    bash -lc "cd '$repo' && scripts/validate-test-quality.sh"
}
|
||||
|
||||
# Gate must tolerate growth up to the per-metric allowed_growth threshold.
test_allowed_growth_passes() {
  local repo
  repo="$(create_fixture_repo)"
  # Register in the parent shell: the append inside create_fixture_repo
  # happens in the $( ) subshell, so cleanup would otherwise leak the dir.
  TMP_REPOS+=("$repo")

  # Raise allowed_growth for ios_sleep_calls from 0 to 1 in the baseline.
  local tmp
  tmp="$(mktemp)"
  jq '(.metrics[] | select(.id == "ios_sleep_calls") | .allowed_growth) = 1' \
    "$repo/audit/test-quality-baseline.json" > "$tmp"
  mv "$tmp" "$repo/audit/test-quality-baseline.json"

  # Add one extra sleep() — now within the allowed growth.
  echo "sleep(1)" >> "$repo/iosApp/iosAppUITests/ExampleUiTests.swift"

  run_expect_success "validate-test-quality honors allowed_growth threshold" \
    bash -lc "cd '$repo' && scripts/validate-test-quality.sh"
}
|
||||
|
||||
# Entry point: verify tool prerequisites, run every scenario, then report.
main() {
  local tool
  for tool in jq rg awk; do
    require_cmd "$tool"
  done

  test_baseline_pass
  test_growth_fails
  test_allowed_growth_passes

  log "Summary: pass=$PASS_COUNT fail=$FAIL_COUNT"
  if [[ "$FAIL_COUNT" -gt 0 ]]; then
    exit 1
  fi
  return 0
}
|
||||
|
||||
# EXIT-trap handler: remove every registered fixture repo plus the shared
# output capture file. Always returns 0 so it never masks the exit status.
cleanup() {
  local dir
  for dir in "${TMP_REPOS[@]:-}"; do
    if [[ -d "$dir" ]]; then
      rm -rf "$dir"
    fi
  done
  rm -f /tmp/test-quality-gate.out
  return 0
}
|
||||
|
||||
# Remove fixture repos on any exit path (success, failure, or set -e abort).
trap cleanup EXIT

main "$@"
|
||||
78
runners-conversion/periodVault/validate-audit-report.sh
Executable file
78
runners-conversion/periodVault/validate-audit-report.sh
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/env bash
# validate-audit-report.sh
# Structural + semantic validation for CODEX audit reports.
#
# Usage: validate-audit-report.sh [report.md]   (default: CODEX-REPORT.md)
# Exits 0 when all checks pass, 1 otherwise.
set -euo pipefail

REPORT_PATH="${1:-CODEX-REPORT.md}"

if [[ ! -f "$REPORT_PATH" ]]; then
  echo "[validate-audit-report] Missing report: $REPORT_PATH"
  exit 1
fi

FAILURES=0

# --- Check 1: Required sections exist ---
required_sections=(
  "## Requirements Mapping"
  "## Constitution Compliance Matrix"
  "## Evidence"
  "## Risks"
)

for section in "${required_sections[@]}"; do
  # Each heading must appear as a full line, exactly.
  if command -v rg >/dev/null 2>&1; then
    if ! rg -q "^${section//\//\\/}$" "$REPORT_PATH"; then
      echo "[validate-audit-report] Missing section: $section"
      FAILURES=$((FAILURES + 1))
    fi
  else
    if ! grep -Eq "^${section//\//\\/}$" "$REPORT_PATH"; then
      echo "[validate-audit-report] Missing section: $section"
      FAILURES=$((FAILURES + 1))
    fi
  fi
done

# --- Check 2: Reject forbidden placeholders ---
forbidden_patterns=("TODO" "TBD" "UNMAPPED" "PLACEHOLDER" "FIXME")
for pattern in "${forbidden_patterns[@]}"; do
  # BUG FIX: `grep -c` prints "0" AND exits 1 when there is no match, so the
  # old `$(grep -c ... || echo 0)` captured a two-line "0\n0", which then
  # blew up the arithmetic test below under set -e. Use `|| true` and
  # default an empty capture (the rg no-match case) to 0 instead.
  if command -v rg >/dev/null 2>&1; then
    count="$(rg -c -- "$pattern" "$REPORT_PATH" 2>/dev/null || true)"
  else
    count="$(grep -c -- "$pattern" "$REPORT_PATH" 2>/dev/null || true)"
  fi
  count="${count:-0}"
  if [[ "$count" -gt 0 ]]; then
    echo "[validate-audit-report] Forbidden placeholder '$pattern' found ($count occurrences)"
    FAILURES=$((FAILURES + 1))
  fi
done

# --- Check 3: Non-empty sections (at least 1 non-blank line after heading) ---
for section in "${required_sections[@]}"; do
  # Extract content between this heading and the next ## heading (or EOF)
  section_escaped="${section//\//\\/}"
  content=""
  if command -v awk >/dev/null 2>&1; then
    content="$(awk "/^${section_escaped}\$/{found=1; next} found && /^## /{exit} found{print}" "$REPORT_PATH" | grep -v '^[[:space:]]*$' || true)"
  fi
  if [[ -z "$content" ]]; then
    echo "[validate-audit-report] Section is empty: $section"
    FAILURES=$((FAILURES + 1))
  fi
done

# --- Check 4: Requirements mapping has entries (table rows or list items) ---
# Counts markdown table data rows (| but not separator rows) and "- " list items.
req_entries="$(awk '/^## Requirements Mapping$/{found=1; next} found && /^## /{exit} found && /^\|[^-]/{print} found && /^- /{print}' "$REPORT_PATH" | wc -l | tr -d ' ')"
if [[ "$req_entries" -lt 1 ]]; then
  echo "[validate-audit-report] Requirements Mapping has no entries (expected table rows or list items)"
  FAILURES=$((FAILURES + 1))
fi

# --- Result ---
if [[ $FAILURES -gt 0 ]]; then
  echo "[validate-audit-report] FAILED ($FAILURES issues)"
  exit 1
fi

echo "[validate-audit-report] PASS ($REPORT_PATH)"
|
||||
99
runners-conversion/periodVault/validate-ios-skipped-tests.sh
Executable file
99
runners-conversion/periodVault/validate-ios-skipped-tests.sh
Executable file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env bash
# validate-ios-skipped-tests.sh — Fail when iOS test results contain non-allowlisted skipped tests.
set -euo pipefail

# Resolve this script's directory so the default allowlist path works
# regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
# Repository root — assumes the script lives one level below it
# (e.g. scripts/); confirm layout if relocated.
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
|
||||
|
||||
# Print CLI usage to stdout (quoted heredoc: no expansions).
usage() {
  cat <<'EOF'
Usage:
  scripts/validate-ios-skipped-tests.sh <xcresult_path> [allowlist_file]

Arguments:
  xcresult_path    Path to .xcresult bundle generated by xcodebuild test
  allowlist_file   Optional allowlist of skipped test names (one per line, # comments allowed)
                   Default: audit/ios-skipped-tests-allowlist.txt
EOF
}
|
||||
|
||||
# Handle help flags before positional parsing.
case "${1:-}" in
  -h|--help)
    usage
    exit 0
    ;;
esac

# First positional argument (the .xcresult bundle) is mandatory.
RESULT_PATH="${1:-}"
if [[ -z "$RESULT_PATH" ]]; then
  usage
  exit 1
fi

# Allowlist defaults to the repo-level audit file unless overridden.
ALLOWLIST_PATH="${2:-$PROJECT_ROOT/audit/ios-skipped-tests-allowlist.txt}"
|
||||
|
||||
# Abort with a clear diagnostic when a required external tool is absent.
require_cmd() {
  local tool="$1"
  command -v "$tool" >/dev/null 2>&1 && return 0
  echo "Missing required command: $tool" >&2
  exit 1
}
|
||||
|
||||
# Verify all external dependencies up front, in a fixed order.
for tool in xcrun jq sort comm mktemp; do
  require_cmd "$tool"
done

# An .xcresult bundle is a directory, not a regular file.
if [[ ! -d "$RESULT_PATH" ]]; then
  echo "xcresult bundle not found: $RESULT_PATH" >&2
  exit 1
fi
|
||||
|
||||
# Scratch files, removed on every exit path via the trap below.
TMP_JSON="$(mktemp)"
TMP_SKIPPED="$(mktemp)"
TMP_ALLOWLIST="$(mktemp)"
TMP_UNALLOWED="$(mktemp)"

# Remove all scratch files.
cleanup() {
  rm -f -- "$TMP_JSON" "$TMP_SKIPPED" "$TMP_ALLOWLIST" "$TMP_UNALLOWED"
}
trap cleanup EXIT
|
||||
|
||||
# Export the test tree as JSON. A failure here usually means an
# incompatible xcresulttool version or a corrupt/partial bundle.
if ! xcrun xcresulttool get test-results tests --path "$RESULT_PATH" --format json > "$TMP_JSON" 2>/dev/null; then
  echo "Failed to parse xcresult test results: $RESULT_PATH" >&2
  exit 1
fi

# Walk every object in the JSON and collect the names of skipped tests.
# Several status field spellings are probed because the xcresulttool
# schema differs between Xcode releases — confirm coverage if a new
# Xcode version stops reporting skips.
# Output: sorted unique non-blank names, one per line.
jq -r '
  .. | objects
  | select((.result == "Skipped") or (.status == "Skipped") or (.outcome == "Skipped") or (.testStatus == "Skipped"))
  | (.name // .identifier // empty)
' "$TMP_JSON" | sed '/^[[:space:]]*$/d' | sort -u > "$TMP_SKIPPED"
|
||||
|
||||
# Normalize the allowlist: drop comment/blank lines, strip trailing
# whitespace, sort unique (comm below requires sorted input).
# A missing allowlist behaves as an empty one.
if [[ -f "$ALLOWLIST_PATH" ]]; then
  { grep -vE '^[[:space:]]*(#|$)' "$ALLOWLIST_PATH" || true; } \
    | sed -e 's/[[:space:]]*$//' -e '/^[[:space:]]*$/d' \
    | sort -u > "$TMP_ALLOWLIST"
else
  : > "$TMP_ALLOWLIST"
fi
|
||||
|
||||
# Fast path: nothing was skipped at all.
if [[ ! -s "$TMP_SKIPPED" ]]; then
  echo "Skipped-test gate: PASS (no skipped iOS tests)"
  exit 0
fi

# Lines present in the skipped set but absent from the allowlist.
# Both inputs are sorted unique (comm requirement).
comm -23 "$TMP_SKIPPED" "$TMP_ALLOWLIST" > "$TMP_UNALLOWED"

if [[ -s "$TMP_UNALLOWED" ]]; then
  echo "Skipped-test gate: FAIL (non-allowlisted skipped iOS tests found)"
  # Idiom fix: feed sed the file directly instead of `cat file | sed`.
  sed 's/^/ - /' "$TMP_UNALLOWED"
  if [[ -s "$TMP_ALLOWLIST" ]]; then
    echo "Allowlist used: $ALLOWLIST_PATH"
  else
    echo "Allowlist is empty: $ALLOWLIST_PATH"
  fi
  exit 1
fi

echo "Skipped-test gate: PASS (all skipped iOS tests are allowlisted)"
exit 0
|
||||
58
runners-conversion/periodVault/validate-sdd.sh
Executable file
58
runners-conversion/periodVault/validate-sdd.sh
Executable file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env bash
# validate-sdd.sh
# Ensures changed spec folders keep mandatory SDD artifacts.
set -euo pipefail

# Diff base; defaults to origin/main, overridable via $1.
BASE_REF="${1:-origin/main}"

# Fall back to the previous commit when the base ref cannot be resolved
# (e.g. a shallow CI clone without origin/main).
if ! git rev-parse --verify "$BASE_REF" >/dev/null 2>&1; then
  BASE_REF="HEAD~1"
fi
|
||||
|
||||
# Collect files changed under specs/ relative to the merge base.
CHANGED_SPEC_FILES=()
while IFS= read -r changed; do
  if [[ -n "$changed" ]]; then
    CHANGED_SPEC_FILES+=("$changed")
  fi
done < <(git diff --name-only "$BASE_REF"...HEAD -- 'specs/**')

# Nothing under specs/ changed: gate trivially passes.
if [[ ${#CHANGED_SPEC_FILES[@]} -eq 0 ]]; then
  echo "[validate-sdd] No changes under specs/."
  exit 0
fi
|
||||
|
||||
# Unique list of changed feature spec directories.
SPEC_DIRS=()

# Append a directory to SPEC_DIRS unless it is already present.
# (The :- guard keeps iteration safe under set -u with an empty array.)
add_unique_dir() {
  local candidate="$1"
  local seen
  for seen in "${SPEC_DIRS[@]:-}"; do
    if [[ "$seen" == "$candidate" ]]; then
      return 0
    fi
  done
  SPEC_DIRS+=("$candidate")
}
|
||||
|
||||
# Map each changed file to its feature directory: specs/<feature>.
# Uses parameter expansion instead of spawning `echo | cut` per file
# (identical result for paths matching the guard regex, no subshells).
for path in "${CHANGED_SPEC_FILES[@]}"; do
  if [[ "$path" =~ ^specs/[^/]+/ ]]; then
    feature_rest="${path#specs/}"
    add_unique_dir "specs/${feature_rest%%/*}"
  fi
done

# Files directly under specs/ (no feature subdirectory) need no artifacts.
if [[ ${#SPEC_DIRS[@]} -eq 0 ]]; then
  echo "[validate-sdd] PASS (no feature spec directories changed)"
  exit 0
fi
|
||||
|
||||
# Every changed feature directory must retain all mandatory artifacts.
FAILED=0
for dir in "${SPEC_DIRS[@]}"; do
  for required in spec.md plan.md tasks.md allowed-files.txt; do
    [[ -f "$dir/$required" ]] || {
      echo "[validate-sdd] Missing required file: $dir/$required"
      FAILED=1
    }
  done
done

[[ $FAILED -eq 0 ]] || exit 1

echo "[validate-sdd] PASS ($BASE_REF...HEAD)"
|
||||
89
runners-conversion/periodVault/validate-tdd.sh
Executable file
89
runners-conversion/periodVault/validate-tdd.sh
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env bash
# validate-tdd.sh
# Guard that production code changes are accompanied by tests.
set -euo pipefail

# Diff base; defaults to origin/main, overridable via $1.
BASE_REF="${1:-origin/main}"

# Fall back to the previous commit when the base ref cannot be resolved
# (e.g. a shallow CI clone without origin/main).
if ! git rev-parse --verify "$BASE_REF" >/dev/null 2>&1; then
  BASE_REF="HEAD~1"
fi
|
||||
|
||||
# All files changed relative to the merge base.
CHANGED_FILES=()
while IFS= read -r entry; do
  if [[ -n "$entry" ]]; then
    CHANGED_FILES+=("$entry")
  fi
done < <(git diff --name-only "$BASE_REF"...HEAD)

# No diff at all: nothing to validate.
if [[ ${#CHANGED_FILES[@]} -eq 0 ]]; then
  echo "[validate-tdd] No changed files."
  exit 0
fi
|
||||
|
||||
# True (exit 0) when the path is production source that ships in the app.
# iOS test targets are excluded even though they sit under iosApp/.
is_production_file() {
  local path="$1"
  case "$path" in
    shared/src/commonMain/*) return 0 ;;
    androidApp/src/main/*) return 0 ;;
    iosApp/iosAppUITests/*|iosApp/iosAppTests/*) return 1 ;;
    iosApp/iosApp/*) return 0 ;;
    *) return 1 ;;
  esac
}
|
||||
|
||||
# True (exit 0) when the path belongs to a recognized test source set.
is_test_file() {
  local path="$1"
  case "$path" in
    shared/src/commonTest/*|shared/src/jvmTest/*) return 0 ;;
    androidApp/src/androidTest/*|androidApp/src/test/*) return 0 ;;
    iosApp/iosAppUITests/*|iosApp/iosAppTests/*) return 0 ;;
    *) return 1 ;;
  esac
}
|
||||
|
||||
# Tally production vs test changes in a single pass.
PROD_COUNT=0
TEST_COUNT=0
for file in "${CHANGED_FILES[@]}"; do
  if is_production_file "$file"; then
    (( PROD_COUNT += 1 ))
  fi
  if is_test_file "$file"; then
    (( TEST_COUNT += 1 ))
  fi
done

# TDD gate: production changes must be accompanied by test changes.
if (( PROD_COUNT > 0 && TEST_COUNT == 0 )); then
  echo "[validate-tdd] Failing: production code changed without matching test updates."
  echo "[validate-tdd] Production files changed: $PROD_COUNT"
  exit 1
fi
|
||||
|
||||
CHANGED_TEST_FILES=()
TEST_PATH_REGEX='^(shared/src/(commonTest|jvmTest)/|androidApp/src/(androidTest|test)/|iosApp/iosApp(UI)?Tests/)'
# Weak-test anti-patterns: swallowed assertion failures, expected-failure
# wrappers (XCTExpectFailure), and disabled/ignored tests.
WEAK_TEST_REGEX='catch[[:space:]]*\([[:space:]]*AssertionError|XCTExpectFailure|@Ignore|@Disabled'

# DRY fix: pick the matcher (ripgrep vs grep -E) once up front instead of
# duplicating every pattern across parallel rg/grep branches as before.
if command -v rg >/dev/null 2>&1; then
  _filter_regex() { rg "$1" || true; }
  _file_has_regex() { rg -q "$1" "$2"; }
else
  _filter_regex() { grep -E "$1" || true; }
  _file_has_regex() { grep -Eq "$1" "$2"; }
fi

# Changed files that live in a test source set.
while IFS= read -r line; do
  if [[ -n "$line" ]]; then
    CHANGED_TEST_FILES+=("$line")
  fi
done < <(printf '%s\n' "${CHANGED_FILES[@]}" | _filter_regex "$TEST_PATH_REGEX")

# Scan each surviving test file (deleted files are skipped) for the
# anti-pattern regex; first hit fails the gate.
for test_file in "${CHANGED_TEST_FILES[@]:-}"; do
  if [[ -f "$test_file" ]]; then
    if _file_has_regex "$WEAK_TEST_REGEX" "$test_file"; then
      echo "[validate-tdd] Failing: potential weak assertion/skip anti-pattern in $test_file"
      exit 1
    fi
  fi
done
|
||||
|
||||
# Informational marker when audit gates are forced on via the environment.
[[ "${FORCE_AUDIT_GATES:-0}" != "1" ]] || echo "[validate-tdd] FORCE_AUDIT_GATES enabled."

echo "[validate-tdd] PASS ($BASE_REF...HEAD)"
|
||||
358
runners-conversion/periodVault/validate-test-quality.sh
Executable file
358
runners-conversion/periodVault/validate-test-quality.sh
Executable file
@@ -0,0 +1,358 @@
|
||||
#!/usr/bin/env bash
# validate-test-quality.sh — Enforce anti-pattern regression thresholds for UI tests.
#
# Usage:
#   scripts/validate-test-quality.sh [baseline_file]
#   scripts/validate-test-quality.sh --help
#
# Behavior:
#   - Loads metric baselines from JSON.
#   - Counts pattern matches in configured roots via ripgrep.
#   - Fails if any metric exceeds baseline + allowed_growth.
set -euo pipefail

# Locate the repository root relative to this script so the default
# baseline path resolves regardless of the caller's working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
DEFAULT_BASELINE_FILE="$PROJECT_ROOT/audit/test-quality-baseline.json"
# Optional first argument overrides the baseline file.
BASELINE_FILE="${1:-$DEFAULT_BASELINE_FILE}"
|
||||
|
||||
# Print CLI usage to stdout (quoted heredoc: no expansions).
usage() {
  cat <<'EOF'
validate-test-quality.sh: enforce UI test anti-pattern regression thresholds.

Usage:
  scripts/validate-test-quality.sh [baseline_file]
  scripts/validate-test-quality.sh --help
EOF
}

# Handle help flags before any work is done.
case "${1:-}" in
  -h|--help)
    usage
    exit 0
    ;;
esac
|
||||
|
||||
# ANSI colors for terminal output.
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m'

# Running count of failed checks; non-zero at the end fails the gate.
FAILURES=0

# Record a failed check: print it in red and bump the counter.
fail() {
  printf '%b\n' "${RED}FAIL:${NC} $1"
  FAILURES=$((FAILURES + 1))
}

# Report a passing check in green.
pass() {
  printf '%b\n' "${GREEN}PASS:${NC} $1"
}
|
||||
|
||||
# Record a failure when a required tool is missing (does not exit, so
# every missing tool gets reported before the gate fails).
require_cmd() {
  local tool="$1"
  command -v "$tool" >/dev/null 2>&1 || fail "Missing required command: $tool"
}
|
||||
|
||||
# Print a path relative to PROJECT_ROOT when it lives inside the repo,
# otherwise print it unchanged (keeps diagnostic messages short).
relative_path() {
  local path="$1"
  if [[ "$path" == "$PROJECT_ROOT/"* ]]; then
    # Quote the prefix inside ${...#...}: left unquoted, glob characters
    # in PROJECT_ROOT (e.g. '?', '*', '[') are treated as a pattern and
    # could strip the wrong prefix (ShellCheck SC2295).
    echo "${path#"$PROJECT_ROOT"/}"
  else
    echo "$path"
  fi
}
|
||||
|
||||
# Count regex matches (single-line mode) under $root for files matching
# $glob. Prints a single integer; propagates rg's status on real errors.
count_matches() {
  local pattern="$1" root="$2" glob="$3"
  local output status=0

  # rg exits 1 for "no matches" — capture the status without tripping -e.
  set +e
  output="$(rg --count-matches --no-messages --pcre2 -N -g "$glob" "$pattern" "$root" 2>/dev/null)"
  status=$?
  set -e

  case "$status" in
    0) ;;
    1) echo "0"; return 0 ;;   # no matches anywhere
    *) return "$status" ;;     # genuine failure (bad pattern, I/O error)
  esac

  if [[ -z "$output" ]]; then
    echo "0"
    return 0
  fi

  # Output lines look like "path:count"; sum the trailing count fields.
  awk -F: '{sum += $NF} END {print sum + 0}' <<<"$output"
}
|
||||
|
||||
# Count regex matches in MULTILINE mode (rg -U: patterns may span line
# boundaries). Same contract as count_matches otherwise.
count_matches_multiline() {
  local pattern="$1" root="$2" glob="$3"
  local output status=0

  # rg exits 1 for "no matches" — capture the status without tripping -e.
  set +e
  output="$(rg --count-matches --no-messages --pcre2 -U -N -g "$glob" "$pattern" "$root" 2>/dev/null)"
  status=$?
  set -e

  case "$status" in
    0) ;;
    1) echo "0"; return 0 ;;   # no matches anywhere
    *) return "$status" ;;     # genuine failure (bad pattern, I/O error)
  esac

  if [[ -z "$output" ]]; then
    echo "0"
    return 0
  fi

  # Output lines look like "path:count"; sum the trailing count fields.
  awk -F: '{sum += $NF} END {print sum + 0}' <<<"$output"
}
|
||||
|
||||
# List files under $root matching $glob; silent and empty on rg errors
# (callers treat "no files" as a zero count).
list_metric_files() {
  local root="$1" glob="$2"
  rg --files -g "$glob" "$root" 2>/dev/null || true
}
|
||||
|
||||
# Count lines matching an awk-style regex `pattern` that fall INSIDE Swift
# test method bodies (func testXxx(...)), across files selected by
# root+glob. Prints a single integer.
# NOTE(review): the brace tracker counts '{'/'}' inside string literals
# and comments too — assumed rare enough in test code; confirm if counts
# look off.
count_swift_test_body_pattern_matches() {
  local pattern="$1"
  local root="$2"
  local glob="$3"
  local files=()

  # Gather candidate files first; zero files means a zero count.
  while IFS= read -r file_path; do
    [[ -n "$file_path" ]] && files+=("$file_path")
  done < <(list_metric_files "$root" "$glob")
  if [[ "${#files[@]}" -eq 0 ]]; then
    echo "0"
    return 0
  fi

  # On a `func testXxx(...) {` header, enter in_test state and start
  # brace-depth tracking; while inside a body, bump `count` per matching
  # line; leave the state when depth returns to zero (handles one-line
  # bodies closed on the header line).
  awk -v pattern="$pattern" '
    function update_depth(line, i, c) {
      for (i = 1; i <= length(line); i++) {
        c = substr(line, i, 1)
        if (c == "{") depth++
        else if (c == "}") depth--
      }
    }

    /^[[:space:]]*func[[:space:]]+test[[:alnum:]_]+[[:space:]]*\(.*\)[[:space:]]*(throws)?[[:space:]]*\{/ {
      in_test = 1
      depth = 0
      update_depth($0)
      if ($0 ~ pattern) count++
      if (depth <= 0) {
        in_test = 0
        depth = 0
      }
      next
    }

    {
      if (!in_test) next

      if ($0 ~ pattern) count++
      update_depth($0)
      if (depth <= 0) {
        in_test = 0
        depth = 0
      }
    }

    END { print count + 0 }
  ' "${files[@]}"
}
|
||||
|
||||
# Count Swift test methods (func testXxx) whose bodies contain no real
# code — only whitespace, braces, and // comments. Prints a single integer.
# NOTE(review): the gsub comment-stripper also removes "//" sequences
# inside string literals, and the brace tracker counts braces in strings —
# assumed rare in test code; confirm if counts look off.
count_swift_empty_test_bodies() {
  local root="$1"
  local glob="$2"
  local files=()

  # Gather candidate files first; zero files means a zero count.
  while IFS= read -r file_path; do
    [[ -n "$file_path" ]] && files+=("$file_path")
  done < <(list_metric_files "$root" "$glob")
  if [[ "${#files[@]}" -eq 0 ]]; then
    echo "0"
    return 0
  fi

  # Track test bodies by brace depth (as in the pattern counter above);
  # accumulate each body's text and test it for any non-comment,
  # non-brace, non-whitespace content when the body closes. A body closed
  # on the header line itself is counted as empty immediately.
  awk '
    function update_depth(line, i, c) {
      for (i = 1; i <= length(line); i++) {
        c = substr(line, i, 1)
        if (c == "{") depth++
        else if (c == "}") depth--
      }
    }

    function test_body_has_code(body, cleaned) {
      cleaned = body
      gsub(/\/\/.*/, "", cleaned)
      gsub(/[ \t\r\n{}]/, "", cleaned)
      return cleaned != ""
    }

    /^[[:space:]]*func[[:space:]]+test[[:alnum:]_]+[[:space:]]*\(.*\)[[:space:]]*(throws)?[[:space:]]*\{/ {
      in_test = 1
      depth = 0
      body = ""
      update_depth($0)
      if (depth <= 0) {
        empty_count++
        in_test = 0
        depth = 0
        body = ""
      }
      next
    }

    {
      if (!in_test) next

      body = body $0 "\n"
      update_depth($0)

      if (depth <= 0) {
        if (!test_body_has_code(body)) {
          empty_count++
        }
        in_test = 0
        depth = 0
        body = ""
      }
    }

    END { print empty_count + 0 }
  ' "${files[@]}"
}
|
||||
|
||||
# Dispatch a metric count to the implementation selected by `mode`.
# Unknown modes print the sentinel "__INVALID_MODE__:<mode>" (status 0)
# so the caller can report a configuration error instead of crashing.
count_metric() {
  local mode="$1" pattern="$2" root="$3" glob="$4"

  if [[ "$mode" == "rg" ]]; then
    count_matches "$pattern" "$root" "$glob"
  elif [[ "$mode" == "rg_multiline" ]]; then
    count_matches_multiline "$pattern" "$root" "$glob"
  elif [[ "$mode" == "swift_test_body_pattern" ]]; then
    count_swift_test_body_pattern_matches "$pattern" "$root" "$glob"
  elif [[ "$mode" == "swift_empty_test_bodies" ]]; then
    count_swift_empty_test_bodies "$root" "$glob"
  else
    echo "__INVALID_MODE__:$mode"
    return 0
  fi
}
|
||||
|
||||
# Tool prerequisites (missing ones are recorded via fail(), not fatal yet).
for tool in jq rg awk; do
  require_cmd "$tool"
done

echo "=== Test Quality Gate ==="
echo "Baseline: $(relative_path "$BASELINE_FILE")"

# A missing baseline file is immediately fatal.
if [[ ! -f "$BASELINE_FILE" ]]; then
  fail "Baseline file not found: $(relative_path "$BASELINE_FILE")"
  echo ""
  echo -e "${RED}Test quality gate failed with $FAILURES issue(s).${NC}"
  exit 1
fi

# The baseline must define a non-empty metrics array.
if jq -e '.metrics | type == "array" and length > 0' "$BASELINE_FILE" >/dev/null; then
  pass "Baseline includes metric definitions"
else
  fail "Baseline file has no metrics"
fi
|
||||
|
||||
# Evaluate each metric object from the baseline. Each entry is validated,
# counted with count_metric, and compared against baseline + allowed_growth.
# Malformed entries record a failure and are skipped (continue), so one
# bad metric does not mask the rest.
while IFS= read -r metric; do
  [[ -z "$metric" ]] && continue

  # Pull fields out of the compact JSON object for this metric.
  metric_id="$(jq -r '.id // empty' <<<"$metric")"
  description="$(jq -r '.description // empty' <<<"$metric")"
  mode="$(jq -r '.mode // "rg"' <<<"$metric")"
  root_rel="$(jq -r '.root // empty' <<<"$metric")"
  glob="$(jq -r '.glob // empty' <<<"$metric")"
  pattern="$(jq -r '.pattern // ""' <<<"$metric")"
  baseline="$(jq -r '.baseline // empty' <<<"$metric")"
  allowed_growth="$(jq -r '.allowed_growth // empty' <<<"$metric")"

  # Structural validation: all fields except pattern/description required.
  if [[ -z "$metric_id" || -z "$root_rel" || -z "$glob" || -z "$baseline" || -z "$allowed_growth" ]]; then
    fail "Metric entry is missing required fields: $metric"
    continue
  fi

  # Only the empty-test-body mode works without a pattern.
  if [[ "$mode" != "swift_empty_test_bodies" && -z "$pattern" ]]; then
    fail "Metric '$metric_id' requires non-empty pattern for mode '$mode'"
    continue
  fi

  if ! [[ "$baseline" =~ ^[0-9]+$ ]]; then
    fail "Metric '$metric_id' has non-numeric baseline: $baseline"
    continue
  fi

  if ! [[ "$allowed_growth" =~ ^[0-9]+$ ]]; then
    fail "Metric '$metric_id' has non-numeric allowed_growth: $allowed_growth"
    continue
  fi

  # Roots may be absolute or relative to the repo root.
  if [[ "$root_rel" == /* ]]; then
    root_path="$root_rel"
  else
    root_path="$PROJECT_ROOT/$root_rel"
  fi

  if [[ ! -d "$root_path" ]]; then
    fail "Metric '$metric_id' root directory not found: $(relative_path "$root_path")"
    continue
  fi

  # Count current occurrences; the `if !` wrapper keeps a counting error
  # from killing the script under set -e.
  current_count="0"
  if ! current_count="$(count_metric "$mode" "$pattern" "$root_path" "$glob")"; then
    fail "Metric '$metric_id' failed while counting matches"
    continue
  fi

  # count_metric signals an unknown mode via a sentinel on stdout.
  if [[ "$current_count" == __INVALID_MODE__:* ]]; then
    fail "Metric '$metric_id' uses unsupported mode '$mode'"
    continue
  fi

  # Threshold check: current count may exceed baseline by allowed_growth.
  max_allowed=$((baseline + allowed_growth))
  delta=$((current_count - baseline))

  if [[ "$current_count" -le "$max_allowed" ]]; then
    pass "$metric_id ($description): current=$current_count baseline=$baseline allowed_growth=$allowed_growth threshold=$max_allowed delta=$delta"
  else
    fail "$metric_id ($description): current=$current_count exceeds threshold=$max_allowed (baseline=$baseline allowed_growth=$allowed_growth delta=$delta)"
  fi
done < <(jq -c '.metrics[]' "$BASELINE_FILE")
|
||||
|
||||
echo ""
# Final verdict based on the accumulated failure count.
if (( FAILURES == 0 )); then
  echo -e "${GREEN}Test quality gate passed.${NC}"
  exit 0
fi

echo -e "${RED}Test quality gate failed with $FAILURES issue(s).${NC}"
exit 1
|
||||
85
runners-conversion/periodVault/verify.sh
Executable file
85
runners-conversion/periodVault/verify.sh
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

GRADLEW="./gradlew"

# The Gradle wrapper must exist and be executable before anything else.
[[ -x "$GRADLEW" ]] || {
  echo "Missing or non-executable ./gradlew. Did you generate the Gradle wrapper?"
  exit 1
}
|
||||
|
||||
# Get the task list once (quiet output to reduce noise)
ALL_TASKS="$($GRADLEW -q tasks --all || true)"

# Empty output means Gradle itself is broken; bail out early.
[[ -n "$ALL_TASKS" ]] || {
  echo "Could not read Gradle tasks. Exiting."
  exit 1
}
|
||||
|
||||
# Prefer KMP aggregate task when available
if echo "$ALL_TASKS" | grep -qE '^allTests[[:space:]]+-'; then
  TASKS="allTests"
else
  # Fallback: collect common test tasks, excluding device-dependent
  # instrumentation tests. The single 'AndroidTest' filter already removes
  # connectedAndroidTest and deviceAndroidTest (substring match), so the
  # two extra grep -v passes the original ran were redundant no-ops.
  TASKS="$(
    echo "$ALL_TASKS" \
      | awk '{print $1}' \
      | grep -E '(^test$|Test$|^check$)' \
      | grep -v 'AndroidTest' \
      | sort -u \
      | tr '\n' ' '
  )"
fi
|
||||
|
||||
# Strip spaces and validate
if [[ -z "${TASKS// /}" ]]; then
  echo "No test tasks found. Exiting."
  exit 1
fi

echo "Running: $GRADLEW $TASKS"
# Run all tasks in one go (faster, simpler)
# $TASKS is intentionally unquoted: it is a space-separated task list
# that must word-split into individual Gradle task arguments.
$GRADLEW $TASKS

echo "==================="
echo "ALL TESTS PASSED!"
echo "==================="
||||
|
||||
# --- Commit, push, and create PR if on a feature branch ---

# Skip commit/push/PR when invoked from a git hook (prevents infinite loop).
# GIT_PUSH_IN_PROGRESS is set by this script itself before pushing;
# GIT_DIR is exported by git while hooks run (it equals ".git" for a
# plain top-level invocation, hence the extra comparison).
if [[ "${GIT_PUSH_IN_PROGRESS:-}" == "1" ]] || [[ -n "${GIT_DIR:-}" && "${GIT_DIR}" != ".git" ]]; then
  exit 0
fi

# NOTE(review): on a detached HEAD this yields the literal string "HEAD"
# and the commit/push below would misbehave — confirm callers always run
# on a named branch.
BRANCH="$(git rev-parse --abbrev-ref HEAD)"
MAIN_BRANCH="main"

# Never auto-commit directly on the main branch.
if [[ "$BRANCH" == "$MAIN_BRANCH" ]]; then
  echo "On $MAIN_BRANCH — skipping commit/push/PR."
  exit 0
fi
|
||||
|
||||
# Commit everything. Note: `git add -A` stages modified, deleted AND
# untracked files — the previous comment claiming untracked files were
# excluded was incorrect. The three-part emptiness check below mirrors
# that: worktree diff, index diff, and untracked files.
if git diff --quiet && git diff --cached --quiet && [[ -z "$(git ls-files --others --exclude-standard)" ]]; then
  echo "No changes to commit."
else
  git add -A
  # Derive a conventional-commit message from the branch name,
  # e.g. "123-add-runner" -> "feat: add runner".
  COMMIT_MSG="feat: $(echo "$BRANCH" | sed 's/^[0-9]*-//' | tr '-' ' ')"
  # `|| echo` keeps set -e from aborting when the commit turns out empty.
  git commit -m "$COMMIT_MSG" || echo "Nothing to commit."
fi
|
||||
|
||||
# Push branch to remote (skip hooks to avoid re-triggering verify.sh).
# GIT_PUSH_IN_PROGRESS doubles as the re-entry guard checked above.
GIT_PUSH_IN_PROGRESS=1 git push --no-verify -u origin "$BRANCH"

# Create PR if one doesn't already exist for this branch.
# NOTE(review): requires the GitHub CLI (gh). If gh is not installed,
# `gh pr create` below dies with a raw "command not found" under set -e —
# consider a `command -v gh` guard with a friendlier message.
if gh pr view "$BRANCH" --json state >/dev/null 2>&1; then
  PR_URL="$(gh pr view "$BRANCH" --json url -q '.url')"
  echo "PR already exists: $PR_URL"
else
  # Title-case the branch name: strip the numeric prefix, replace dashes
  # with spaces, then capitalize the first letter of every word.
  TITLE="$(echo "$BRANCH" | sed 's/^[0-9]*-//' | tr '-' ' ' | awk '{for(i=1;i<=NF;i++) $i=toupper(substr($i,1,1)) substr($i,2)}1')"
  PR_URL="$(gh pr create --title "$TITLE" --body "Automated PR from verify.sh" --base "$MAIN_BRANCH" --head "$BRANCH")"
  echo "PR created: $PR_URL"
fi
|
||||
Reference in New Issue
Block a user