refactor progress
This commit is contained in:
215
scripts/ci/ci_steps.py
Normal file
215
scripts/ci/ci_steps.py
Normal file
@@ -0,0 +1,215 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from typing import Mapping
|
||||
|
||||
from ci_utils import write_github_output, write_github_summary
|
||||
|
||||
|
||||
# Shell snippets executed verbatim on CI runners (via `bash -lc`).

# Trusts the deploy target's SSH host key so later ssh / docker-pussh
# connections to ${SERVER_IP} are non-interactive.
ADD_KNOWN_HOSTS_SCRIPT = """
set -euo pipefail
mkdir -p ~/.ssh
ssh-keyscan -H "${SERVER_IP}" >> ~/.ssh/known_hosts
"""

# Installs the docker-pussh CLI plugin (pinned to unregistry v0.3.1),
# used to push images directly to a remote Docker host over SSH.
INSTALL_DOCKER_PUSSH_SCRIPT = """
set -euo pipefail
mkdir -p ~/.docker/cli-plugins
curl -fsSL https://raw.githubusercontent.com/psviderski/unregistry/v0.3.1/docker-pussh \
-o ~/.docker/cli-plugins/docker-pussh
chmod +x ~/.docker/cli-plugins/docker-pussh
"""

# Installs rclone via the official install script, skipping when a
# binary is already on PATH.
INSTALL_RCLONE_SCRIPT = """
set -euo pipefail
if ! command -v rclone >/dev/null 2>&1; then
curl -fsSL https://rclone.org/install.sh | sudo bash
fi
"""
|
||||
|
||||
|
||||
def rclone_config_script(
    *,
    endpoint: str,
    acl: str,
    profile: str = "ovh",
    env_auth: bool = True,
    expand_vars: bool = False,
) -> str:
    """Build a shell step that writes ~/.config/rclone/rclone.conf.

    The config declares one S3 remote named *profile*. When *expand_vars*
    is true the heredoc delimiter is left unquoted so the shell expands
    variables inside the config body; otherwise the body is written
    literally. *env_auth* controls whether rclone reads S3 credentials
    from the environment.
    """
    delimiter = "RCLONEEOF" if expand_vars else "'RCLONEEOF'"
    config_body = "\n".join(
        [
            f"[{profile}]",
            "type = s3",
            "provider = Other",
            f"env_auth = {'true' if env_auth else 'false'}",
            f"endpoint = {endpoint}",
            f"acl = {acl}",
        ]
    )
    return (
        "set -euo pipefail\n"
        "mkdir -p ~/.config/rclone\n"
        f"cat > ~/.config/rclone/rclone.conf <<{delimiter}\n"
        f"{config_body}\n"
        "RCLONEEOF\n"
    )
|
||||
|
||||
|
||||
def bot_user_id_script() -> str:
    """Shell step resolving the GitHub App bot's numeric user id.

    Queries the GitHub API for the ${APP_SLUG}[bot] user and appends a
    `user-id=<id>` line to $GITHUB_OUTPUT.
    """
    lines = [
        "set -euo pipefail",
        "echo \"user-id=$(gh api \"/users/${APP_SLUG}[bot]\" --jq .id)\" >> \"$GITHUB_OUTPUT\"",
    ]
    return "\n".join(lines) + "\n"
|
||||
|
||||
|
||||
def record_deploy_commit_script(*, include_env: bool, include_sentry: bool) -> str:
    """Build the shell step that logs the commit being deployed.

    When *include_env* is set, DEPLOY_SHA is exported to $GITHUB_ENV;
    when *include_sentry* is set, the Sentry build variables (sha, run
    number, timestamp) are exported as well.
    """
    script = [
        "set -euo pipefail",
        "sha=$(git rev-parse HEAD)",
        "echo \"Deploying commit ${sha}\"",
    ]
    if include_env:
        script.append("printf 'DEPLOY_SHA=%s\\n' \"$sha\" >> \"$GITHUB_ENV\"")
    if include_sentry:
        script += [
            "printf 'SENTRY_BUILD_SHA=%s\\n' \"$sha\" >> \"$GITHUB_ENV\"",
            "printf 'SENTRY_BUILD_NUMBER=%s\\n' \"$GITHUB_RUN_NUMBER\" >> \"$GITHUB_ENV\"",
            "printf 'SENTRY_BUILD_TIMESTAMP=%s\\n' \"$(date +%s)\" >> \"$GITHUB_ENV\"",
        ]
    return "\n".join(script) + "\n"
|
||||
|
||||
|
||||
def set_build_timestamp_script(*, env_name: str = "BUILD_TIMESTAMP") -> str:
    """Shell step exporting the current UTC epoch seconds as *env_name*."""
    export_line = f"echo \"{env_name}=$(date -u +%s)\" >> \"$GITHUB_ENV\""
    return "set -euo pipefail\n" + export_line + "\n"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class ReleaseMetadata:
    """Immutable bundle of values identifying one container release build."""

    version: str  # e.g. "0.0.<run number>" when no explicit version given
    channel: str  # release channel, e.g. "nightly" / "stable"
    source_ref: str  # git ref the build was made from
    sha_short: str  # first 7 chars of the commit SHA
    timestamp: str  # build instant, ISO-8601 UTC with trailing "Z"
    date_ymd: str  # build date as YYYYMMDD
    build_number: str  # CI run number (kept as a string, as GitHub provides it)


def build_release_metadata(
    *,
    version_input: str,
    channel: str,
    source_ref: str,
    env: Mapping[str, str],
    now: datetime | None = None,
) -> ReleaseMetadata:
    """Derive release metadata from CI environment values.

    Falls back to "0.0.<GITHUB_RUN_NUMBER>" when *version_input* is
    empty. *now* may be injected for deterministic testing; it defaults
    to the current UTC time.
    """
    instant = now if now is not None else datetime.now(timezone.utc)
    run_number = env.get("GITHUB_RUN_NUMBER", "")
    return ReleaseMetadata(
        version=version_input or f"0.0.{run_number}",
        channel=channel,
        source_ref=source_ref,
        sha_short=env.get("GITHUB_SHA", "")[:7],
        timestamp=instant.strftime("%Y-%m-%dT%H:%M:%SZ"),
        date_ymd=instant.strftime("%Y%m%d"),
        build_number=run_number,
    )
|
||||
|
||||
|
||||
def write_release_metadata(metadata: ReleaseMetadata) -> None:
    """Publish every release metadata field as a GitHub Actions step output."""
    outputs = {
        "version": metadata.version,
        "channel": metadata.channel,
        "source_ref": metadata.source_ref,
        "sha_short": metadata.sha_short,
        "timestamp": metadata.timestamp,
        "date": metadata.date_ymd,
        "build_number": metadata.build_number,
    }
    write_github_output(outputs)
|
||||
|
||||
|
||||
def build_release_summary(
    *,
    title: str,
    channel: str,
    version: str,
    build_number: str,
    sha: str,
    sha_short: str,
    timestamp: str,
    source_ref: str,
    build_result: str,
    image_tags: str,
    image_digest: str,
    registry: str,
    image_name: str,
    date_ymd: str,
) -> str:
    """Render the GitHub step-summary markdown for a container release.

    Returns the full markdown document (trailing newline included). The
    tags/digest section only appears for successful builds, and the
    suggested `docker pull` commands depend on the release channel.
    """
    image = f"{registry}/{image_name}"

    parts: list[str] = [
        f"## {title}",
        "",
        f"channel: {channel}",
        f"version: v{version}",
        f"build: {build_number}",
        f"sha: {sha} (short: {sha_short})",
        f"time: {timestamp}",
        f"source_ref: {source_ref}",
        "",
        f"build result: {build_result}",
        "",
    ]

    if build_result == "success":
        parts += ["tags:", "```", image_tags, "```", f"digest: `{image_digest}`", ""]

    if channel == "nightly":
        pull_tags = ["nightly", f"nightly-{date_ymd}", f"sha-{sha_short}"]
    else:
        pull_tags = ["stable", "latest", f"v{version}"]
    parts += ["pull:", "```bash"]
    parts += [f"docker pull {image}:{tag}" for tag in pull_tags]
    parts.append("```")

    return "\n".join(parts) + "\n"
|
||||
|
||||
|
||||
def write_release_summary(summary: str, *, build_result: str) -> None:
    """Append *summary* to the step summary; exit nonzero for failed builds."""
    write_github_summary(summary)
    failed = build_result == "failure"
    if failed:
        raise SystemExit(1)
|
||||
80
scripts/ci/ci_utils.py
Executable file
80
scripts/ci/ci_utils.py
Executable file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Callable, Iterable, Mapping, Sequence
|
||||
|
||||
# A workflow step: either a bash script (str, executed via `bash -lc`)
# or a zero-argument callable executed in-process.
Step = str | Callable[[], None]
|
||||
|
||||
|
||||
def run(cmd: Sequence[str], *, env: Mapping[str, str] | None = None) -> None:
|
||||
merged_env = os.environ.copy()
|
||||
if env:
|
||||
merged_env.update(env)
|
||||
subprocess.run(cmd, check=True, env=merged_env)
|
||||
|
||||
|
||||
def run_bash(script: str, *, env: Mapping[str, str] | None = None) -> None:
    """Execute *script* with `bash -lc` (login-shell semantics)."""
    cmd = ["bash", "-lc", script]
    run(cmd, env=env)
|
||||
|
||||
|
||||
def run_pwsh(script: str, *, env: Mapping[str, str] | None = None) -> None:
    """Execute *script* with PowerShell Core, non-interactively and profile-free."""
    cmd = ["pwsh", "-NoProfile", "-NonInteractive", "-Command", script]
    run(cmd, env=env)
|
||||
|
||||
|
||||
def require_env(keys: Iterable[str]) -> None:
    """Exit with an error naming every unset or empty variable in *keys*."""
    absent = [name for name in keys if not os.environ.get(name)]
    if not absent:
        return
    raise SystemExit(f"Missing required environment variables: {', '.join(absent)}")
|
||||
|
||||
|
||||
def write_github_env(pairs: Mapping[str, str]) -> None:
    """Append KEY=VALUE lines to the $GITHUB_ENV file.

    Raises SystemExit when not running under GitHub Actions (GITHUB_ENV
    unset).
    """
    target = os.environ.get("GITHUB_ENV")
    if not target:
        raise SystemExit("GITHUB_ENV is not set")
    lines = [f"{key}={value}\n" for key, value in pairs.items()]
    with open(target, "a", encoding="utf-8") as handle:
        handle.writelines(lines)
|
||||
|
||||
|
||||
def write_github_output(pairs: Mapping[str, str]) -> None:
    """Append key=value step outputs to the $GITHUB_OUTPUT file.

    Raises SystemExit when not running under GitHub Actions
    (GITHUB_OUTPUT unset).
    """
    target = os.environ.get("GITHUB_OUTPUT")
    if not target:
        raise SystemExit("GITHUB_OUTPUT is not set")
    lines = [f"{key}={value}\n" for key, value in pairs.items()]
    with open(target, "a", encoding="utf-8") as handle:
        handle.writelines(lines)
|
||||
|
||||
|
||||
def write_github_summary(text: str) -> None:
    """Append *text* to the GitHub Actions step summary file.

    Raises SystemExit when GITHUB_STEP_SUMMARY is not set.
    """
    target = os.environ.get("GITHUB_STEP_SUMMARY")
    if not target:
        raise SystemExit("GITHUB_STEP_SUMMARY is not set")
    with open(target, "a", encoding="utf-8") as handle:
        handle.write(text)
|
||||
|
||||
|
||||
def read_text(path: str) -> str:
    """Return the UTF-8 decoded contents of *path*."""
    with open(path, encoding="utf-8") as handle:
        contents = handle.read()
    return contents
|
||||
|
||||
|
||||
def main_error(message: str) -> None:
    """Print *message* to stderr and terminate with exit status 1."""
    sys.stderr.write(message + "\n")
    raise SystemExit(1)
|
||||
|
||||
|
||||
def run_step(steps: Mapping[str, Step], step: str) -> None:
    """Look up *step* in *steps* and execute it.

    A string step runs as a bash script; a callable step runs
    in-process. Unknown names abort via main_error().
    """
    try:
        chosen = steps[step]
    except KeyError:
        main_error(f"Unknown step: {step}")
        return  # unreachable: main_error raises SystemExit
    if isinstance(chosen, str):
        run_bash(chosen)
    else:
        chosen()
|
||||
|
||||
|
||||
def pwsh_step(script: str) -> Step:
    """Wrap a PowerShell script string as a callable workflow step."""
    def invoke() -> None:
        run_pwsh(script)

    return invoke
|
||||
70
scripts/ci/ci_workflow.py
Normal file
70
scripts/ci/ci_workflow.py
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from typing import Iterable, Sequence
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EnvArg:
    """Declarative mapping from a CLI flag to an environment variable.

    Workflow scripts accept configuration as flags and mirror the parsed
    values into the environment (see apply_env_args).
    """

    flag: str  # CLI flag, e.g. "--image-name"
    env: str  # environment variable to populate, e.g. "IMAGE_NAME"
    default: str = ""  # argparse default when the flag is omitted
    dest: str | None = None  # explicit argparse dest; derived from flag when None

    def dest_name(self) -> str:
        """Return the argparse dest: explicit *dest*, else flag with dashes stripped/underscored."""
        if self.dest is not None:
            return self.dest
        return self.flag.lstrip("-").replace("-", "_")
|
||||
|
||||
|
||||
def build_step_parser(
    env_args: Sequence[EnvArg] | None = None,
    *,
    include_server_ip: bool = False,
    step_required: bool = True,
) -> argparse.ArgumentParser:
    """Construct the common argparse parser for workflow entry points.

    Adds a required --step flag (unless *step_required* is false), an
    optional --server-ip flag, and one flag per EnvArg declaration.
    """
    parser = argparse.ArgumentParser()
    if step_required:
        parser.add_argument("--step", required=True)
    if include_server_ip:
        parser.add_argument("--server-ip", default="")
    for spec in env_args or []:
        parser.add_argument(spec.flag, default=spec.default, dest=spec.dest_name())
    return parser
|
||||
|
||||
|
||||
def apply_env_args(args: argparse.Namespace, env_args: Iterable[EnvArg]) -> None:
    """Copy each truthy parsed flag value into its environment variable.

    Empty or missing values leave the existing environment untouched, so
    flags act as optional overrides.
    """
    for spec in env_args:
        provided = getattr(args, spec.dest_name(), "")
        if provided:
            os.environ[spec.env] = provided
|
||||
|
||||
|
||||
def apply_server_ip(args: argparse.Namespace) -> None:
    """Mirror a non-empty --server-ip value into $SERVER_IP."""
    ip = getattr(args, "server_ip", "")
    if ip:
        os.environ["SERVER_IP"] = ip
|
||||
|
||||
|
||||
def parse_step_env_args(
    env_args: Sequence[EnvArg] | None = None,
    *,
    include_server_ip: bool = False,
) -> argparse.Namespace:
    """Parse argv for a step-based workflow and export flag values.

    Builds the standard parser, mirrors EnvArg values into the
    environment, and optionally exports --server-ip as $SERVER_IP.
    """
    parser = build_step_parser(env_args, include_server_ip=include_server_ip)
    parsed = parser.parse_args()
    apply_env_args(parsed, env_args or [])
    if include_server_ip:
        apply_server_ip(parsed)
    return parsed
|
||||
|
||||
|
||||
def parse_env_args(env_args: Sequence[EnvArg]) -> argparse.Namespace:
    """Parse argv for flag-only scripts (no --step) and export the values."""
    parser = build_step_parser(env_args, step_required=False)
    parsed = parser.parse_args()
    apply_env_args(parsed, env_args)
    return parsed
|
||||
79
scripts/ci/cli_release.py
Normal file
79
scripts/ci/cli_release.py
Normal file
@@ -0,0 +1,79 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import hashlib
|
||||
from pathlib import Path
|
||||
from typing import Iterable
|
||||
|
||||
from ci_utils import write_github_output
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class CliVersion:
    """Resolved CLI release version plus the git tag it corresponds to."""

    version: str  # bare version, e.g. "1.2.3"
    tag: str  # tag name, e.g. "v1.2.3"


def determine_cli_version(
    *,
    event_name: str,
    input_version: str,
    ref_name: str,
    tag_prefix: str,
) -> CliVersion:
    """Resolve the CLI version from the triggering GitHub event.

    Manual runs (workflow_dispatch) take the version from the workflow
    input and synthesize the tag; tag-push runs derive the version from
    the pushed ref by stripping *tag_prefix*. Raises SystemExit when the
    version cannot be determined.
    """
    if event_name == "workflow_dispatch":
        if not input_version:
            raise SystemExit("Missing version input")
        return CliVersion(version=input_version, tag=f"{tag_prefix}{input_version}")

    if not ref_name:
        raise SystemExit("Missing ref name")
    stripped = ref_name.removeprefix(tag_prefix)
    if not stripped:
        raise SystemExit("Unable to determine version from ref")
    return CliVersion(version=stripped, tag=ref_name)
|
||||
|
||||
|
||||
def write_cli_version_outputs(info: CliVersion) -> None:
    """Expose the resolved version and tag as GitHub Actions step outputs."""
    write_github_output({"version": info.version, "tag": info.tag})
|
||||
|
||||
|
||||
def prepare_release_assets(
    *,
    artifacts_dir: Path,
    release_dir: Path,
    binary_prefix: str,
) -> list[Path]:
    """Collect per-platform CLI binaries into *release_dir*.

    Each artifact directory named "<binary_prefix>-*" must contain a
    binary with the same name as the directory. Binaries are copied into
    *release_dir* and marked executable. Raises SystemExit when a binary
    is missing or nothing was collected. Returns the copied paths in
    sorted artifact order.
    """
    release_dir.mkdir(parents=True, exist_ok=True)

    collected: list[Path] = []
    for artifact_dir in sorted(artifacts_dir.glob(f"{binary_prefix}-*")):
        if not artifact_dir.is_dir():
            continue
        binary = artifact_dir / artifact_dir.name
        if not binary.exists():
            raise SystemExit(f"Missing binary {binary}")
        destination = release_dir / artifact_dir.name
        destination.write_bytes(binary.read_bytes())
        destination.chmod(0o755)  # release binaries must be executable
        collected.append(destination)

    if not collected:
        raise SystemExit("No release assets found")
    return collected
|
||||
|
||||
|
||||
def generate_checksums(files: Iterable[Path], checksums_path: Path) -> None:
    """Write a sha256sum-compatible manifest for *files*.

    Each line is "<sha256>  <name>" with a two-space separator, the
    format GNU `sha256sum --check` requires (a single-space separator is
    rejected as "improperly formatted"). Entries are sorted by path for
    deterministic output.
    """
    lines: list[str] = []
    for path in sorted(files):
        digest = hashlib.sha256(path.read_bytes()).hexdigest()
        # Two spaces between digest and name: coreutils checksum format.
        lines.append(f"{digest}  {path.name}")
    checksums_path.write_text("\n".join(lines) + "\n", encoding="utf-8")
|
||||
44
scripts/ci/deploy_workflow.py
Normal file
44
scripts/ci/deploy_workflow.py
Normal file
@@ -0,0 +1,44 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping, Sequence
|
||||
|
||||
from ci_steps import (
|
||||
ADD_KNOWN_HOSTS_SCRIPT,
|
||||
INSTALL_DOCKER_PUSSH_SCRIPT,
|
||||
record_deploy_commit_script,
|
||||
set_build_timestamp_script,
|
||||
)
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import Step, run_step
|
||||
|
||||
|
||||
def build_standard_deploy_steps(
    *,
    push_and_deploy_script: str,
    include_sentry: bool = False,
    include_build_timestamp: bool = True,
) -> dict[str, Step]:
    """Assemble the common step table for SSH-based deploy workflows.

    In order: record the deployed commit (optionally exporting Sentry
    build vars), optionally export a build timestamp, install
    docker-pussh, trust the server's SSH host key, then run the
    caller-supplied push-and-deploy script.
    """
    record_script = record_deploy_commit_script(
        include_env=True,
        include_sentry=include_sentry,
    )
    steps: dict[str, Step] = {"record_deploy_commit": record_script}
    if include_build_timestamp:
        steps["set_build_timestamp"] = set_build_timestamp_script()
    steps.update(
        install_docker_pussh=INSTALL_DOCKER_PUSSH_SCRIPT,
        add_known_hosts=ADD_KNOWN_HOSTS_SCRIPT,
        push_and_deploy=push_and_deploy_script,
    )
    return steps
|
||||
|
||||
|
||||
def run_deploy_workflow(
    steps: Mapping[str, Step],
    *,
    env_args: Sequence[EnvArg] | None = None,
) -> int:
    """Entry point for deploy scripts: parse argv and run the selected step.

    --server-ip handling is always enabled (exported as $SERVER_IP).
    Returns 0; step failures surface as exceptions / SystemExit.
    """
    args = parse_step_env_args(env_args, include_server_ip=True)
    run_step(steps, args.step)
    return 0
|
||||
162
scripts/ci/erlang_hot_reload.py
Executable file
162
scripts/ci/erlang_hot_reload.py
Executable file
@@ -0,0 +1,162 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
|
||||
def diff_md5(local_path: str, remote_path: str, out_path: str) -> None:
    """Write the beam paths whose MD5 differs between local and remote.

    *remote_path* lines look like "<module> <md5>"; *local_path* lines
    look like "<module> <md5> <path>". A local module counts as changed
    when it is absent remotely, the remote md5 is the literal "null", or
    the hashes differ. Changed paths are written one per line to
    *out_path*.
    """
    remote_md5s: dict[str, str] = {}
    with open(remote_path, "r", encoding="utf-8") as remote_file:
        for raw in remote_file:
            fields = raw.strip().split(None, 1)
            if len(fields) == 2:
                module, checksum = fields
                remote_md5s[module] = checksum.strip()

    changed: list[str] = []
    with open(local_path, "r", encoding="utf-8") as local_file:
        for raw in local_file:
            fields = raw.strip().split(" ", 2)
            if len(fields) != 3:
                continue
            module, checksum, beam_path = fields
            known = remote_md5s.get(module)
            if known is None or known == "null" or known != checksum:
                changed.append(beam_path)

    with open(out_path, "w", encoding="utf-8") as out_file:
        out_file.writelines(f"{p}\n" for p in changed)
|
||||
|
||||
|
||||
def build_json(list_path: str) -> str:
    """Build the hot-reload JSON payload from a list of .beam file paths.

    *list_path* contains one path per line; blank lines and non-.beam
    entries are ignored. Each beam file is base64-encoded and paired with
    its module name (the file stem). The compact JSON payload is printed
    to stdout for the CLI pipeline and also returned, so callers and
    tests can inspect it directly (previously the function only printed).
    """
    beams: list[dict[str, str]] = []
    with open(list_path, "r", encoding="utf-8") as handle:
        for line in handle:
            beam_path = line.strip()
            if not beam_path:
                continue
            filename = os.path.basename(beam_path)
            if not filename.endswith(".beam"):
                continue
            with open(beam_path, "rb") as beam_file:
                encoded = base64.b64encode(beam_file.read()).decode("ascii")
            # Module name is the basename minus the ".beam" suffix.
            beams.append({"module": filename[:-5], "beam_b64": encoded})

    payload = json.dumps({"beams": beams, "purge": "soft"}, separators=(",", ":"))
    print(payload)
    return payload
|
||||
|
||||
|
||||
def _verify_results(results: list, *, warn_message: str, label: str) -> int:
    """Shared result check: fail on non-ok/unverified entries, warn on lingering old code.

    Returns 0 on success, 1 when any entry failed verification.
    """
    bad = [
        result for result in results
        if result.get("status") != "ok" or result.get("verified") is not True
    ]
    if bad:
        print("::error::Hot reload verification failed")
        print(json.dumps(bad, indent=2))
        return 1

    warns = [
        result for result in results
        if result.get("purged_old_code") is not True
        or (result.get("lingering_count") or 0) != 0
    ]
    if warns:
        print(warn_message)
        print(json.dumps(warns, indent=2))

    print(f"Verified {len(results)} {label}")
    return 0


def verify(mode: str) -> int:
    """Validate a hot-reload JSON response read from stdin.

    The response must be a JSON object with a "results" array. In both
    modes every entry must have status "ok" and verified=true; lingering
    old code only produces a mode-specific ::warning::. Returns a process
    exit code (0 ok, 1 failure). The strict/self branches previously
    duplicated the entire checking logic; they now share _verify_results
    and differ only in their warning text and final message.
    """
    raw = sys.stdin.read()
    if not raw.strip():
        print("::error::Empty reload response")
        return 1

    try:
        data = json.loads(raw)
    except Exception as exc:
        print(f"::error::Invalid JSON reload response: {exc}")
        return 1

    results = data.get("results", [])
    if not isinstance(results, list):
        print("::error::Reload response missing results array")
        return 1

    if mode == "strict":
        return _verify_results(
            results,
            warn_message="::warning::Old code is still lingering for some modules after reload",
            label="modules",
        )
    if mode == "self":
        return _verify_results(
            results,
            warn_message="::warning::Self-reload modules may linger until request completes",
            label="self modules",
        )

    print(f"::error::Unknown verify mode: {mode}")
    return 1
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Define the CLI: diff-md5, build-json and verify subcommands."""
    parser = argparse.ArgumentParser()
    commands = parser.add_subparsers(dest="command", required=True)

    diff_cmd = commands.add_parser("diff-md5")
    for positional in ("local_path", "remote_path", "out_path"):
        diff_cmd.add_argument(positional)

    commands.add_parser("build-json").add_argument("list_path")

    verify_cmd = commands.add_parser("verify")
    verify_cmd.add_argument("--mode", choices=("strict", "self"), required=True)

    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
|
||||
args = parse_args()
|
||||
if args.command == "diff-md5":
|
||||
diff_md5(args.local_path, args.remote_path, args.out_path)
|
||||
return 0
|
||||
if args.command == "build-json":
|
||||
build_json(args.list_path)
|
||||
return 0
|
||||
if args.command == "verify":
|
||||
return verify(args.mode)
|
||||
print(f"::error::Unknown command: {args.command}")
|
||||
return 1
|
||||
|
||||
|
||||
# Script entry point: exit with main()'s return code.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
18
scripts/ci/pyproject.toml
Normal file
18
scripts/ci/pyproject.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
[project]
|
||||
name = "fluxer_ci"
|
||||
version = "0.0.0"
|
||||
requires-python = ">=3.12"
|
||||
dependencies = []
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pytest==8.3.4",
|
||||
]
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
testpaths = [
|
||||
"tests",
|
||||
]
|
||||
pythonpath = [
|
||||
".",
|
||||
]
|
||||
141
scripts/ci/release_workflow.py
Normal file
141
scripts/ci/release_workflow.py
Normal file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Mapping, Sequence
|
||||
|
||||
from ci_steps import (
|
||||
build_release_metadata,
|
||||
build_release_summary,
|
||||
write_release_metadata,
|
||||
write_release_summary,
|
||||
)
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import require_env, run_step
|
||||
|
||||
|
||||
# Environment variables the "metadata" step requires before it can run.
RELEASE_METADATA_REQUIRED_ENV = (
    "GITHUB_RUN_NUMBER",
    "GITHUB_SHA",
    "CHANNEL",
    "SOURCE_REF",
)

# Environment variables the "summary" step requires (IMAGE_TAGS and
# IMAGE_DIGEST are read optionally and therefore not listed).
RELEASE_SUMMARY_REQUIRED_ENV = (
    "GITHUB_SHA",
    "CHANNEL",
    "VERSION",
    "BUILD_NUMBER",
    "SHA_SHORT",
    "TIMESTAMP",
    "SOURCE_REF",
    "BUILD_RESULT",
    "REGISTRY",
    "DATE_YMD",
)

# Flag -> environment mappings shared by every release workflow entry
# point; a workflow-specific image-name flag is appended separately
# (see build_release_env_args).
BASE_RELEASE_ENV_ARGS = (
    EnvArg("--version-input", "VERSION_INPUT"),
    EnvArg("--channel", "CHANNEL"),
    EnvArg("--source-ref", "SOURCE_REF"),
    EnvArg("--build-result", "BUILD_RESULT"),
    EnvArg("--version", "VERSION"),
    EnvArg("--build-number", "BUILD_NUMBER"),
    EnvArg("--sha-short", "SHA_SHORT"),
    EnvArg("--timestamp", "TIMESTAMP"),
    EnvArg("--date-ymd", "DATE_YMD"),
    EnvArg("--image-tags", "IMAGE_TAGS"),
    EnvArg("--image-digest", "IMAGE_DIGEST"),
    EnvArg("--registry", "REGISTRY"),
)
|
||||
|
||||
|
||||
def release_metadata_step() -> None:
    """Workflow step: derive release metadata from env and emit step outputs."""
    import os  # imported locally; this module does not import os at top level

    require_env(RELEASE_METADATA_REQUIRED_ENV)
    metadata = build_release_metadata(
        version_input=os.environ.get("VERSION_INPUT", ""),
        channel=os.environ["CHANNEL"],
        source_ref=os.environ["SOURCE_REF"],
        env=os.environ,
    )
    write_release_metadata(metadata)
|
||||
|
||||
|
||||
def create_release_summary_step(*, title: str, image_name_env: str) -> Callable[[], None]:
    """Build the "summary" step closure for a release workflow.

    *image_name_env* names the environment variable holding the image
    name, so each workflow can use its own. The returned callable
    validates the environment, renders the markdown summary, writes it,
    and exits nonzero when the build failed.
    """
    def summary_step() -> None:
        import os  # imported locally; this module does not import os at top level

        require_env([*RELEASE_SUMMARY_REQUIRED_ENV, image_name_env])
        environ = os.environ
        summary = build_release_summary(
            title=title,
            channel=environ["CHANNEL"],
            version=environ["VERSION"],
            build_number=environ["BUILD_NUMBER"],
            sha=environ["GITHUB_SHA"],
            sha_short=environ["SHA_SHORT"],
            timestamp=environ["TIMESTAMP"],
            source_ref=environ["SOURCE_REF"],
            build_result=environ["BUILD_RESULT"],
            image_tags=environ.get("IMAGE_TAGS", ""),
            image_digest=environ.get("IMAGE_DIGEST", ""),
            registry=environ["REGISTRY"],
            image_name=environ[image_name_env],
            date_ymd=environ["DATE_YMD"],
        )
        write_release_summary(summary, build_result=environ["BUILD_RESULT"])

    return summary_step
|
||||
|
||||
|
||||
def build_release_steps(
    *,
    title: str,
    image_name_env: str,
    extra_steps: Mapping[str, Callable[[], None]] | None = None,
) -> dict[str, Callable[[], None]]:
    """Assemble the step table for a release workflow.

    Always contains "metadata" and "summary"; *extra_steps* are merged in
    between (an extra named "metadata" overrides the default).
    """
    table: dict[str, Callable[[], None]] = {"metadata": release_metadata_step}
    table.update(extra_steps or {})
    table["summary"] = create_release_summary_step(title=title, image_name_env=image_name_env)
    return table
|
||||
|
||||
|
||||
def build_release_env_args(
    *,
    image_name_arg: str,
    image_name_env: str,
    extra_env_args: Sequence[EnvArg] = (),
) -> list[EnvArg]:
    """Return the base release EnvArgs plus workflow-specific additions.

    The image-name flag is appended last, so each workflow can name its
    own image variable.
    """
    combined = list(BASE_RELEASE_ENV_ARGS)
    combined.extend(extra_env_args)
    combined.append(EnvArg(image_name_arg, image_name_env))
    return combined
|
||||
|
||||
|
||||
def run_release_workflow(
    *,
    title: str,
    image_name_arg: str,
    image_name_env: str,
    extra_steps: Mapping[str, Callable[[], None]] | None = None,
    extra_env_args: Sequence[EnvArg] = (),
) -> int:
    """Entry point for release workflows.

    Parses --step plus the full release flag set (base flags, any
    extras, and the image-name flag), mirrors the flag values into the
    environment, then dispatches to the matching step ("metadata",
    "summary", or one of *extra_steps*). Returns 0; failures raise
    SystemExit.
    """
    args = parse_step_env_args(
        build_release_env_args(
            image_name_arg=image_name_arg,
            image_name_env=image_name_env,
            extra_env_args=extra_env_args,
        )
    )
    run_step(
        build_release_steps(
            title=title,
            image_name_env=image_name_env,
            extra_steps=extra_steps,
        ),
        args.step,
    )
    return 0
|
||||
69
scripts/ci/uv.lock
generated
Normal file
69
scripts/ci/uv.lock
generated
Normal file
@@ -0,0 +1,69 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.12"
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fluxer-ci"
|
||||
version = "0.0.0"
|
||||
source = { virtual = "." }
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "pytest" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [{ name = "pytest", specifier = "==8.3.4" }]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "26.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.6.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.3.4"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||
{ name = "iniconfig" },
|
||||
{ name = "packaging" },
|
||||
{ name = "pluggy" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919, upload-time = "2024-12-01T12:54:25.98Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083, upload-time = "2024-12-01T12:54:19.735Z" },
|
||||
]
|
||||
0
scripts/ci/workflows/__init__.py
Normal file
0
scripts/ci/workflows/__init__.py
Normal file
547
scripts/ci/workflows/build_desktop.py
Normal file
547
scripts/ci/workflows/build_desktop.py
Normal file
@@ -0,0 +1,547 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_steps import INSTALL_RCLONE_SCRIPT, rclone_config_script
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import pwsh_step, require_env, run_step, write_github_output
|
||||
|
||||
|
||||
# Desktop build matrix: one entry per (platform, arch) pair with the
# GitHub runner label ("os") and the arch name passed to Electron tooling.
PLATFORMS = [
    {"platform": "windows", "arch": "x64", "os": "windows-latest", "electron_arch": "x64"},
    {"platform": "windows", "arch": "arm64", "os": "windows-11-arm", "electron_arch": "arm64"},
    {"platform": "macos", "arch": "x64", "os": "macos-15-intel", "electron_arch": "x64"},
    {"platform": "macos", "arch": "arm64", "os": "macos-15", "electron_arch": "arm64"},
    {"platform": "linux", "arch": "x64", "os": "ubuntu-24.04", "electron_arch": "x64"},
    {"platform": "linux", "arch": "arm64", "os": "ubuntu-24.04-arm", "electron_arch": "arm64"},
]
|
||||
|
||||
|
||||
def parse_bool(value: str) -> bool:
    """Interpret a workflow string input as a boolean (case-insensitive)."""
    truthy = {"1", "true", "yes", "on"}
    return value.lower() in truthy
|
||||
|
||||
|
||||
def set_metadata_step(channel: str, ref: str) -> None:
    """Compute desktop build metadata and emit it as step outputs.

    The version is derived from the CI run number and pub_date is the
    current UTC time. The build channel collapses to "canary" or
    "stable", and the source ref defaults per channel when not given.
    """
    require_env(["GITHUB_RUN_NUMBER"])
    import os

    is_canary = channel == "canary"
    outputs = {
        "version": f"0.0.{os.environ.get('GITHUB_RUN_NUMBER', '')}",
        "pub_date": datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"),
        "channel": channel,
        "build_channel": "canary" if is_canary else "stable",
        "source_ref": ref or ("canary" if is_canary else "main"),
    }
    write_github_output(outputs)
|
||||
|
||||
|
||||
def set_matrix_step(flags: dict[str, bool]) -> None:
    """Filter PLATFORMS by the skip flags and publish the build matrix.

    An entry is excluded when either its platform-wide flag
    (e.g. ``skip_windows``) or its arch-specific flag
    (e.g. ``skip_windows_arm64``) is set in *flags*. The previous
    implementation repeated the same two-flag check once per platform in
    an if/elif chain; the flag keys follow the ``skip_{platform}`` /
    ``skip_{platform}_{arch}`` pattern for every entry in PLATFORMS, so a
    single expression covers all of them.

    The resulting matrix (``{"include": [...]}``) is written to the
    ``matrix`` GitHub output as compact JSON.
    """
    filtered: list[dict[str, str]] = [
        entry
        for entry in PLATFORMS
        if not (
            flags[f"skip_{entry['platform']}"]
            or flags[f"skip_{entry['platform']}_{entry['arch']}"]
        )
    ]
    matrix = {"include": filtered}
    write_github_output({"matrix": json.dumps(matrix, separators=(",", ":"))})
|
||||
|
||||
|
||||
# Shell (bash) and PowerShell snippets for every desktop-build workflow step,
# keyed by step name and executed via run_step(). pwsh_step() entries run
# under PowerShell; plain strings run under bash. Several scripts read
# configuration from environment variables set by the workflow
# (VERSION, CHANNEL, ARCH, ELECTRON_ARCH, WORKDIR, PUB_DATE, ...).
STEPS = {
    # --- per-runner environment preparation -------------------------------
    # Windows: map the workspace to W: and use short C:\ temp/cache dirs to
    # keep paths under the legacy 260-char limit.
    "windows_paths": pwsh_step(
        r"""
subst W: "$env:GITHUB_WORKSPACE"
"WORKDIR=W:" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8

New-Item -ItemType Directory -Force "C:\t" | Out-Null
New-Item -ItemType Directory -Force "C:\sq" | Out-Null
New-Item -ItemType Directory -Force "C:\ebcache" | Out-Null
"TEMP=C:\t" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
"TMP=C:\t" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
"SQUIRREL_TEMP=C:\sq" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
"ELECTRON_BUILDER_CACHE=C:\ebcache" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8

New-Item -ItemType Directory -Force "C:\pnpm-store" | Out-Null
"NPM_CONFIG_STORE_DIR=C:\pnpm-store" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
"npm_config_store_dir=C:\pnpm-store" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8

"store-dir=C:\pnpm-store" | Set-Content -Path "W:\.npmrc" -Encoding ascii
git config --global core.longpaths true
"""
    ),
    "set_workdir_unix": "echo \"WORKDIR=$GITHUB_WORKSPACE\" >> \"$GITHUB_ENV\"\n",
    "resolve_pnpm_store_windows": pwsh_step(
        r"""
$store = pnpm store path --silent
"PNPM_STORE_PATH=$store" | Out-File -FilePath $env:GITHUB_ENV -Append -Encoding utf8
New-Item -ItemType Directory -Force $store | Out-Null
"""
    ),
    "resolve_pnpm_store_unix": """
set -euo pipefail
store="$(pnpm store path --silent)"
echo "PNPM_STORE_PATH=$store" >> "$GITHUB_ENV"
mkdir -p "$store"
""",
    "install_setuptools_windows_arm64": pwsh_step(
        r"""
python -m pip install --upgrade pip
python -m pip install "setuptools>=69" wheel
"""
    ),
    "install_setuptools_macos": "brew install python-setuptools\n",
    "install_linux_deps": """
set -euo pipefail
sudo apt-get update
sudo apt-get install -y \
  libx11-dev libxtst-dev libxt-dev libxinerama-dev libxkbcommon-dev libxrandr-dev \
  ruby ruby-dev build-essential rpm \
  libpixman-1-dev libcairo2-dev libpango1.0-dev libjpeg-dev libgif-dev librsvg2-dev
sudo gem install --no-document fpm
""",
    # --- build ------------------------------------------------------------
    "install_dependencies": "pnpm install --frozen-lockfile\n",
    "update_version": "pnpm version \"${VERSION}\" --no-git-tag-version --allow-same-version\n",
    "set_build_channel": "pnpm set-channel\n",
    "build_electron_main": "pnpm build\n",
    "build_app_macos": "pnpm exec electron-builder --config electron-builder.config.cjs --mac --${ELECTRON_ARCH}\n",
    # Unzip the built .app and assert its CFBundleIdentifier matches the
    # release channel (app.fluxer vs app.fluxer.canary).
    "verify_bundle_id": """
set -euo pipefail
DIST="dist-electron"
ZIP="$(ls -1 "$DIST"/*"${ELECTRON_ARCH}"*.zip | head -n1)"
tmp="$(mktemp -d)"
ditto -xk "$ZIP" "$tmp"
APP="$(find "$tmp" -maxdepth 2 -name "*.app" -print -quit)"
BID=$(/usr/libexec/PlistBuddy -c 'Print :CFBundleIdentifier' "$APP/Contents/Info.plist")

expected="app.fluxer"
if [[ "${BUILD_CHANNEL:-stable}" == "canary" ]]; then expected="app.fluxer.canary"; fi
echo "Bundle id in zip: $BID (expected: $expected)"
test "$BID" = "$expected"
""",
    "build_app_windows": "pnpm exec electron-builder --config electron-builder.config.cjs --win --${ELECTRON_ARCH}\n",
    # Locate the Squirrel .nupkg and run an inline Python script that
    # fails the build if any archived path would exceed the Windows path
    # limit once installed under %LOCALAPPDATA%.
    "analyse_squirrel_paths": pwsh_step(
        r"""
$primaryDir = if ($env:ARCH -eq "arm64") { "dist-electron/squirrel-windows-arm64" } else { "dist-electron/squirrel-windows" }
$fallbackDir = if ($env:ARCH -eq "arm64") { "dist-electron/squirrel-windows" } else { "dist-electron/squirrel-windows-arm64" }
$dirs = @($primaryDir, $fallbackDir)

$nupkg = $null
foreach ($d in $dirs) {
    if (Test-Path $d) {
        $nupkg = Get-ChildItem -Path "$d/*.nupkg" -ErrorAction SilentlyContinue | Select-Object -First 1
        if ($nupkg) { break }
    }
}

if (-not $nupkg) {
    throw "No Squirrel nupkg found in: $($dirs -join ', ')"
}

Write-Host "Analyzing Windows installer $($nupkg.FullName)"
$env:NUPKG_PATH = $nupkg.FullName

$lines = @(
    'import os'
    'import zipfile'
    ''
    'path = os.environ["NUPKG_PATH"]'
    'build_ver = os.environ["BUILD_VERSION"]'
    'prefix = os.path.join(os.environ["LOCALAPPDATA"], "fluxer_app", f"app-{build_ver}", "resources", "app.asar.unpacked")'
    'max_len = int(os.environ.get("MAX_WINDOWS_PATH_LEN", "260"))'
    'headroom = int(os.environ.get("PATH_HEADROOM", "10"))'
    'limit = max_len - headroom'
    ''
    'with zipfile.ZipFile(path) as archive:'
    '    entries = []'
    '    for info in archive.infolist():'
    '        normalized = info.filename.lstrip("/\\\\")'
    '        total_len = len(os.path.join(prefix, normalized)) if normalized else len(prefix)'
    '        entries.append((total_len, info.filename))'
    ''
    'if not entries:'
    '    raise SystemExit("nupkg archive contains no entries")'
    ''
    'entries.sort(reverse=True)'
    'print(f"Assumed install prefix: {prefix} ({len(prefix)} chars). Maximum allowed path length: {limit} (total reserve 72,314, headroom {headroom}).")'
    'print("Top 20 longest archived paths (length includes prefix):")'
    'for length, name in entries[:20]:'
    '    print(f"{length:4d} {name}")'
    ''
    'longest_len, longest_name = entries[0]'
    'if longest_len > limit:'
    '    raise SystemExit(f"Longest path {longest_len} for {longest_name} exceeds limit {limit}")'
    'print(f"Longest archived path {longest_len} is within the limit of {limit}.")'
)

$scriptPath = Join-Path $env:TEMP "nupkg-long-path-check.py"
Set-Content -Path $scriptPath -Value $lines -Encoding utf8
python $scriptPath
"""
    ),
    "build_app_linux": "pnpm exec electron-builder --config electron-builder.config.cjs --linux --${ELECTRON_ARCH}\n",
    # --- artifact staging -------------------------------------------------
    "prepare_artifacts_windows": pwsh_step(
        r"""
New-Item -ItemType Directory -Force upload_staging | Out-Null

$dist = Join-Path $env:WORKDIR "fluxer_desktop/dist-electron"
$sqDirName = if ($env:ARCH -eq "arm64") { "squirrel-windows-arm64" } else { "squirrel-windows" }
$sqFallbackName = if ($sqDirName -eq "squirrel-windows") { "squirrel-windows-arm64" } else { "squirrel-windows" }

$sq = Join-Path $dist $sqDirName
$sqFallback = Join-Path $dist $sqFallbackName

$picked = $null
if (Test-Path $sq) { $picked = $sq }
elseif (Test-Path $sqFallback) { $picked = $sqFallback }

if ($picked) {
    Copy-Item -Force -ErrorAction SilentlyContinue "$picked\*.exe" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$picked\*.exe.blockmap" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$picked\RELEASES*" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$picked\*.nupkg" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$picked\*.nupkg.blockmap" "upload_staging\"
}

if (Test-Path $dist) {
    Copy-Item -Force -ErrorAction SilentlyContinue "$dist\*.yml" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$dist\*.zip" "upload_staging\"
    Copy-Item -Force -ErrorAction SilentlyContinue "$dist\*.zip.blockmap" "upload_staging\"
}

if (-not (Get-ChildItem upload_staging -Filter *.exe -ErrorAction SilentlyContinue)) {
    throw "No installer .exe staged. Squirrel outputs were not copied."
}

Get-ChildItem -Force upload_staging | Format-Table -AutoSize
"""
    ),
    "prepare_artifacts_unix": """
set -euo pipefail
mkdir -p upload_staging
DIST="${WORKDIR}/fluxer_desktop/dist-electron"

cp -f "$DIST"/*.dmg upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.zip upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.zip.blockmap upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.yml upload_staging/ 2>/dev/null || true

cp -f "$DIST"/*.AppImage upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.deb upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.rpm upload_staging/ 2>/dev/null || true
cp -f "$DIST"/*.tar.gz upload_staging/ 2>/dev/null || true

ls -la upload_staging/
""",
    "normalise_updater_yaml": """
set -euo pipefail
cd upload_staging
[[ "${PLATFORM}" == "macos" && -f latest-mac.yml && ! -f latest-mac-arm64.yml ]] && mv latest-mac.yml latest-mac-arm64.yml || true
""",
    "generate_checksums_unix": """
set -euo pipefail
cd upload_staging
for file in *.exe *.dmg *.zip *.AppImage *.deb *.rpm *.tar.gz; do
  [ -f "$file" ] || continue
  sha256sum "$file" | awk '{print $1}' > "${file}.sha256"
  echo "Generated checksum for $file"
done
ls -la *.sha256 2>/dev/null || echo "No checksum files generated"
""",
    "generate_checksums_windows": pwsh_step(
        r"""
cd upload_staging
$extensions = @('.exe', '.nupkg')
Get-ChildItem -File | Where-Object { $extensions -contains $_.Extension } | ForEach-Object {
    $hash = (Get-FileHash $_.FullName -Algorithm SHA256).Hash.ToLower()
    Set-Content -Path "$($_.FullName).sha256" -Value $hash -NoNewline
    Write-Host "Generated checksum for $($_.Name)"
}
Get-ChildItem -Filter "*.sha256" -ErrorAction SilentlyContinue | Format-Table -AutoSize
"""
    ),
    # --- publishing -------------------------------------------------------
    "install_rclone": INSTALL_RCLONE_SCRIPT,
    "configure_rclone": rclone_config_script(
        endpoint="https://s3.us-east-va.io.cloud.ovh.us",
        acl="private",
    ),
    # Arrange downloaded per-platform artifacts into the S3 payload layout
    # desktop/{channel}/{plat}/{arch}/, writing a Squirrel.Mac RELEASES.json
    # for macOS and a manifest.json (via jq) for every platform/arch.
    "build_payload": """
set -euo pipefail

mkdir -p s3_payload

shopt -s nullglob
for dir in artifacts/fluxer-desktop-${CHANNEL}-*; do
  [ -d "$dir" ] || continue

  base="$(basename "$dir")"
  if [[ "$base" =~ ^fluxer-desktop-[a-z]+-([a-z]+)-([a-z0-9]+)$ ]]; then
    platform="${BASH_REMATCH[1]}"
    arch="${BASH_REMATCH[2]}"
  else
    echo "Skipping unrecognised artifact dir: $base"
    continue
  fi

  case "$platform" in
    windows) plat="win32" ;;
    macos) plat="darwin" ;;
    linux) plat="linux" ;;
    *)
      echo "Unknown platform: $platform"
      continue
      ;;
  esac

  dest="s3_payload/desktop/${CHANNEL}/${plat}/${arch}"
  mkdir -p "$dest"
  cp -av "$dir"/* "$dest/" || true

  if [[ "$plat" == "darwin" ]]; then
    zip_file=""
    for z in "$dest"/*-"$arch".zip; do
      zip_file="$z"
      break
    done
    if [[ -z "$zip_file" ]]; then
      for z in "$dest"/*.zip; do
        zip_file="$z"
        break
      done
    fi

    if [[ -z "$zip_file" ]]; then
      echo "No .zip found for macOS $arch in $dest (auto-update requires zip artifacts)."
    else
      zip_name="$(basename "$zip_file")"
      url="${PUBLIC_DL_BASE}/desktop/${CHANNEL}/${plat}/${arch}/${zip_name}"

      cat > "$dest/RELEASES.json" <<EOF
{
  "currentRelease": "${VERSION}",
  "releases": [
    {
      "version": "${VERSION}",
      "updateTo": {
        "version": "${VERSION}",
        "pub_date": "${PUB_DATE}",
        "notes": "",
        "name": "${VERSION}",
        "url": "${url}"
      }
    }
  ]
}
EOF
      cp -f "$dest/RELEASES.json" "$dest/releases.json"
    fi
  fi

  setup_file=""
  dmg_file=""
  zip_file2=""
  appimage_file=""
  deb_file=""
  rpm_file=""
  targz_file=""

  if [[ "$plat" == "win32" ]]; then
    setup_file="$(ls -1 "$dest"/*.exe 2>/dev/null | grep -i 'setup' | head -n1 || true)"
    if [[ -z "$setup_file" ]]; then
      setup_file="$(ls -1 "$dest"/*.exe 2>/dev/null | head -n1 || true)"
    fi
  fi

  if [[ "$plat" == "darwin" ]]; then
    dmg_file="$(ls -1 "$dest"/*-"$arch".dmg 2>/dev/null | head -n1 || true)"
    if [[ -z "$dmg_file" ]]; then
      dmg_file="$(ls -1 "$dest"/*.dmg 2>/dev/null | head -n1 || true)"
    fi
    zip_file2="$(ls -1 "$dest"/*-"$arch".zip 2>/dev/null | head -n1 || true)"
    if [[ -z "$zip_file2" ]]; then
      zip_file2="$(ls -1 "$dest"/*.zip 2>/dev/null | head -n1 || true)"
    fi
  fi

  if [[ "$plat" == "linux" ]]; then
    appimage_file="$(ls -1 "$dest"/*.AppImage 2>/dev/null | head -n1 || true)"
    deb_file="$(ls -1 "$dest"/*.deb 2>/dev/null | head -n1 || true)"
    rpm_file="$(ls -1 "$dest"/*.rpm 2>/dev/null | head -n1 || true)"
    targz_file="$(ls -1 "$dest"/*.tar.gz 2>/dev/null | head -n1 || true)"
  fi

  read_sha256() {
    local file="$1"
    if [[ -n "$file" && -f "${file}.sha256" ]]; then
      awk '{print $1}' "${file}.sha256"
    else
      echo ""
    fi
  }

  setup_sha256="$(read_sha256 "$setup_file")"
  dmg_sha256="$(read_sha256 "$dmg_file")"
  zip_sha256="$(read_sha256 "$zip_file2")"
  appimage_sha256="$(read_sha256 "$appimage_file")"
  deb_sha256="$(read_sha256 "$deb_file")"
  rpm_sha256="$(read_sha256 "$rpm_file")"
  targz_sha256="$(read_sha256 "$targz_file")"

  jq -n \
    --arg channel "${CHANNEL}" \
    --arg platform "${plat}" \
    --arg arch "${arch}" \
    --arg version "${VERSION}" \
    --arg pub_date "${PUB_DATE}" \
    --arg setup "$(basename "${setup_file:-}")" \
    --arg setup_sha256 "${setup_sha256}" \
    --arg dmg "$(basename "${dmg_file:-}")" \
    --arg dmg_sha256 "${dmg_sha256}" \
    --arg zip "$(basename "${zip_file2:-}")" \
    --arg zip_sha256 "${zip_sha256}" \
    --arg appimage "$(basename "${appimage_file:-}")" \
    --arg appimage_sha256 "${appimage_sha256}" \
    --arg deb "$(basename "${deb_file:-}")" \
    --arg deb_sha256 "${deb_sha256}" \
    --arg rpm "$(basename "${rpm_file:-}")" \
    --arg rpm_sha256 "${rpm_sha256}" \
    --arg tar_gz "$(basename "${targz_file:-}")" \
    --arg tar_gz_sha256 "${targz_sha256}" \
    '{
      channel: $channel,
      platform: $platform,
      arch: $arch,
      version: $version,
      pub_date: $pub_date,
      files: (
        {}
        | if ($setup | length) > 0 then
            . + {setup: (if ($setup_sha256 | length) > 0 then {filename: $setup, sha256: $setup_sha256} else $setup end)}
          else . end
        | if ($dmg | length) > 0 then
            . + {dmg: (if ($dmg_sha256 | length) > 0 then {filename: $dmg, sha256: $dmg_sha256} else $dmg end)}
          else . end
        | if ($zip | length) > 0 then
            . + {zip: (if ($zip_sha256 | length) > 0 then {filename: $zip, sha256: $zip_sha256} else $zip end)}
          else . end
        | if ($appimage | length) > 0 then
            . + {appimage: (if ($appimage_sha256 | length) > 0 then {filename: $appimage, sha256: $appimage_sha256} else $appimage end)}
          else . end
        | if ($deb | length) > 0 then
            . + {deb: (if ($deb_sha256 | length) > 0 then {filename: $deb, sha256: $deb_sha256} else $deb end)}
          else . end
        | if ($rpm | length) > 0 then
            . + {rpm: (if ($rpm_sha256 | length) > 0 then {filename: $rpm, sha256: $rpm_sha256} else $rpm end)}
          else . end
        | if ($tar_gz | length) > 0 then
            . + {tar_gz: (if ($tar_gz_sha256 | length) > 0 then {filename: $tar_gz, sha256: $tar_gz_sha256} else $tar_gz end)}
          else . end
      )
    }' > "$dest/manifest.json"
done

echo "Payload tree:"
find s3_payload -maxdepth 6 -type f | sort
""",
    "upload_payload": """
set -euo pipefail
rclone copy s3_payload/desktop "ovh:${S3_BUCKET}/desktop" \
  --transfers 32 \
  --checkers 16 \
  --fast-list \
  --s3-upload-concurrency 8 \
  --s3-chunk-size 16M \
  -v
""",
    "build_summary": """
{
  echo "## Desktop ${DISPLAY_CHANNEL^} Upload Complete"
  echo ""
  echo "**Version:** ${VERSION}"
  echo ""
  echo "**S3 prefix:** desktop/${CHANNEL}/"
  echo ""
  echo "**Redirect endpoint shape:** /dl/desktop/${CHANNEL}/{plat}/{arch}/{format}"
} >> "$GITHUB_STEP_SUMMARY"
""",
}
|
||||
|
||||
|
||||
# Maps the flag keys consumed by set_matrix_step() to the environment
# variables that carry the workflow's skip inputs; main() parses each env
# value with parse_bool() before building the matrix.
SKIP_FLAG_ENV_MAP = {
    "skip_windows": "SKIP_WINDOWS",
    "skip_windows_x64": "SKIP_WINDOWS_X64",
    "skip_windows_arm64": "SKIP_WINDOWS_ARM64",
    "skip_macos": "SKIP_MACOS",
    "skip_macos_x64": "SKIP_MACOS_X64",
    "skip_macos_arm64": "SKIP_MACOS_ARM64",
    "skip_linux": "SKIP_LINUX",
    "skip_linux_x64": "SKIP_LINUX_X64",
    "skip_linux_arm64": "SKIP_LINUX_ARM64",
}
|
||||
|
||||
# CLI flag / environment-variable pairs accepted by this script and passed
# to parse_step_env_args() in main(). NOTE(review): EnvArg presumably maps
# each provided flag value onto the named env var — confirm in ci_workflow.
ENV_ARGS = [
    EnvArg("--channel", "CHANNEL"),
    EnvArg("--ref", "REF"),
    EnvArg("--skip-windows", "SKIP_WINDOWS"),
    EnvArg("--skip-windows-x64", "SKIP_WINDOWS_X64"),
    EnvArg("--skip-windows-arm64", "SKIP_WINDOWS_ARM64"),
    EnvArg("--skip-macos", "SKIP_MACOS"),
    EnvArg("--skip-macos-x64", "SKIP_MACOS_X64"),
    EnvArg("--skip-macos-arm64", "SKIP_MACOS_ARM64"),
    EnvArg("--skip-linux", "SKIP_LINUX"),
    EnvArg("--skip-linux-x64", "SKIP_LINUX_X64"),
    EnvArg("--skip-linux-arm64", "SKIP_LINUX_ARM64"),
]
|
||||
|
||||
|
||||
def main() -> int:
    """Dispatch the requested workflow step.

    ``set_metadata`` and ``set_matrix`` are handled in Python; any other
    step name is looked up in STEPS and executed by run_step().
    """
    import os

    args = parse_step_env_args(ENV_ARGS)
    step = args.step

    if step == "set_metadata":
        channel = os.environ.get("CHANNEL", "") or "stable"
        ref = os.environ.get("REF", "")
        set_metadata_step(channel, ref)
    elif step == "set_matrix":
        skip_flags = {
            flag: parse_bool(os.environ.get(env_name, "false"))
            for flag, env_name in SKIP_FLAG_ENV_MAP.items()
        }
        set_matrix_step(skip_flags)
    else:
        run_step(STEPS, step)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
55
scripts/ci/workflows/channel_vars.py
Executable file
55
scripts/ci/workflows/channel_vars.py
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import EnvArg, parse_env_args
|
||||
from ci_utils import require_env, write_github_output
|
||||
|
||||
|
||||
# CLI flag / environment-variable pairs for this script; the resolved env
# vars feed determine_channel() in main(). NOTE(review): EnvArg presumably
# maps each provided flag onto the named env var — confirm in ci_workflow.
ENV_ARGS = [
    EnvArg("--event-name", "EVENT_NAME"),
    EnvArg("--ref-name", "REF_NAME"),
    EnvArg("--dispatch-channel", "DISPATCH_CHANNEL"),
]
|
||||
|
||||
|
||||
def determine_channel(
    *,
    event_name: str,
    ref_name: str,
    dispatch_channel: str,
) -> str:
    """Resolve the release channel for this run.

    Push events follow the branch name; any other event (e.g. a manual
    dispatch) follows the explicitly requested channel. Returns either
    "canary" or "stable".
    """
    selector = ref_name if event_name == "push" else dispatch_channel
    return "canary" if selector == "canary" else "stable"
|
||||
|
||||
|
||||
def main() -> int:
    """Publish channel-derived outputs: channel, is_canary, stack_suffix."""
    parse_env_args(ENV_ARGS)
    require_env(["EVENT_NAME"])

    env = os.environ
    channel = determine_channel(
        event_name=env.get("EVENT_NAME", ""),
        ref_name=env.get("REF_NAME", ""),
        dispatch_channel=env.get("DISPATCH_CHANNEL", ""),
    )
    canary = channel == "canary"

    write_github_output(
        {
            "channel": channel,
            "is_canary": "true" if canary else "false",
            "stack_suffix": "-canary" if canary else "",
        }
    )

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
54
scripts/ci/workflows/ci.py
Executable file
54
scripts/ci/workflows/ci.py
Executable file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Bash snippets for each CI step, keyed by step name and executed via
# run_step(). Every script enables `set -euo pipefail` so a failing
# command aborts the step.
STEPS: dict[str, str] = {
    # JS/TS toolchain steps (run from the repository root).
    "install_dependencies": """
set -euo pipefail
pnpm install --frozen-lockfile
""",
    "typecheck": """
set -euo pipefail
pnpm typecheck
""",
    "test": """
set -euo pipefail
pnpm test
""",
    # Erlang gateway steps (run inside fluxer_gateway with rebar3).
    "gateway_compile": """
set -euo pipefail
cd fluxer_gateway
rebar3 compile
""",
    "gateway_dialyzer": """
set -euo pipefail
cd fluxer_gateway
rebar3 dialyzer
""",
    "gateway_eunit": """
set -euo pipefail
cd fluxer_gateway
rebar3 eunit
""",
    # Dead-code / unused-export analysis.
    "knip": """
set -euo pipefail
pnpm knip
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Run the CI step named by the command line / environment."""
    cli = parse_step_env_args()
    run_step(STEPS, cli.step)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
33
scripts/ci/workflows/ci_scripts.py
Normal file
33
scripts/ci/workflows/ci_scripts.py
Normal file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Steps for checking the CI tooling itself: scripts/ci is a uv-managed
# Python project, so both steps cd into it before invoking uv.
STEPS: dict[str, str] = {
    "sync": """
set -euo pipefail
cd scripts/ci
uv sync --dev
""",
    "test": """
set -euo pipefail
cd scripts/ci
uv run pytest
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Execute the requested CI-scripts step."""
    parsed = parse_step_env_args()
    run_step(STEPS, parsed.step)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
95
scripts/ci/workflows/deploy_admin.py
Executable file
95
scripts/ci/workflows/deploy_admin.py
Executable file
@@ -0,0 +1,95 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow
|
||||
|
||||
|
||||
PUSH_AND_DEPLOY_SCRIPT = """
|
||||
set -euo pipefail
|
||||
docker pussh "${IMAGE_TAG}" "${SERVER}"
|
||||
|
||||
ssh "${SERVER}" \
|
||||
"IMAGE_TAG=${IMAGE_TAG} STACK=${STACK} CADDY_DOMAIN=${CADDY_DOMAIN} REPLICAS=${REPLICAS} RELEASE_CHANNEL=${RELEASE_CHANNEL} IS_CANARY=${IS_CANARY} bash" << 'REMOTE_EOF'
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${IS_CANARY}" == "true" ]]; then
|
||||
CONFIG_PATH="/etc/fluxer/config.canary.json"
|
||||
else
|
||||
CONFIG_PATH="/etc/fluxer/config.stable.json"
|
||||
fi
|
||||
sudo mkdir -p "/opt/${STACK}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
|
||||
cd "/opt/${STACK}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
x-deploy-base: &deploy_base
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
|
||||
x-healthcheck: &healthcheck
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
services:
|
||||
app:
|
||||
image: ${IMAGE_TAG}
|
||||
environment:
|
||||
FLUXER_CONFIG: /etc/fluxer/config.json
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
deploy:
|
||||
<<: *deploy_base
|
||||
replicas: ${REPLICAS}
|
||||
labels:
|
||||
- "caddy=${CADDY_DOMAIN}"
|
||||
- 'caddy.reverse_proxy={{upstreams 8080}}'
|
||||
- 'caddy.header.X-Robots-Tag="noindex, nofollow, nosnippet, noimageindex"'
|
||||
- 'caddy.header.Strict-Transport-Security="max-age=31536000; includeSubDomains; preload"'
|
||||
- 'caddy.header.X-Xss-Protection="1; mode=block"'
|
||||
- 'caddy.header.X-Content-Type-Options=nosniff'
|
||||
- 'caddy.header.Referrer-Policy=strict-origin-when-cross-origin'
|
||||
- 'caddy.header.X-Frame-Options=DENY'
|
||||
networks: [fluxer-shared]
|
||||
healthcheck: *healthcheck
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy \
|
||||
--with-registry-auth \
|
||||
--detach=false \
|
||||
--resolve-image never \
|
||||
-c compose.yaml \
|
||||
"${STACK}"
|
||||
REMOTE_EOF
|
||||
"""
|
||||
|
||||
# Standard deploy step table, with only the push-and-deploy script
# customised for the admin service.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
)
|
||||
|
||||
|
||||
def main() -> int:
    """Entry point: run the shared deploy workflow with this file's steps."""
    exit_code = run_deploy_workflow(STEPS)
    return exit_code
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
167
scripts/ci/workflows/deploy_api.py
Executable file
167
scripts/ci/workflows/deploy_api.py
Executable file
@@ -0,0 +1,167 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow
|
||||
|
||||
|
||||
PUSH_AND_DEPLOY_SCRIPT = """
|
||||
set -euo pipefail
|
||||
|
||||
docker pussh "${IMAGE_TAG_APP}" "${SERVER}"
|
||||
|
||||
if [[ "${IS_CANARY}" == "true" ]]; then
|
||||
docker pussh "${IMAGE_TAG_WORKER}" "${SERVER}"
|
||||
fi
|
||||
|
||||
ssh "${SERVER}" \
|
||||
"IMAGE_TAG_APP=${IMAGE_TAG_APP} IMAGE_TAG_WORKER=${IMAGE_TAG_WORKER} STACK=${STACK} WORKER_STACK=${WORKER_STACK} CANARY_WORKER_REPLICAS=${CANARY_WORKER_REPLICAS} IS_CANARY=${IS_CANARY} CADDY_DOMAIN=${CADDY_DOMAIN} RELEASE_CHANNEL=${RELEASE_CHANNEL} SENTRY_RELEASE=${SENTRY_RELEASE} SENTRY_BUILD_SHA=${SENTRY_BUILD_SHA} SENTRY_BUILD_NUMBER=${SENTRY_BUILD_NUMBER} SENTRY_BUILD_TIMESTAMP=${SENTRY_BUILD_TIMESTAMP} bash" << 'REMOTE_EOF'
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${IS_CANARY}" == "true" ]]; then
|
||||
CONFIG_PATH="/etc/fluxer/config.canary.json"
|
||||
else
|
||||
CONFIG_PATH="/etc/fluxer/config.stable.json"
|
||||
fi
|
||||
CANARY_WORKER_REPLICAS="${CANARY_WORKER_REPLICAS:-3}"
|
||||
BLUESKY_KEYS_DIR="/etc/fluxer/keys"
|
||||
sudo mkdir -p "${BLUESKY_KEYS_DIR}"
|
||||
sudo chown root:65534 "${BLUESKY_KEYS_DIR}"
|
||||
sudo chmod 0750 "${BLUESKY_KEYS_DIR}"
|
||||
shopt -s nullglob
|
||||
KEY_FILES=("${BLUESKY_KEYS_DIR}"/*.pem)
|
||||
if [[ ${#KEY_FILES[@]} -gt 0 ]]; then
|
||||
sudo chown root:65534 "${KEY_FILES[@]}"
|
||||
sudo chmod 0440 "${KEY_FILES[@]}"
|
||||
fi
|
||||
shopt -u nullglob
|
||||
|
||||
deploy_api_stack() {
|
||||
sudo mkdir -p "/opt/${STACK}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
|
||||
cd "/opt/${STACK}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
x-deploy-base: &deploy_base
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
|
||||
x-healthcheck: &healthcheck
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
services:
|
||||
app:
|
||||
image: ${IMAGE_TAG_APP}
|
||||
command: ['npm', 'run', 'start']
|
||||
environment:
|
||||
- FLUXER_CONFIG=/etc/fluxer/config.json
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
- ${BLUESKY_KEYS_DIR}:${BLUESKY_KEYS_DIR}:ro
|
||||
- /opt/geoip/GeoLite2-City.mmdb:/data/GeoLite2-City.mmdb:ro
|
||||
deploy:
|
||||
<<: *deploy_base
|
||||
replicas: 6
|
||||
labels:
|
||||
- "caddy=${CADDY_DOMAIN}"
|
||||
- 'caddy.reverse_proxy={{upstreams 8080}}'
|
||||
- 'caddy.header.Strict-Transport-Security="max-age=31536000; includeSubDomains; preload"'
|
||||
- 'caddy.header.X-Xss-Protection="1; mode=block"'
|
||||
- 'caddy.header.X-Content-Type-Options=nosniff'
|
||||
- 'caddy.header.Referrer-Policy=strict-origin-when-cross-origin'
|
||||
- 'caddy.header.X-Frame-Options=DENY'
|
||||
- 'caddy.header.Expect-Ct="max-age=86400, report-uri=\\"https://o4510149383094272.ingest.us.sentry.io/api/4510205804019712/security/?sentry_key=bb16e8b823b82d788db49a666b3b4b90\\""'
|
||||
networks:
|
||||
- fluxer-shared
|
||||
healthcheck: *healthcheck
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy --with-registry-auth --detach=false --resolve-image never -c compose.yaml "${STACK}"
|
||||
}
|
||||
|
||||
deploy_worker_stack() {
|
||||
sudo mkdir -p "/opt/${WORKER_STACK}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${WORKER_STACK}"
|
||||
cd "/opt/${WORKER_STACK}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
x-deploy-base: &deploy_base
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
|
||||
services:
|
||||
worker:
|
||||
image: ${IMAGE_TAG_WORKER}
|
||||
command: ['npm', 'run', 'start:worker']
|
||||
environment:
|
||||
- FLUXER_CONFIG=/etc/fluxer/config.json
|
||||
- SENTRY_RELEASE=${SENTRY_RELEASE}
|
||||
- SENTRY_BUILD_SHA=${SENTRY_BUILD_SHA}
|
||||
- SENTRY_BUILD_NUMBER=${SENTRY_BUILD_NUMBER}
|
||||
- SENTRY_BUILD_TIMESTAMP=${SENTRY_BUILD_TIMESTAMP}
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
deploy:
|
||||
<<: *deploy_base
|
||||
replicas: ${CANARY_WORKER_REPLICAS}
|
||||
networks:
|
||||
- fluxer-shared
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy --with-registry-auth --detach=false --resolve-image never -c compose.yaml "${WORKER_STACK}"
|
||||
}
|
||||
|
||||
deploy_api_stack
|
||||
|
||||
if [[ "${IS_CANARY}" == "true" ]]; then
|
||||
deploy_worker_stack
|
||||
fi
|
||||
REMOTE_EOF
|
||||
"""
|
||||
|
||||
# Standard deploy step table for the API: custom push-and-deploy script,
# with Sentry metadata enabled and the build-timestamp step disabled.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
    include_sentry=True,
    include_build_timestamp=False,
)
|
||||
|
||||
|
||||
def main() -> int:
    """Entry point: run the shared deploy workflow with this file's steps."""
    status = run_deploy_workflow(STEPS)
    return status
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
248
scripts/ci/workflows/deploy_app.py
Executable file
248
scripts/ci/workflows/deploy_app.py
Executable file
@@ -0,0 +1,248 @@
|
||||
#!/usr/bin/env python3
"""CI steps for building the Fluxer web app and deploying its Swarm stack.

Each entry in ``STEPS`` is a bash script executed by ``run_step``; the CI
workflow invokes this file once per step with ``--step <name>``.
"""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from ci_steps import (
    ADD_KNOWN_HOSTS_SCRIPT,
    INSTALL_DOCKER_PUSSH_SCRIPT,
    INSTALL_RCLONE_SCRIPT,
    record_deploy_commit_script,
    rclone_config_script,
    set_build_timestamp_script,
)
from ci_workflow import parse_step_env_args
from ci_utils import run_step


# NOTE(review): single backslash-newline sequences inside these plain
# (non-raw) triple-quoted strings are consumed by Python as line
# continuations; the shell receives one joined line, which is intentional.
STEPS: dict[str, str] = {
    # Install JS dependencies exactly as pinned in the lockfile.
    "install_dependencies": """
set -euo pipefail
cd fluxer_app
pnpm install --frozen-lockfile
""",
    # Extract and strictly compile i18n catalogs.
    "run_lingui": """
set -euo pipefail
cd fluxer_app
pnpm lingui:extract
pnpm lingui:compile --strict
""",
    "record_deploy_commit": record_deploy_commit_script(
        include_env=True,
        include_sentry=False,
    ),
    # Install wasm-pack only if missing (pinned version for reproducibility).
    "install_wasm_pack": """
set -euo pipefail
if ! command -v wasm-pack >/dev/null 2>&1; then
    cargo install wasm-pack --version 0.13.1
fi
""",
    "generate_wasm": """
set -euo pipefail
cd fluxer_app
pnpm wasm:codegen
""",
    "add_known_hosts": ADD_KNOWN_HOSTS_SCRIPT,
    # Pull the channel-specific runtime config off the target server so the
    # build can embed the public app settings.
    "fetch_deployment_config": """
set -euo pipefail
if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi
ssh "${SERVER}" "cat ${CONFIG_PATH}" > fluxer_app/config.json
""",
    # Build the app, then write dist/version.json describing this build
    # (sha falls back to `git rev-parse` when config omits build_sha).
    "build_application": """
set -euo pipefail
cd fluxer_app
pnpm build
node -e "const fs = require('fs'); const {execSync} = require('child_process'); const cfg = JSON.parse(fs.readFileSync(process.env.FLUXER_CONFIG, 'utf8')); const app = cfg.app_public || {}; let sha = app.build_sha || ''; if (!sha) { try { sha = execSync('git rev-parse --short HEAD', {stdio:['ignore','pipe','ignore']}).toString().trim(); } catch {} } const timestamp = Number(app.build_timestamp ?? Math.floor(Date.now() / 1000)); const buildNumber = Number(app.build_number ?? 0); const env = app.project_env ?? cfg.sentry?.release_channel ?? cfg.env ?? ''; const payload = { sha, buildNumber, timestamp, env }; fs.writeFileSync('dist/version.json', JSON.stringify(payload, null, 2));"
""",
    "install_rclone": INSTALL_RCLONE_SCRIPT,
    # Write the rclone S3 profile, then sync built assets to object storage.
    "upload_assets": rclone_config_script(
        endpoint="https://s3.us-east-va.io.cloud.ovh.us",
        acl="public-read",
        expand_vars=True,
    )
    + """
rclone copy fluxer_app/dist/assets ovh:fluxer-static/assets \
    --transfers 32 \
    --checkers 16 \
    --size-only \
    --fast-list \
    --s3-upload-concurrency 8 \
    --s3-chunk-size 16M \
    -v
""",
    "set_build_timestamp": set_build_timestamp_script(),
    "install_docker_pussh": INSTALL_DOCKER_PUSSH_SCRIPT,
    # Push the image over SSH, then render compose.yaml on the server and
    # deploy the stack.  The outer heredoc is quoted ('REMOTE_EOF') so all
    # expansion happens on the remote side from the env passed to bash.
    "push_and_deploy": """
set -euo pipefail

docker pussh "${IMAGE_TAG}" "${SERVER}"

ssh "${SERVER}" \
    "IMAGE_TAG=${IMAGE_TAG} SERVICE_NAME=${SERVICE_NAME} COMPOSE_STACK=${COMPOSE_STACK} RELEASE_CHANNEL=${RELEASE_CHANNEL} APP_REPLICAS=${APP_REPLICAS} bash" << 'REMOTE_EOF'
set -euo pipefail
if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi
read -r CADDY_APP_DOMAIN SENTRY_CADDY_DOMAIN <<EOF
$(python3 - <<'PY' "${CONFIG_PATH}"
import sys, json
from urllib.parse import urlparse
path = sys.argv[1]
with open(path, 'r') as f:
    cfg = json.load(f)
domain = cfg.get('domain', {})
overrides = cfg.get('endpoint_overrides', {})

def build_url(scheme, base_domain, port, path=''):
    standard = (scheme == 'http' and port == 80) or (scheme == 'https' and port == 443) or (scheme == 'ws' and port == 80) or (scheme == 'wss' and port == 443)
    port_part = f":{port}" if port and not standard else ""
    return f"{scheme}://{base_domain}{port_part}{path}"

def derive_domain(key):
    if key == 'cdn':
        return domain.get('cdn_domain') or domain.get('base_domain')
    if key == 'invite':
        return domain.get('invite_domain') or domain.get('base_domain')
    if key == 'gift':
        return domain.get('gift_domain') or domain.get('base_domain')
    return domain.get('base_domain')

public_scheme = domain.get('public_scheme', 'https')
public_port = domain.get('public_port', 443 if public_scheme == 'https' else 80)

derived_app = build_url(public_scheme, derive_domain('app'), public_port)
app_url = (overrides.get('app') or derived_app).strip()
parsed_app = urlparse(app_url)
app_host = parsed_app.netloc or parsed_app.path
sentry_host_raw = (cfg.get('services', {}).get('app_proxy', {}).get('sentry_report_host') or '').strip()
if sentry_host_raw and not sentry_host_raw.startswith('http'):
    sentry_host_raw = f"https://{sentry_host_raw}"

sentry_host = urlparse(sentry_host_raw).netloc if sentry_host_raw else ''
print(f"{app_host} {sentry_host}")
PY
)
EOF
if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
    API_TARGET="fluxer-api-canary_app"
else
    API_TARGET="fluxer-api_app"
fi
SENTRY_REPORT_HOST="$(
python3 - <<'PY' "${CONFIG_PATH}"
import sys, json
path = sys.argv[1]
with open(path, 'r') as f:
    cfg = json.load(f)
app_proxy = cfg.get('services', {}).get('app_proxy', {})
host = (app_proxy.get('sentry_report_host') or '').rstrip('/')
print(host)
PY
)"
sudo mkdir -p "/opt/${SERVICE_NAME}"
sudo chown -R "${USER}:${USER}" "/opt/${SERVICE_NAME}"
cd "/opt/${SERVICE_NAME}"

cat > compose.yaml << COMPOSEEOF
x-deploy-base: &deploy_base
  restart_policy:
    condition: on-failure
    delay: 5s
    max_attempts: 3
  update_config:
    parallelism: 1
    delay: 10s
    order: start-first
  rollback_config:
    parallelism: 1
    delay: 10s

x-common-caddy-headers: &common_caddy_headers
  caddy.header.Strict-Transport-Security: "max-age=31536000; includeSubDomains; preload"
  caddy.header.X-Xss-Protection: "1; mode=block"
  caddy.header.X-Content-Type-Options: "nosniff"
  caddy.header.Referrer-Policy: "strict-origin-when-cross-origin"
  caddy.header.X-Frame-Options: "DENY"
  caddy.header.Expect-Ct: "max-age=86400, report-uri=\\"${SENTRY_REPORT_HOST}/api/4510205815291904/security/?sentry_key=59ced0e2666ab83dd1ddb056cdd22d1b\\""
  caddy.header.Cache-Control: "no-store, no-cache, must-revalidate"
  caddy.header.Pragma: "no-cache"
  caddy.header.Expires: "0"

x-env-base: &env_base
  FLUXER_CONFIG: /etc/fluxer/config.json

x-healthcheck: &healthcheck
  test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
  interval: 30s
  timeout: 10s
  retries: 3
  start_period: 40s

services:
  app:
    image: ${IMAGE_TAG}
    volumes:
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    deploy:
      <<: *deploy_base
      replicas: ${APP_REPLICAS}
      labels:
        <<: *common_caddy_headers
        caddy: ${CADDY_APP_DOMAIN}
        caddy.redir: "/.well-known/fluxer /api/.well-known/fluxer 301"
        caddy.handle_path_0: /api*
        caddy.handle_path_0.reverse_proxy: "http://${API_TARGET}:8080"
        caddy.reverse_proxy: "{{upstreams 8080}}"
    environment:
      <<: *env_base
    networks: [fluxer-shared]
    healthcheck: *healthcheck

  sentry:
    image: ${IMAGE_TAG}
    volumes:
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    deploy:
      <<: *deploy_base
      replicas: 1
      labels:
        <<: *common_caddy_headers
        caddy: ${SENTRY_CADDY_DOMAIN}
        caddy.reverse_proxy: "{{upstreams 8080}}"
    environment:
      <<: *env_base
    networks: [fluxer-shared]
    healthcheck: *healthcheck

networks:
  fluxer-shared:
    external: true
COMPOSEEOF

docker stack deploy \
    --with-registry-auth \
    --detach=false \
    --resolve-image never \
    -c compose.yaml \
    "${COMPOSE_STACK}"
REMOTE_EOF
""",
}


def main() -> int:
    """Entry point: run the single step named by the CLI arguments."""
    args = parse_step_env_args(include_server_ip=True)
    run_step(STEPS, args.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
156
scripts/ci/workflows/deploy_gateway.py
Executable file
156
scripts/ci/workflows/deploy_gateway.py
Executable file
@@ -0,0 +1,156 @@
|
||||
#!/usr/bin/env python3
"""CI steps for hot-reloading the Erlang gateway service in place.

Instead of redeploying the container, changed BEAM files are detected by
comparing local vs. in-container module MD5s and pushed through the
gateway's ``/_admin/reload`` endpoint.
"""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from ci_steps import ADD_KNOWN_HOSTS_SCRIPT, record_deploy_commit_script
from ci_workflow import parse_step_env_args
from ci_utils import run_step


STEPS: dict[str, str] = {
    "compile": """
set -euo pipefail
cd fluxer_gateway
rebar3 as prod compile
""",
    "add_known_hosts": ADD_KNOWN_HOSTS_SCRIPT,
    "record_deploy_commit": record_deploy_commit_script(
        include_env=False,
        include_sentry=False,
    ),
    # Hot-reload pipeline: find the running container, hash local and
    # remote BEAMs, diff them, then POST the changed modules — the
    # hot_reload machinery itself first ("self"), everything else second.
    "deploy": """
set -euo pipefail

CONTAINER_ID="$(ssh "${SERVER}" "docker ps -q --filter label=com.docker.swarm.service.name=fluxer-gateway_app | head -1")"
if [ -z "${CONTAINER_ID}" ]; then
    echo "::error::No running container found for service fluxer-gateway_app"
    ssh "${SERVER}" "docker ps --filter 'name=fluxer-gateway_app' --format '{{.ID}} {{.Names}} {{.Status}}'" || true
    exit 1
fi
echo "Container: ${CONTAINER_ID}"

GATEWAY_HTTP_PORT="8080"
echo "Gateway HTTP port: ${GATEWAY_HTTP_PORT}"
if ! ssh "${SERVER}" "docker exec ${CONTAINER_ID} curl -fsS --max-time 3 http://localhost:${GATEWAY_HTTP_PORT}/_health >/dev/null"; then
    echo "::error::Gateway HTTP listener is not reachable on port ${GATEWAY_HTTP_PORT}"
    exit 1
fi

LOCAL_MD5_LINES="$(
erl -noshell -eval '
Files = filelib:wildcard("fluxer_gateway/_build/prod/lib/fluxer_gateway/ebin/*.beam"),
lists:foreach(
    fun(F) ->
        {ok, {M, Md5}} = beam_lib:md5(F),
        Hex = binary:encode_hex(Md5, lowercase),
        io:format("~s ~s ~s~n", [atom_to_list(M), binary_to_list(Hex), F])
    end,
    Files
),
halt().'
)"

REMOTE_MD5_LINES="$(
ssh "${SERVER}" "docker exec ${CONTAINER_ID} /opt/fluxer_gateway/bin/fluxer_gateway eval '
Mods = hot_reload:get_loaded_modules(),
lists:foreach(
    fun(M) ->
        case hot_reload:get_module_info(M) of
            {ok, Info} ->
                V = maps:get(loaded_md5, Info),
                S = case V of
                    null -> \"null\";
                    B when is_binary(B) -> binary_to_list(B)
                end,
                io:format(\"~s ~s~n\", [atom_to_list(M), S]);
            _ ->
                ok
        end
    end,
    Mods
),
ok.
' " | tr -d '\r'
)"

LOCAL_MD5_FILE="$(mktemp)"
REMOTE_MD5_FILE="$(mktemp)"
CHANGED_FILE_LIST="$(mktemp)"
CHANGED_MAIN_LIST="$(mktemp)"
CHANGED_SELF_LIST="$(mktemp)"
RELOAD_RESULT_MAIN="$(mktemp)"
RELOAD_RESULT_SELF="$(mktemp)"
trap 'rm -f "${LOCAL_MD5_FILE}" "${REMOTE_MD5_FILE}" "${CHANGED_FILE_LIST}" "${CHANGED_MAIN_LIST}" "${CHANGED_SELF_LIST}" "${RELOAD_RESULT_MAIN}" "${RELOAD_RESULT_SELF}"' EXIT

printf '%s' "${LOCAL_MD5_LINES}" > "${LOCAL_MD5_FILE}"
printf '%s' "${REMOTE_MD5_LINES}" > "${REMOTE_MD5_FILE}"

python3 scripts/ci/erlang_hot_reload.py diff-md5 \
    "${LOCAL_MD5_FILE}" \
    "${REMOTE_MD5_FILE}" \
    "${CHANGED_FILE_LIST}"

mapfile -t CHANGED_FILES < "${CHANGED_FILE_LIST}"

if [ "${#CHANGED_FILES[@]}" -eq 0 ]; then
    echo "No BEAM changes detected, nothing to hot-reload."
    exit 0
fi

echo "Changed modules count: ${#CHANGED_FILES[@]}"

while IFS= read -r p; do
    [ -n "${p}" ] || continue
    m="$(basename "${p}")"
    m="${m%.beam}"
    if [ "${m}" = "hot_reload" ] || [ "${m}" = "hot_reload_handler" ]; then
        printf '%s\n' "${p}" >> "${CHANGED_SELF_LIST}"
    else
        printf '%s\n' "${p}" >> "${CHANGED_MAIN_LIST}"
    fi
done < "${CHANGED_FILE_LIST}"

build_json() {
    python3 scripts/ci/erlang_hot_reload.py build-json "$1"
}

strict_verify() {
    python3 scripts/ci/erlang_hot_reload.py verify --mode strict
}

self_verify() {
    python3 scripts/ci/erlang_hot_reload.py verify --mode self
}

if [ -s "${CHANGED_SELF_LIST}" ]; then
    if ! build_json "${CHANGED_SELF_LIST}" | ssh "${SERVER}" "docker exec -i ${CONTAINER_ID} curl -sS -X POST -H 'Authorization: Bearer ${GATEWAY_ADMIN_SECRET}' -H 'Content-Type: application/json' --data @- http://localhost:${GATEWAY_HTTP_PORT}/_admin/reload" | tee "${RELOAD_RESULT_SELF}" | self_verify; then
        echo "::group::Hot reload response (self)"
        cat "${RELOAD_RESULT_SELF}" || true
        echo "::endgroup::"
        exit 1
    fi
fi

if [ -s "${CHANGED_MAIN_LIST}" ]; then
    if ! build_json "${CHANGED_MAIN_LIST}" | ssh "${SERVER}" "docker exec -i ${CONTAINER_ID} curl -sS -X POST -H 'Authorization: Bearer ${GATEWAY_ADMIN_SECRET}' -H 'Content-Type: application/json' --data @- http://localhost:${GATEWAY_HTTP_PORT}/_admin/reload" | tee "${RELOAD_RESULT_MAIN}" | strict_verify; then
        echo "::group::Hot reload response (main)"
        cat "${RELOAD_RESULT_MAIN}" || true
        echo "::endgroup::"
        exit 1
    fi
fi
""",
}


def main() -> int:
    """Entry point: run the single step named by the CLI arguments."""
    args = parse_step_env_args(include_server_ip=True)
    run_step(STEPS, args.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
87
scripts/ci/workflows/deploy_kv.py
Executable file
87
scripts/ci/workflows/deploy_kv.py
Executable file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env python3
"""Deploy workflow for the Fluxer KV service Swarm stack."""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow


# Push the image to the server, render compose.yaml there, and deploy.
# The 'REMOTE_EOF' heredoc is quoted, so expansion happens on the remote
# host from the env vars passed to bash on the ssh command line.
PUSH_AND_DEPLOY_SCRIPT = """
set -euo pipefail

docker pussh "${IMAGE_TAG}" "${SERVER}"

ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} STACK=${STACK} IS_CANARY=${IS_CANARY} bash" << 'REMOTE_EOF'
set -euo pipefail

if [[ "${IS_CANARY}" == "true" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi

sudo mkdir -p "/opt/${STACK}"
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
cd "/opt/${STACK}"

cat > compose.yaml << COMPOSEEOF
x-deploy-base: &deploy_base
  restart_policy:
    condition: on-failure
    delay: 5s
    max_attempts: 3
  update_config:
    parallelism: 1
    delay: 10s
    order: start-first
  rollback_config:
    parallelism: 1
    delay: 10s

x-healthcheck: &healthcheck
  test: ['CMD', 'curl', '-f', 'http://localhost:6380/_health']
  interval: 30s
  timeout: 10s
  retries: 3
  start_period: 5s

services:
  app:
    image: ${IMAGE_TAG}
    deploy:
      <<: *deploy_base
      replicas: 1
    environment:
      - FLUXER_CONFIG=/etc/fluxer/config.json
    volumes:
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    networks: [fluxer-shared]
    healthcheck: *healthcheck

networks:
  fluxer-shared:
    external: true
COMPOSEEOF

docker stack deploy \
    --with-registry-auth \
    --detach=false \
    --resolve-image never \
    -c compose.yaml \
    "${STACK}"
REMOTE_EOF
"""

# Standard build/push/deploy step registry with the script above spliced in.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
)


def main() -> int:
    """Entry point: dispatch the CI step named on the command line."""
    return run_deploy_workflow(STEPS)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
132
scripts/ci/workflows/deploy_marketing.py
Executable file
132
scripts/ci/workflows/deploy_marketing.py
Executable file
@@ -0,0 +1,132 @@
|
||||
#!/usr/bin/env python3
"""Deploy workflow for the marketing site Swarm stack.

The compose file is assembled on the remote host in three parts: a shared
base, a channel-specific block of Caddy labels (canary gets noindex +
canary redirects; stable gets the full vanity-domain redirect set), and a
shared tail (networks + healthcheck).
"""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow


PUSH_AND_DEPLOY_SCRIPT = """
set -euo pipefail

docker pussh "${IMAGE_TAG}" "${SERVER}"

ssh "${SERVER}" \
    "IMAGE_TAG=${IMAGE_TAG} STACK=${STACK} IS_CANARY=${IS_CANARY} CADDY_DOMAIN=${CADDY_DOMAIN} RELEASE_CHANNEL=${RELEASE_CHANNEL} APP_REPLICAS=${APP_REPLICAS} bash" << 'REMOTE_EOF'
set -euo pipefail

if [[ "${IS_CANARY}" == "true" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi

sudo mkdir -p "/opt/${STACK}"
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
cd "/opt/${STACK}"

cat > compose.yaml << COMPOSEEOF
services:
  app:
    image: ${IMAGE_TAG}
    environment:
      - FLUXER_CONFIG=/etc/fluxer/config.json
    volumes:
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    deploy:
      replicas: ${APP_REPLICAS}
      restart_policy:
        condition: on-failure
        delay: 5s
        max_attempts: 3
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      rollback_config:
        parallelism: 1
        delay: 10s
      labels:
        caddy: "${CADDY_DOMAIN}"
        caddy.reverse_proxy: "{{upstreams 8080}}"
        caddy.header.Strict-Transport-Security: "max-age=31536000; includeSubDomains; preload"
        caddy.header.X-Xss-Protection: "1; mode=block"
        caddy.header.X-Content-Type-Options: "nosniff"
        caddy.header.Referrer-Policy: "strict-origin-when-cross-origin"
        caddy.header.X-Frame-Options: "DENY"
COMPOSEEOF

if [[ "${IS_CANARY}" == "true" ]]; then
    cat >> compose.yaml << 'COMPOSEEOF'
        caddy.header.X-Robots-Tag: "noindex, nofollow, nosnippet, noimageindex"
        caddy.@channels.path: "/channels /channels/*"
        caddy.redir: "@channels https://web.canary.fluxer.app{uri}"
        caddy.redir_0: "/.well-known/fluxer https://api.canary.fluxer.app/.well-known/fluxer 301"
COMPOSEEOF
else
    cat >> compose.yaml << 'COMPOSEEOF'
        caddy.redir_0: "/channels/* https://web.fluxer.app{uri}"
        caddy.redir_1: "/channels https://web.fluxer.app{uri}"
        caddy.redir_2: "/delete-my-account /help/delete-account 302"
        caddy.redir_3: "/delete-my-data /help/data-deletion 302"
        caddy.redir_4: "/export-my-data /help/data-export 302"
        caddy.redir_5: "/bugs /help/report-bug 302"
        caddy_1: "www.fluxer.app"
        caddy_1.redir: "https://fluxer.app{uri}"
        caddy_3: "fluxer.gg"
        caddy_3.@fluxer_gg_root.path: "/"
        caddy_3.redir_0: "@fluxer_gg_root https://fluxer.app"
        caddy_3.redir_1: "https://web.fluxer.app/invite{uri}"
        caddy_4: "fluxer.gift"
        caddy_4.@fluxer_gift_root.path: "/"
        caddy_4.redir_0: "@fluxer_gift_root https://fluxer.app"
        caddy_4.redir_1: "https://web.fluxer.app/gift{uri}"
        caddy_5: "fluxerapp.com"
        caddy_5.redir: "https://fluxer.app{uri}"
        caddy_6: "www.fluxerapp.com"
        caddy_6.redir: "https://fluxer.app{uri}"
        caddy_7: "fluxer.dev"
        caddy_7.redir: "https://docs.fluxer.app{uri}"
        caddy_8: "www.fluxer.dev"
        caddy_8.redir: "https://docs.fluxer.app{uri}"
        caddy.redir_9: "/.well-known/fluxer https://api.fluxer.app/.well-known/fluxer 301"
COMPOSEEOF
fi

cat >> compose.yaml << 'COMPOSEEOF'
    networks:
      - fluxer-shared
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:8080/']
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

networks:
  fluxer-shared:
    external: true
COMPOSEEOF

docker stack deploy \
    --with-registry-auth \
    --detach=false \
    --resolve-image never \
    -c compose.yaml \
    "${STACK}"
REMOTE_EOF
"""

# Standard build/push/deploy step registry with the script above spliced in.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
)


def main() -> int:
    """Entry point: dispatch the CI step named on the command line."""
    return run_deploy_workflow(STEPS)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
88
scripts/ci/workflows/deploy_media_proxy.py
Executable file
88
scripts/ci/workflows/deploy_media_proxy.py
Executable file
@@ -0,0 +1,88 @@
|
||||
#!/usr/bin/env python3
"""Deploy workflow for the media proxy Swarm stack."""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow


# Push the image to the server, render compose.yaml there, and deploy.
# The 'REMOTE_EOF' heredoc is quoted, so expansion happens on the remote
# host from the env vars passed to bash on the ssh command line.
PUSH_AND_DEPLOY_SCRIPT = """
set -euo pipefail

docker pussh "${IMAGE_TAG}" "${SERVER}"

ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} SERVICE_NAME=${SERVICE_NAME} COMPOSE_STACK=${COMPOSE_STACK} RELEASE_CHANNEL=${RELEASE_CHANNEL} bash" << 'REMOTE_EOF'
set -euo pipefail

if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi
sudo mkdir -p "/opt/${SERVICE_NAME}"
sudo chown -R "${USER}:${USER}" "/opt/${SERVICE_NAME}"
cd "/opt/${SERVICE_NAME}"

cat > compose.yaml << COMPOSEEOF
services:
  app:
    image: ${IMAGE_TAG}
    command: ['pnpm', 'start']
    environment:
      - FLUXER_CONFIG=/etc/fluxer/config.json
    volumes:
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    deploy:
      replicas: 2
      restart_policy:
        condition: on-failure
        delay: 5s
        max_attempts: 3
      update_config:
        parallelism: 1
        delay: 10s
        order: start-first
      rollback_config:
        parallelism: 1
        delay: 10s
      labels:
        - 'caddy=http://fluxerusercontent.com'
        - 'caddy.reverse_proxy={{upstreams 8080}}'
        - 'caddy.header.X-Robots-Tag="noindex, nofollow, nosnippet, noimageindex"'
        - 'caddy.header.Strict-Transport-Security="max-age=31536000; includeSubDomains; preload"'
        - 'caddy.header.X-Xss-Protection="1; mode=block"'
        - 'caddy.header.X-Content-Type-Options=nosniff'
        - 'caddy.header.Referrer-Policy=strict-origin-when-cross-origin'
        - 'caddy.header.X-Frame-Options=DENY'
        - 'caddy.header.Expect-Ct="max-age=86400, report-uri=\"https://o4510149383094272.ingest.us.sentry.io/api/4510205811556352/security/?sentry_key=2670068cd12b6a62f3a30a7f0055f0f1\""'
    networks:
      - fluxer-shared
    healthcheck:
      test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 40s

networks:
  fluxer-shared:
    external: true
COMPOSEEOF

docker stack deploy --with-registry-auth --detach=false --resolve-image never -c compose.yaml "${COMPOSE_STACK}"
REMOTE_EOF
"""

# Standard build/push/deploy step registry with the script above spliced in.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
)


def main() -> int:
    """Entry point: dispatch the CI step named on the command line."""
    return run_deploy_workflow(STEPS)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
92
scripts/ci/workflows/deploy_queue.py
Executable file
92
scripts/ci/workflows/deploy_queue.py
Executable file
@@ -0,0 +1,92 @@
|
||||
#!/usr/bin/env python3
"""Deploy workflow for the queue service Swarm stack.

Unlike the other stacks, the queue keeps durable state in a named
``queue_data`` volume and runs a single replica.
"""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow


PUSH_AND_DEPLOY_SCRIPT = """
set -euo pipefail

docker pussh "${IMAGE_TAG}" "${SERVER}"

ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} STACK=${STACK} RELEASE_CHANNEL=${RELEASE_CHANNEL} IS_CANARY=${IS_CANARY} bash" << 'REMOTE_EOF'
set -euo pipefail

if [[ "${IS_CANARY}" == "true" ]]; then
    CONFIG_PATH="/etc/fluxer/config.canary.json"
else
    CONFIG_PATH="/etc/fluxer/config.stable.json"
fi

sudo mkdir -p "/opt/${STACK}"
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
cd "/opt/${STACK}"

cat > compose.yaml << COMPOSEEOF
x-deploy-base: &deploy_base
  restart_policy:
    condition: on-failure
    delay: 5s
    max_attempts: 3
  update_config:
    parallelism: 1
    delay: 10s
    order: start-first
  rollback_config:
    parallelism: 1
    delay: 10s

x-healthcheck: &healthcheck
  test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
  interval: 30s
  timeout: 10s
  retries: 3
  start_period: 5s

services:
  queue:
    image: ${IMAGE_TAG}
    deploy:
      <<: *deploy_base
      replicas: 1
    volumes:
      - queue_data:/data
      - ${CONFIG_PATH}:/etc/fluxer/config.json:ro
    environment:
      - FLUXER_CONFIG=/etc/fluxer/config.json
    networks: [fluxer-shared]
    healthcheck: *healthcheck

volumes:
  queue_data:
    driver: local

networks:
  fluxer-shared:
    external: true
COMPOSEEOF

docker stack deploy \
    --with-registry-auth \
    --detach=false \
    --resolve-image never \
    -c compose.yaml \
    "${STACK}"
REMOTE_EOF
"""

# Standard build/push/deploy step registry with the script above spliced in.
STEPS = build_standard_deploy_steps(
    push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
)


def main() -> int:
    """Entry point: dispatch the CI step named on the command line."""
    return run_deploy_workflow(STEPS)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
149
scripts/ci/workflows/deploy_relay.py
Executable file
149
scripts/ci/workflows/deploy_relay.py
Executable file
@@ -0,0 +1,149 @@
|
||||
#!/usr/bin/env python3
"""CI steps for hot-reloading the Erlang relay service in place.

Mirrors deploy_gateway.py: changed BEAM files are detected by comparing
local vs. in-container module MD5s and pushed through the relay's
``/_admin/reload`` endpoint (admin API on port 8081).
"""

import pathlib
import sys

# Make the shared CI helpers (one directory up) importable.
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))

from ci_steps import ADD_KNOWN_HOSTS_SCRIPT, record_deploy_commit_script
from ci_workflow import parse_step_env_args
from ci_utils import run_step


STEPS: dict[str, str] = {
    "compile": """
set -euo pipefail
cd fluxer_relay
rebar3 as prod compile
""",
    "add_known_hosts": ADD_KNOWN_HOSTS_SCRIPT,
    "record_deploy_commit": record_deploy_commit_script(
        include_env=False,
        include_sentry=False,
    ),
    # Hot-reload pipeline: find the running container, hash local and
    # remote BEAMs, diff them, then POST the changed modules — the
    # hot_reload machinery itself first ("self"), everything else second.
    "deploy": """
set -euo pipefail

CONTAINER_ID="$(ssh "${SERVER}" "docker ps -q --filter label=com.docker.swarm.service.name=fluxer_relay_app | head -1")"
if [ -z "${CONTAINER_ID}" ]; then
    echo "::error::No running container found for service fluxer_relay_app"
    ssh "${SERVER}" "docker ps --filter 'name=fluxer_relay_app' --format '{{.ID}} {{.Names}} {{.Status}}'" || true
    exit 1
fi
echo "Container: ${CONTAINER_ID}"

LOCAL_MD5_LINES="$(
erl -noshell -eval '
Files = filelib:wildcard("fluxer_relay/_build/prod/lib/fluxer_relay/ebin/*.beam"),
lists:foreach(
    fun(F) ->
        {ok, {M, Md5}} = beam_lib:md5(F),
        Hex = binary:encode_hex(Md5, lowercase),
        io:format("~s ~s ~s~n", [atom_to_list(M), binary_to_list(Hex), F])
    end,
    Files
),
halt().'
)"

REMOTE_MD5_LINES="$(
ssh "${SERVER}" "docker exec ${CONTAINER_ID} /opt/fluxer_relay/bin/fluxer_relay eval '
Mods = hot_reload:get_loaded_modules(),
lists:foreach(
    fun(M) ->
        case hot_reload:get_module_info(M) of
            {ok, Info} ->
                V = maps:get(loaded_md5, Info),
                S = case V of
                    null -> \"null\";
                    B when is_binary(B) -> binary_to_list(B)
                end,
                io:format(\"~s ~s~n\", [atom_to_list(M), S]);
            _ ->
                ok
        end
    end,
    Mods
),
ok.
' " | tr -d '\r'
)"

LOCAL_MD5_FILE="$(mktemp)"
REMOTE_MD5_FILE="$(mktemp)"
CHANGED_FILE_LIST="$(mktemp)"
CHANGED_MAIN_LIST="$(mktemp)"
CHANGED_SELF_LIST="$(mktemp)"
RELOAD_RESULT_MAIN="$(mktemp)"
RELOAD_RESULT_SELF="$(mktemp)"
trap 'rm -f "${LOCAL_MD5_FILE}" "${REMOTE_MD5_FILE}" "${CHANGED_FILE_LIST}" "${CHANGED_MAIN_LIST}" "${CHANGED_SELF_LIST}" "${RELOAD_RESULT_MAIN}" "${RELOAD_RESULT_SELF}"' EXIT

printf '%s' "${LOCAL_MD5_LINES}" > "${LOCAL_MD5_FILE}"
printf '%s' "${REMOTE_MD5_LINES}" > "${REMOTE_MD5_FILE}"

python3 scripts/ci/erlang_hot_reload.py diff-md5 \
    "${LOCAL_MD5_FILE}" \
    "${REMOTE_MD5_FILE}" \
    "${CHANGED_FILE_LIST}"

mapfile -t CHANGED_FILES < "${CHANGED_FILE_LIST}"

if [ "${#CHANGED_FILES[@]}" -eq 0 ]; then
    echo "No BEAM changes detected, nothing to hot-reload."
    exit 0
fi

echo "Changed modules count: ${#CHANGED_FILES[@]}"

while IFS= read -r p; do
    [ -n "${p}" ] || continue
    m="$(basename "${p}")"
    m="${m%.beam}"
    if [ "${m}" = "hot_reload" ] || [ "${m}" = "hot_reload_handler" ]; then
        printf '%s\n' "${p}" >> "${CHANGED_SELF_LIST}"
    else
        printf '%s\n' "${p}" >> "${CHANGED_MAIN_LIST}"
    fi
done < "${CHANGED_FILE_LIST}"

build_json() {
    python3 scripts/ci/erlang_hot_reload.py build-json "$1"
}

strict_verify() {
    python3 scripts/ci/erlang_hot_reload.py verify --mode strict
}

self_verify() {
    python3 scripts/ci/erlang_hot_reload.py verify --mode self
}

if [ -s "${CHANGED_SELF_LIST}" ]; then
    if ! build_json "${CHANGED_SELF_LIST}" | ssh "${SERVER}" "docker exec -i ${CONTAINER_ID} curl -sS -X POST -H 'Authorization: Bearer ${RELAY_ADMIN_SECRET}' -H 'Content-Type: application/json' --data @- http://localhost:8081/_admin/reload" | tee "${RELOAD_RESULT_SELF}" | self_verify; then
        echo "::group::Hot reload response (self)"
        cat "${RELOAD_RESULT_SELF}" || true
        echo "::endgroup::"
        exit 1
    fi
fi

if [ -s "${CHANGED_MAIN_LIST}" ]; then
    if ! build_json "${CHANGED_MAIN_LIST}" | ssh "${SERVER}" "docker exec -i ${CONTAINER_ID} curl -sS -X POST -H 'Authorization: Bearer ${RELAY_ADMIN_SECRET}' -H 'Content-Type: application/json' --data @- http://localhost:8081/_admin/reload" | tee "${RELOAD_RESULT_MAIN}" | strict_verify; then
        echo "::group::Hot reload response (main)"
        cat "${RELOAD_RESULT_MAIN}" || true
        echo "::endgroup::"
        exit 1
    fi
fi
""",
}


def main() -> int:
    """Entry point: run the single step named by the CLI arguments."""
    args = parse_step_env_args(include_server_ip=True)
    run_step(STEPS, args.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
87
scripts/ci/workflows/deploy_relay_directory.py
Executable file
87
scripts/ci/workflows/deploy_relay_directory.py
Executable file
@@ -0,0 +1,87 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow
|
||||
|
||||
|
||||
PUSH_AND_DEPLOY_SCRIPT = """
|
||||
set -euo pipefail
|
||||
|
||||
docker pussh "${IMAGE_TAG}" "${SERVER}"
|
||||
|
||||
ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} STACK=${STACK} IS_CANARY=${IS_CANARY} bash" << 'REMOTE_EOF'
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${IS_CANARY}" == "true" ]]; then
|
||||
CONFIG_PATH="/etc/fluxer/config.canary.json"
|
||||
else
|
||||
CONFIG_PATH="/etc/fluxer/config.stable.json"
|
||||
fi
|
||||
|
||||
sudo mkdir -p "/opt/${STACK}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${STACK}"
|
||||
cd "/opt/${STACK}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
x-deploy-base: &deploy_base
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
|
||||
x-healthcheck: &healthcheck
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 5s
|
||||
|
||||
services:
|
||||
app:
|
||||
image: ${IMAGE_TAG}
|
||||
deploy:
|
||||
<<: *deploy_base
|
||||
replicas: 1
|
||||
environment:
|
||||
- FLUXER_CONFIG=/etc/fluxer/config.json
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
networks: [fluxer-shared]
|
||||
healthcheck: *healthcheck
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy \
|
||||
--with-registry-auth \
|
||||
--detach=false \
|
||||
--resolve-image never \
|
||||
-c compose.yaml \
|
||||
"${STACK}"
|
||||
REMOTE_EOF
|
||||
"""
|
||||
|
||||
# Step table consumed by the generic deploy-workflow runner.
STEPS = build_standard_deploy_steps(push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT)


def main() -> int:
    """Run the relay-directory deploy workflow and return its exit code."""
    return run_deploy_workflow(STEPS)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
89
scripts/ci/workflows/deploy_static_proxy.py
Executable file
89
scripts/ci/workflows/deploy_static_proxy.py
Executable file
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow
|
||||
|
||||
|
||||
PUSH_AND_DEPLOY_SCRIPT = """
|
||||
set -euo pipefail
|
||||
|
||||
docker pussh "${IMAGE_TAG}" "${SERVER}"
|
||||
|
||||
ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} SERVICE_NAME=${SERVICE_NAME} COMPOSE_STACK=${COMPOSE_STACK} RELEASE_CHANNEL=${RELEASE_CHANNEL} bash" << 'REMOTE_EOF'
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
|
||||
CONFIG_PATH="/etc/fluxer/config.canary.json"
|
||||
else
|
||||
CONFIG_PATH="/etc/fluxer/config.stable.json"
|
||||
fi
|
||||
sudo mkdir -p "/opt/${SERVICE_NAME}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${SERVICE_NAME}"
|
||||
cd "/opt/${SERVICE_NAME}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
services:
|
||||
app:
|
||||
image: ${IMAGE_TAG}
|
||||
command: ['pnpm', 'start']
|
||||
environment:
|
||||
- FLUXER_CONFIG=/etc/fluxer/config.json
|
||||
- FLUXER_CONFIG__SERVICES__MEDIA_PROXY__STATIC_MODE=true
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
deploy:
|
||||
replicas: 2
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
labels:
|
||||
- 'caddy=http://fluxerstatic.com'
|
||||
- 'caddy.reverse_proxy={{upstreams 8080}}'
|
||||
- 'caddy.header.X-Robots-Tag="noindex, nofollow, nosnippet, noimageindex"'
|
||||
- 'caddy.header.Strict-Transport-Security="max-age=31536000; includeSubDomains; preload"'
|
||||
- 'caddy.header.X-Xss-Protection="1; mode=block"'
|
||||
- 'caddy.header.X-Content-Type-Options=nosniff'
|
||||
- 'caddy.header.Referrer-Policy=strict-origin-when-cross-origin'
|
||||
- 'caddy.header.X-Frame-Options=DENY'
|
||||
- 'caddy.header.Expect-Ct="max-age=86400, report-uri=\"https://o4510149383094272.ingest.us.sentry.io/api/4510205811556352/security/?sentry_key=2670068cd12b6a62f3a30a7f0055f0f1\""'
|
||||
networks:
|
||||
- fluxer-shared
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy --with-registry-auth --detach=false --resolve-image never -c compose.yaml "${COMPOSE_STACK}"
|
||||
REMOTE_EOF
|
||||
"""
|
||||
|
||||
STEPS = build_standard_deploy_steps(
|
||||
push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
|
||||
)
|
||||
|
||||
|
||||
def main() -> int:
|
||||
return run_deploy_workflow(STEPS)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
106
scripts/ci/workflows/migrate_cassandra.py
Executable file
106
scripts/ci/workflows/migrate_cassandra.py
Executable file
@@ -0,0 +1,106 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_steps import ADD_KNOWN_HOSTS_SCRIPT
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for the Cassandra migration workflow. Each script
# is run by run_step() with the workflow's environment variables set.
STEPS: dict[str, str] = {
    # Install API dependencies needed by the migration tool.
    "install_dependencies": """
set -euo pipefail
cd fluxer_api
pnpm install --frozen-lockfile
""",
    # Lint/validate migration files without touching the database.
    "validate_migrations": """
set -euo pipefail
cd fluxer_api
pnpm tsx scripts/CassandraMigrate.tsx check
""",
    # Shared step from ci_steps: add SERVER_IP to ~/.ssh/known_hosts.
    "add_known_hosts": ADD_KNOWN_HOSTS_SCRIPT,
    # Open a background SSH tunnel forwarding localhost:9042 to the server's
    # Cassandra port, record its PID (file + GITHUB_ENV), and wait up to 30s
    # for the local port to accept connections.
    "setup_tunnel": """
set -euo pipefail
TUNNEL_PID_FILE=/tmp/ssh-tunnel.pid
rm -f "$TUNNEL_PID_FILE"
nohup ssh -N -o ConnectTimeout=30 -o ServerAliveInterval=10 -o ServerAliveCountMax=30 -o ExitOnForwardFailure=yes -L 9042:localhost:9042 ${SERVER_USER}@${SERVER_IP} > /tmp/ssh-tunnel.log 2>&1 &
SSH_TUNNEL_PID=$!
printf '%s\n' "$SSH_TUNNEL_PID" > "$TUNNEL_PID_FILE"
printf 'SSH_TUNNEL_PID=%s\n' "$SSH_TUNNEL_PID" >> "$GITHUB_ENV"

for i in {1..30}; do
    if timeout 1 bash -c "echo > /dev/tcp/localhost/9042" 2>/dev/null; then
        echo "SSH tunnel established"
        break
    elif command -v ss >/dev/null 2>&1 && ss -tln | grep -q ":9042 "; then
        echo "SSH tunnel established"
        break
    elif command -v netstat >/dev/null 2>&1 && netstat -tln | grep -q ":9042 "; then
        echo "SSH tunnel established"
        break
    fi
    if [ $i -eq 30 ]; then
        cat /tmp/ssh-tunnel.log || true
        exit 1
    fi
    sleep 1
done

ps -p "$SSH_TUNNEL_PID" > /dev/null || exit 1
""",
    # Smoke-test connectivity through the tunnel before migrating.
    "test_connection": """
set -euo pipefail
cd fluxer_api
pnpm tsx scripts/CassandraMigrate.tsx \
    --host localhost \
    --port 9042 \
    --username "${CASSANDRA_USERNAME}" \
    --password "${CASSANDRA_PASSWORD}" \
    test
""",
    # Apply pending migrations ("up") through the tunnel.
    "run_migrations": """
set -euo pipefail
cd fluxer_api
pnpm tsx scripts/CassandraMigrate.tsx \
    --host localhost \
    --port 9042 \
    --username "${CASSANDRA_USERNAME}" \
    --password "${CASSANDRA_PASSWORD}" \
    up
""",
    # Best-effort teardown: kill the tunnel via the env PID and/or the PID
    # file, then remove both the PID file and the tunnel log.
    "close_tunnel": """
set -euo pipefail
TUNNEL_PID_FILE=/tmp/ssh-tunnel.pid

if [ -n "${SSH_TUNNEL_PID:-}" ]; then
    kill "$SSH_TUNNEL_PID" 2>/dev/null || true
fi

if [ -f "$TUNNEL_PID_FILE" ]; then
    read -r TUNNEL_PID < "$TUNNEL_PID_FILE" || true
    if [ -n "${TUNNEL_PID:-}" ]; then
        kill "$TUNNEL_PID" 2>/dev/null || true
    fi
fi

rm -f "$TUNNEL_PID_FILE" /tmp/ssh-tunnel.log || true
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Run one step of the Cassandra migration workflow."""
    extra_env = [EnvArg("--server-user", "SERVER_USER")]
    parsed = parse_step_env_args(extra_env, include_server_ip=True)
    run_step(STEPS, parsed.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
73
scripts/ci/workflows/promote_canary_to_main.py
Executable file
73
scripts/ci/workflows/promote_canary_to_main.py
Executable file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for the fast-forward branch promotion workflow.
STEPS: dict[str, str] = {
    # Check that dst can be fast-forwarded to src, export the commit count
    # as a step output, and write a markdown report to the job summary.
    "verify": """
set -euo pipefail
src="${SRC}"
dst="${DST}"

git fetch origin "${dst}" "${src}" --prune

# Ensure HEAD is exactly origin/src
git reset --hard "origin/${src}"

# FF-only requirement: dst must be an ancestor of src
if ! git merge-base --is-ancestor "origin/${dst}" "origin/${src}"; then
    echo "::error::Cannot fast-forward: origin/${dst} is not an ancestor of origin/${src} (branches diverged)."
    exit 1
fi

ahead="$(git rev-list --count "origin/${dst}..origin/${src}")"
echo "ahead=$ahead" >> "$GITHUB_OUTPUT"

{
    echo "## Promote \`${src}\` → \`${dst}\` (ff-only)"
    echo ""
    echo "- \`${dst}\`: \`$(git rev-parse "origin/${dst}")\`"
    echo "- \`${src}\`: \`$(git rev-parse "origin/${src}")\`"
    echo "- Commits to promote: **${ahead}**"
    echo ""
    echo "### Commits"
    if [ "$ahead" -eq 0 ]; then
        echo "_Nothing to promote._"
    else
        git log --oneline --decorate "origin/${dst}..origin/${src}"
    fi
} >> "$GITHUB_STEP_SUMMARY"
""",
    # Fast-forward dst to the current HEAD (which verify pinned to src).
    "push": """
set -euo pipefail
dst="${DST}"
# Push src HEAD to dst (no merge commit, same SHAs)
git push origin "HEAD:refs/heads/${dst}"
""",
    # Informational no-op used when dry_run is set or nothing is ahead.
    "dry_run": """
echo "No push performed (dry_run=${DRY_RUN}, ahead=${AHEAD})."
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Run one step of the canary-to-main promotion workflow."""
    env_args = [
        EnvArg("--src", "SRC"),
        EnvArg("--dst", "DST"),
        EnvArg("--dry-run", "DRY_RUN"),
        EnvArg("--ahead", "AHEAD"),
    ]
    parsed = parse_step_env_args(env_args)
    run_step(STEPS, parsed.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
122
scripts/ci/workflows/release_livekitctl.py
Normal file
122
scripts/ci/workflows/release_livekitctl.py
Normal file
@@ -0,0 +1,122 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from cli_release import (
|
||||
determine_cli_version,
|
||||
generate_checksums,
|
||||
prepare_release_assets,
|
||||
write_cli_version_outputs,
|
||||
)
|
||||
from ci_workflow import EnvArg, apply_env_args, build_step_parser
|
||||
from ci_utils import require_env, run_step
|
||||
|
||||
|
||||
BINARY_PREFIX = "livekitctl"
|
||||
TAG_PREFIX = "livekitctl-v"
|
||||
PROJECT_DIR = pathlib.Path("fluxer_devops/livekitctl")
|
||||
|
||||
|
||||
def determine_version_step() -> None:
    """Resolve the livekitctl release version from the triggering event.

    Reads EVENT_NAME (required) plus optional INPUT_VERSION / REF_NAME from
    the environment and publishes the resolved version via step outputs.
    """
    import os

    require_env(["EVENT_NAME"])
    env = os.environ
    version_info = determine_cli_version(
        event_name=env["EVENT_NAME"],
        input_version=env.get("INPUT_VERSION", ""),
        ref_name=env.get("REF_NAME", ""),
        tag_prefix=TAG_PREFIX,
    )
    write_cli_version_outputs(version_info)
|
||||
|
||||
|
||||
def build_binary_step() -> None:
    """Build the livekitctl Go binary named for the current GOOS/GOARCH.

    GOOS and GOARCH are expanded by the shell from the environment (the
    ``${{...}}`` doubling escapes them from the f-string).
    """
    # Local import matches the style of the other step functions in this file.
    from ci_utils import run_bash

    run_bash(
        f"""
set -euo pipefail
cd {PROJECT_DIR}
go build -ldflags=\"-s -w\" -o {BINARY_PREFIX}-${{GOOS}}-${{GOARCH}} .
"""
    )
|
||||
|
||||
|
||||
def prepare_release_assets_step(artifacts_dir: pathlib.Path, release_dir: pathlib.Path) -> None:
    """Stage downloaded build artifacts into the release directory layout."""
    prepare_release_assets(
        artifacts_dir=artifacts_dir, release_dir=release_dir, binary_prefix=BINARY_PREFIX
    )
|
||||
|
||||
|
||||
def generate_checksums_step(release_dir: pathlib.Path) -> None:
    """Write checksums.txt covering every livekitctl binary in release_dir."""
    binaries = release_dir.glob(f"{BINARY_PREFIX}-*")
    generate_checksums(binaries, release_dir / "checksums.txt")
|
||||
|
||||
|
||||
def create_tag_step() -> None:
    """Create and push an annotated git tag for the release.

    Requires TAG and VERSION in the environment; commits are authored as the
    github-actions bot.
    """
    # Only run_bash needs a local import; require_env is already imported at
    # module level (the original re-imported it redundantly here).
    from ci_utils import run_bash

    require_env(["TAG", "VERSION"])
    run_bash(
        """
set -euo pipefail
git config user.name "github-actions[bot]"
git config user.email "github-actions[bot]@users.noreply.github.com"
git tag -a "${TAG}" -m "Release livekitctl v${VERSION}"
git push origin "${TAG}"
"""
    )
|
||||
|
||||
|
||||
# Step name -> callable; dispatched by run_step for steps with no extra
# CLI arguments (prepare_release_assets / generate_checksums are special-cased
# in main because they take directory arguments).
STEPS = {
    "determine_version": determine_version_step,
    "build_binary": build_binary_step,
    "prepare_release_assets": prepare_release_assets_step,
    "generate_checksums": generate_checksums_step,
    "create_tag": create_tag_step,
}


# CLI flag -> environment variable mappings applied by apply_env_args.
ENV_ARGS = [
    EnvArg("--event-name", "EVENT_NAME"),
    EnvArg("--input-version", "INPUT_VERSION"),
    EnvArg("--ref-name", "REF_NAME"),
    EnvArg("--version", "VERSION"),
    EnvArg("--tag", "TAG"),
]


def parse_args():
    """Build the step parser and add the two directory options."""
    parser = build_step_parser(ENV_ARGS)
    parser.add_argument("--artifacts-dir", default="artifacts")
    parser.add_argument("--release-dir", default="release")
    return parser.parse_args()
|
||||
|
||||
|
||||
def main() -> int:
    """Dispatch the requested livekitctl release step."""
    args = parse_args()
    apply_env_args(args, ENV_ARGS)

    # The two directory-taking steps are invoked directly; everything else
    # goes through the generic step table.
    if args.step == "prepare_release_assets":
        prepare_release_assets_step(
            pathlib.Path(args.artifacts_dir),
            pathlib.Path(args.release_dir),
        )
    elif args.step == "generate_checksums":
        generate_checksums_step(pathlib.Path(args.release_dir))
    else:
        run_step(STEPS, args.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
26
scripts/ci/workflows/release_relay.py
Executable file
26
scripts/ci/workflows/release_relay.py
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from release_workflow import build_release_steps, run_release_workflow
|
||||
|
||||
|
||||
# Step table for the generic container release workflow (relay image).
STEPS = build_release_steps(
    title="Fluxer Relay release",
    image_name_env="IMAGE_NAME",
)


def main() -> int:
    """Run the generic release workflow for the relay image."""
    return run_release_workflow(
        title="Fluxer Relay release",
        image_name_arg="--image-name",
        image_name_env="IMAGE_NAME",
    )


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
26
scripts/ci/workflows/release_relay_directory.py
Normal file
26
scripts/ci/workflows/release_relay_directory.py
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from release_workflow import build_release_steps, run_release_workflow
|
||||
|
||||
|
||||
# Step table for the generic container release workflow (relay directory image).
STEPS = build_release_steps(
    title="Fluxer Relay Directory release",
    image_name_env="IMAGE_NAME",
)


def main() -> int:
    """Run the generic release workflow for the relay-directory image."""
    return run_release_workflow(
        title="Fluxer Relay Directory release",
        image_name_arg="--image-name",
        image_name_env="IMAGE_NAME",
    )


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
46
scripts/ci/workflows/release_server.py
Executable file
46
scripts/ci/workflows/release_server.py
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import EnvArg
|
||||
from ci_utils import require_env, write_github_output
|
||||
from release_workflow import build_release_steps, run_release_workflow
|
||||
|
||||
|
||||
def determine_build_targets_step() -> None:
    """Decide whether the server image should be built and export the flag.

    On workflow_dispatch the operator's BUILD_SERVER_INPUT is passed through
    verbatim; on any other event the server is always built.
    """
    import os

    require_env(["EVENT_NAME"])
    if os.environ["EVENT_NAME"] == "workflow_dispatch":
        server_flag = os.environ.get("BUILD_SERVER_INPUT", "")
    else:
        server_flag = "true"
    write_github_output({"server": server_flag})
|
||||
|
||||
|
||||
# Extra CLI flag -> env mappings used only by this workflow's custom step.
EXTRA_ENV_ARGS = [
    EnvArg("--event-name", "EVENT_NAME"),
    EnvArg("--build-server-input", "BUILD_SERVER_INPUT"),
]

# Standard release step table extended with the build-target decision step.
STEPS = build_release_steps(
    title="Fluxer Server release",
    image_name_env="IMAGE_NAME_SERVER",
    extra_steps={"determine_build_targets": determine_build_targets_step},
)
|
||||
|
||||
|
||||
def main() -> int:
    """Run the server release workflow with the extra build-target step."""
    extra = {"determine_build_targets": determine_build_targets_step}
    return run_release_workflow(
        title="Fluxer Server release",
        image_name_arg="--image-name-server",
        image_name_env="IMAGE_NAME_SERVER",
        extra_steps=extra,
        extra_env_args=EXTRA_ENV_ARGS,
    )


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
103
scripts/ci/workflows/restart_gateway.py
Executable file
103
scripts/ci/workflows/restart_gateway.py
Executable file
@@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import EnvArg
|
||||
from deploy_workflow import build_standard_deploy_steps, run_deploy_workflow
|
||||
|
||||
|
||||
VALIDATE_CONFIRMATION_STEP = """
|
||||
set -euo pipefail
|
||||
if [ "${CONFIRMATION}" != "RESTART" ]; then
|
||||
echo "::error::Confirmation failed. You must type 'RESTART' to proceed with a full restart."
|
||||
echo "::error::For regular updates, use deploy-gateway.yaml instead."
|
||||
exit 1
|
||||
fi
|
||||
"""
|
||||
|
||||
PUSH_AND_DEPLOY_SCRIPT = """
|
||||
set -euo pipefail
|
||||
|
||||
docker pussh "${IMAGE_TAG}" "${SERVER}"
|
||||
|
||||
ssh "${SERVER}" "IMAGE_TAG=${IMAGE_TAG} SERVICE_NAME=${SERVICE_NAME} COMPOSE_STACK=${COMPOSE_STACK} RELEASE_CHANNEL=${RELEASE_CHANNEL} bash" << 'REMOTE_EOF'
|
||||
set -euo pipefail
|
||||
|
||||
if [[ "${RELEASE_CHANNEL}" == "canary" ]]; then
|
||||
CONFIG_PATH="/etc/fluxer/config.canary.json"
|
||||
else
|
||||
CONFIG_PATH="/etc/fluxer/config.stable.json"
|
||||
fi
|
||||
sudo mkdir -p "/opt/${SERVICE_NAME}"
|
||||
sudo chown -R "${USER}:${USER}" "/opt/${SERVICE_NAME}"
|
||||
cd "/opt/${SERVICE_NAME}"
|
||||
|
||||
cat > compose.yaml << COMPOSEEOF
|
||||
services:
|
||||
app:
|
||||
image: ${IMAGE_TAG}
|
||||
hostname: "{{.Node.Hostname}}-{{.Task.Slot}}"
|
||||
environment:
|
||||
- FLUXER_CONFIG=/etc/fluxer/config.json
|
||||
- FLUXER_GATEWAY_NODE_FLAG=-sname
|
||||
- FLUXER_GATEWAY_NODE_NAME=fluxer_gateway_{{.Node.ID}}_{{.Task.Slot}}
|
||||
volumes:
|
||||
- ${CONFIG_PATH}:/etc/fluxer/config.json:ro
|
||||
deploy:
|
||||
replicas: 1
|
||||
endpoint_mode: dnsrr
|
||||
restart_policy:
|
||||
condition: on-failure
|
||||
delay: 5s
|
||||
max_attempts: 3
|
||||
update_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
order: start-first
|
||||
rollback_config:
|
||||
parallelism: 1
|
||||
delay: 10s
|
||||
labels:
|
||||
- 'caddy_gw=gateway.fluxer.app'
|
||||
- 'caddy_gw.reverse_proxy={{upstreams 8080}}'
|
||||
networks:
|
||||
- fluxer-shared
|
||||
healthcheck:
|
||||
test: ['CMD', 'curl', '-f', 'http://localhost:8080/_health']
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 60s
|
||||
|
||||
networks:
|
||||
fluxer-shared:
|
||||
external: true
|
||||
COMPOSEEOF
|
||||
|
||||
docker stack deploy --with-registry-auth --detach=false --resolve-image never -c compose.yaml "${COMPOSE_STACK}"
|
||||
REMOTE_EOF
|
||||
"""
|
||||
|
||||
# Standard deploy steps prefixed with the typed-confirmation guard; the
# build-timestamp step is omitted for restarts.
STEPS = {
    "validate_confirmation": VALIDATE_CONFIRMATION_STEP,
    **build_standard_deploy_steps(
        push_and_deploy_script=PUSH_AND_DEPLOY_SCRIPT,
        include_build_timestamp=False,
    ),
}


def main() -> int:
    """Run the gateway restart workflow (requires typed confirmation)."""
    confirmation_args = [EnvArg("--confirmation", "CONFIRMATION")]
    return run_deploy_workflow(STEPS, env_args=confirmation_args)


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
103
scripts/ci/workflows/sync_desktop.py
Executable file
103
scripts/ci/workflows/sync_desktop.py
Executable file
@@ -0,0 +1,103 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_steps import bot_user_id_script
|
||||
from ci_workflow import EnvArg, parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for mirroring fluxer_desktop/ into the separate
# fluxerapp/fluxer_desktop repository as the GitHub App bot.
STEPS: dict[str, str] = {
    # Resolve the App bot's numeric user id (used for the git author email).
    "get_user_id": bot_user_id_script(),
    # Choose the branch to sync: an explicit workflow input beats the
    # triggering ref.
    "determine_branch": """
set -euo pipefail
if [[ -n "${INPUT_BRANCH}" ]]; then
    echo "name=${INPUT_BRANCH}" >> "$GITHUB_OUTPUT"
else
    echo "name=${REF_NAME}" >> "$GITHUB_OUTPUT"
fi
""",
    # Clone the target repo; if the clone fails (e.g. empty repo), fall back
    # to initialising a fresh repo pointed at the same remote.
    "clone_target": """
set -euo pipefail
git clone --depth 1 "https://x-access-token:${TOKEN}@github.com/fluxerapp/fluxer_desktop.git" target || {
    mkdir target
    cd target
    git init
    git remote add origin "https://x-access-token:${TOKEN}@github.com/fluxerapp/fluxer_desktop.git"
}
""",
    # Commit author identity: the App bot account.
    "configure_git": """
set -euo pipefail
cd target
git config user.name "${APP_SLUG}[bot]"
git config user.email "${USER_ID}+${APP_SLUG}[bot]@users.noreply.github.com"
""",
    # Check out the remote branch if it exists, otherwise start an empty
    # orphan branch.
    "checkout_or_create_branch": """
set -euo pipefail
cd target
BRANCH="${BRANCH_NAME}"

if git ls-remote --exit-code --heads origin "$BRANCH" >/dev/null 2>&1; then
    git fetch origin "$BRANCH"
    git checkout "$BRANCH"
else
    git checkout --orphan "$BRANCH"
    git rm -rf . 2>/dev/null || true
fi
""",
    # Replace the working tree (except .git) with the source subdirectory.
    "sync_files": """
set -euo pipefail
find target -mindepth 1 -maxdepth 1 ! -name '.git' -exec rm -rf {} +
cp -a source/fluxer_desktop/. target/
""",
    # Commit and push only when something actually changed.
    "commit_and_push": """
set -euo pipefail
cd target
BRANCH="${BRANCH_NAME}"
SOURCE_SHA="$(git -C ../source rev-parse --short HEAD)"

git add -A

if git diff --cached --quiet; then
    echo "No changes to commit"
    exit 0
fi

git commit -m "Sync from fluxerapp/fluxer @ ${SOURCE_SHA}"
git push origin "HEAD:refs/heads/${BRANCH}"

echo "Synced to fluxerapp/fluxer_desktop:${BRANCH}"
""",
    # Markdown report for the job summary.
    "summary": """
set -euo pipefail
{
    echo "## Desktop Sync Complete"
    echo ""
    echo "- **Source:** \`fluxerapp/fluxer:${BRANCH_NAME}\`"
    echo "- **Destination:** \`fluxerapp/fluxer_desktop:${BRANCH_NAME}\`"
    echo "- **Commit:** \`$(git -C source rev-parse --short HEAD)\`"
} >> "$GITHUB_STEP_SUMMARY"
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Run one step of the desktop-repo sync workflow."""
    env_args = [
        EnvArg("--app-slug", "APP_SLUG"),
        EnvArg("--token", "TOKEN"),
        EnvArg("--user-id", "USER_ID"),
        EnvArg("--input-branch", "INPUT_BRANCH"),
        EnvArg("--ref-name", "REF_NAME"),
        EnvArg("--branch-name", "BRANCH_NAME"),
    ]
    parsed = parse_step_env_args(env_args)
    run_step(STEPS, parsed.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
34
scripts/ci/workflows/sync_static.py
Executable file
34
scripts/ci/workflows/sync_static.py
Executable file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_steps import INSTALL_RCLONE_SCRIPT, rclone_config_script
|
||||
from ci_workflow import parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for syncing static site files to object storage.
STEPS: dict[str, str] = {
    # Shared installer from ci_steps (no-op if rclone is already present).
    "install_rclone": INSTALL_RCLONE_SCRIPT,
    # Write the rclone config (with shell variable expansion enabled so
    # $RCLONE_ENDPOINT resolves at run time), then sync the source tree to
    # the bucket, excluding hashed assets.
    "push": rclone_config_script(
        endpoint="$RCLONE_ENDPOINT",
        acl="private",
        expand_vars=True,
    )
    + """
mkdir -p "$RCLONE_SOURCE_DIR"
rclone sync "$RCLONE_SOURCE" "$RCLONE_REMOTE:$RCLONE_BUCKET" --create-empty-src-dirs --exclude "assets/**"
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Dispatch the static-sync step named on the command line."""
    cli = parse_step_env_args()
    run_step(STEPS, cli.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
281
scripts/ci/workflows/test_cassandra_backup.py
Normal file
281
scripts/ci/workflows/test_cassandra_backup.py
Normal file
@@ -0,0 +1,281 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for the Cassandra backup restore test: fetch the
# newest encrypted backup, restore it into a throwaway Cassandra container,
# and verify the data is queryable.
STEPS = {
    # Derive a per-run working directory under RUNNER_TEMP.
    "set_temp_paths": """
set -euo pipefail
: "${RUNNER_TEMP:?RUNNER_TEMP is not set}"
echo "WORKDIR=$RUNNER_TEMP/cassandra-restore-test" >> "$GITHUB_ENV"
""",
    # Best-effort removal of leftovers from a previous run.
    "pre_clean": """
set -euo pipefail
docker rm -f "${CASS_CONTAINER}" "${UTIL_CONTAINER}" 2>/dev/null || true
docker volume rm "${CASS_VOLUME}" 2>/dev/null || true
docker volume rm "${BACKUP_VOLUME}" 2>/dev/null || true
rm -rf "${WORKDIR}" 2>/dev/null || true
""",
    "install_tools": """
set -euo pipefail
sudo apt-get update -y
sudo apt-get install -y --no-install-recommends rclone age ca-certificates
""",
    # Find the newest cassandra-backup-*.tar.age in the bucket, fail if it is
    # missing or older than 3 hours, download and age-decrypt it, and unpack
    # it into BACKUP_VOLUME (handling both nested and flat tar layouts).
    "fetch_backup": """
set -euo pipefail

rm -rf "$WORKDIR"
mkdir -p "$WORKDIR"

export RCLONE_CONFIG_B2S3_TYPE=s3
export RCLONE_CONFIG_B2S3_PROVIDER=Other
export RCLONE_CONFIG_B2S3_ACCESS_KEY_ID="${B2_KEY_ID}"
export RCLONE_CONFIG_B2S3_SECRET_ACCESS_KEY="${B2_APPLICATION_KEY}"
export RCLONE_CONFIG_B2S3_ENDPOINT="https://s3.eu-central-003.backblazeb2.com"
export RCLONE_CONFIG_B2S3_REGION="eu-central-003"
export RCLONE_CONFIG_B2S3_FORCE_PATH_STYLE=true

LATEST_BACKUP="$(
    rclone lsf "B2S3:fluxer" --recursive --files-only --fast-list \
    | grep -E '(^|/)cassandra-backup-[0-9]{8}-[0-9]{6}\.tar\.age$' \
    | sort -r \
    | head -n 1
)"

if [ -z "${LATEST_BACKUP}" ]; then
    echo "Error: No backup found in bucket"
    exit 1
fi

echo "LATEST_BACKUP=${LATEST_BACKUP}" >> "$GITHUB_ENV"

base="$(basename "${LATEST_BACKUP}")"
ts="${base#cassandra-backup-}"
ts="${ts%.tar.age}"

if ! [[ "$ts" =~ ^[0-9]{8}-[0-9]{6}$ ]]; then
    echo "Error: Could not extract timestamp from backup filename: ${base}"
    exit 1
fi

BACKUP_EPOCH="$(date -u -d "${ts:0:8} ${ts:9:2}:${ts:11:2}:${ts:13:2}" +%s)"
CURRENT_EPOCH="$(date -u +%s)"
AGE_HOURS=$(( (CURRENT_EPOCH - BACKUP_EPOCH) / 3600 ))

echo "Backup age: ${AGE_HOURS} hours"
if [ "${AGE_HOURS}" -ge 3 ]; then
    echo "Error: Latest backup is ${AGE_HOURS} hours old (threshold: 3 hours)"
    exit 1
fi

rclone copyto "B2S3:fluxer/${LATEST_BACKUP}" "${WORKDIR}/backup.tar.age" --fast-list

umask 077
printf '%s' "${AGE_PRIVATE_KEY}" > "${WORKDIR}/age.key"

docker volume create "${BACKUP_VOLUME}"

age -d -i "${WORKDIR}/age.key" "${WORKDIR}/backup.tar.age" \
| docker run --rm -i \
    -v "${BACKUP_VOLUME}:/backup" \
    --entrypoint bash \
    "${CASSANDRA_IMAGE}" -lc '
set -euo pipefail
rm -rf /backup/*
mkdir -p /backup/_tmp
tar -C /backup/_tmp -xf -

top="$(find /backup/_tmp -maxdepth 1 -mindepth 1 -type d -name "cassandra-backup-*" | head -n 1 || true)"

if [ -n "$top" ] && [ -f "$top/schema.cql" ]; then
    cp -a "$top"/. /backup/
elif [ -f /backup/_tmp/schema.cql ]; then
    cp -a /backup/_tmp/. /backup/
else
    echo "Error: schema.cql not found after extraction"
    find /backup/_tmp -maxdepth 3 -type f -print | sed -n "1,80p" || true
    exit 1
fi

rm -rf /backup/_tmp
'

docker run --rm \
    -v "${BACKUP_VOLUME}:/backup:ro" \
    --entrypoint bash \
    "${CASSANDRA_IMAGE}" -lc '
set -euo pipefail
test -f /backup/schema.cql
echo "Extracted backup layout (top 3 levels):"
find /backup -maxdepth 3 -type d -print | sed -n "1,200p" || true
echo "Sample SSTables (*Data.db):"
find /backup -type f -name "*Data.db" | sed -n "1,30p" || true
'
""",
    "create_data_volume": """
set -euo pipefail
docker volume create "${CASS_VOLUME}"
""",
    # Copy non-system keyspaces (plus system_schema) from the extracted
    # backup into the data volume, then promote each table's newest snapshot
    # files into the live table directory; fail if nothing was restored.
    "restore_keyspaces": """
set -euo pipefail

docker run --rm \
    --name "${UTIL_CONTAINER}" \
    -v "${CASS_VOLUME}:/var/lib/cassandra" \
    -v "${BACKUP_VOLUME}:/backup:ro" \
    --entrypoint bash \
    "${CASSANDRA_IMAGE}" -lc '
set -euo pipefail
shopt -s nullglob

BASE=/var/lib/cassandra
DATA_DIR="$BASE/data"
mkdir -p "$DATA_DIR" "$BASE/commitlog" "$BASE/hints" "$BASE/saved_caches"

ROOT=/backup
if [ -d "$ROOT/cassandra_data" ]; then ROOT="$ROOT/cassandra_data"; fi
if [ -d "$ROOT/data" ]; then ROOT="$ROOT/data"; fi

echo "Using backup ROOT=$ROOT"
echo "Restoring into DATA_DIR=$DATA_DIR"

restored=0
for keyspace_dir in "$ROOT"/*/; do
    [ -d "$keyspace_dir" ] || continue
    ks="$(basename "$keyspace_dir")"

    if [ "$ks" = "system_schema" ] || ! [[ "$ks" =~ ^system ]]; then
        echo "Restoring keyspace: $ks"
        rm -rf "$DATA_DIR/$ks"
        cp -a "$keyspace_dir" "$DATA_DIR/"
        restored=$((restored + 1))
    fi
done

if [ "$restored" -le 0 ]; then
    echo "Error: No keyspaces restored from backup root: $ROOT"
    echo "Debug: listing $ROOT:"
    ls -la "$ROOT" || true
    find "$ROOT" -maxdepth 2 -type d -print | sed -n "1,100p" || true
    exit 1
fi

promoted=0
for ks_dir in "$DATA_DIR"/*/; do
    [ -d "$ks_dir" ] || continue
    ks="$(basename "$ks_dir")"

    if [ "$ks" != "system_schema" ] && [[ "$ks" =~ ^system ]]; then
        continue
    fi

    for table_dir in "$ks_dir"*/; do
        [ -d "$table_dir" ] || continue

        snap_root="$table_dir/snapshots"
        [ -d "$snap_root" ] || continue

        latest_snap="$(ls -1d "$snap_root"/*/ 2>/dev/null | sort -r | head -n 1 || true)"
        [ -n "$latest_snap" ] || continue

        files=( "$latest_snap"* )
        if [ "${#files[@]}" -gt 0 ]; then
            cp -av "${files[@]}" "$table_dir"
            promoted=$((promoted + $(ls -1 "$latest_snap"/*Data.db 2>/dev/null | wc -l || true)))
        fi
    done
done

chown -R cassandra:cassandra "$BASE"

echo "Promoted Data.db files: $promoted"
if [ "$promoted" -le 0 ]; then
    echo "Error: No *Data.db files were promoted out of snapshots"
    echo "Debug: first snapshot dirs found:"
    find "$DATA_DIR" -type d -path "*/snapshots/*" | sed -n "1,50p" || true
    exit 1
fi
'
""",
    # Start Cassandra on the restored volume and poll (up to 150 x 2s) until
    # cqlsh answers; bail out early with diagnostics if the container dies.
    "start_cassandra": """
set -euo pipefail

docker run -d \
    --name "${CASS_CONTAINER}" \
    -v "${CASS_VOLUME}:/var/lib/cassandra" \
    -e MAX_HEAP_SIZE="${MAX_HEAP_SIZE}" \
    -e HEAP_NEWSIZE="${HEAP_NEWSIZE}" \
    -e JVM_OPTS="-Dcassandra.disable_mlock=true" \
    "${CASSANDRA_IMAGE}"

for i in $(seq 1 150); do
    status="$(docker inspect -f '{{.State.Status}}' "${CASS_CONTAINER}" 2>/dev/null || true)"
    if [ "${status}" != "running" ]; then
        docker inspect "${CASS_CONTAINER}" --format 'ExitCode={{.State.ExitCode}} OOMKilled={{.State.OOMKilled}} Error={{.State.Error}}' || true
        docker logs --tail 300 "${CASS_CONTAINER}" || true
        exit 1
    fi
    if docker exec "${CASS_CONTAINER}" cqlsh -e "SELECT now() FROM system.local;" >/dev/null 2>&1; then
        break
    fi
    sleep 2
done

docker exec "${CASS_CONTAINER}" cqlsh -e "SELECT now() FROM system.local;" >/dev/null 2>&1
""",
    # The restore passes if fluxer.users has a positive row count.
    "verify_data": """
set -euo pipefail

USER_COUNT=""
for i in $(seq 1 20); do
    USER_COUNT="$(
        docker exec "${CASS_CONTAINER}" cqlsh -e "SELECT COUNT(*) FROM fluxer.users;" 2>/dev/null \
        | awk "/^[[:space:]]*[0-9]+[[:space:]]*$/ {print \$1; exit}" || true
    )"
    if [ -n "${USER_COUNT}" ]; then
        break
    fi
    sleep 2
done

if [ -n "${USER_COUNT}" ] && [ "${USER_COUNT}" -gt 0 ] 2>/dev/null; then
    echo "Backup restore verification passed"
else
    echo "Backup restore verification failed"
    docker logs --tail 300 "${CASS_CONTAINER}" || true
    exit 1
fi
""",
    "cleanup": """
set -euo pipefail
docker rm -f "${CASS_CONTAINER}" 2>/dev/null || true
docker volume rm "${CASS_VOLUME}" 2>/dev/null || true
docker volume rm "${BACKUP_VOLUME}" 2>/dev/null || true
rm -rf "${WORKDIR}" 2>/dev/null || true
""",
    # Final human-readable status line based on JOB_STATUS.
    "report_status": """
set -euo pipefail
LATEST_BACKUP_NAME="${LATEST_BACKUP:-unknown}"
if [ "${JOB_STATUS}" = "success" ]; then
    echo "Backup ${LATEST_BACKUP_NAME} is valid and restorable"
else
    echo "Backup ${LATEST_BACKUP_NAME} test failed"
fi
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Dispatch the backup-test step named on the command line."""
    cli = parse_step_env_args()
    run_step(STEPS, cli.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
47
scripts/ci/workflows/update_word_lists.py
Executable file
47
scripts/ci/workflows/update_word_lists.py
Executable file
@@ -0,0 +1,47 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
sys.path.append(str(pathlib.Path(__file__).resolve().parents[1]))
|
||||
|
||||
from ci_workflow import parse_step_env_args
|
||||
from ci_utils import run_step
|
||||
|
||||
|
||||
# Step name -> bash script for refreshing the Tailscale word lists vendored
# into fluxer_api.
STEPS: dict[str, str] = {
    # Fetch the upstream word lists into /tmp.
    "download": """
set -euo pipefail
curl -fsSL https://raw.githubusercontent.com/tailscale/tailscale/refs/heads/main/words/scales.txt -o /tmp/scales.txt
curl -fsSL https://raw.githubusercontent.com/tailscale/tailscale/refs/heads/main/words/tails.txt -o /tmp/tails.txt
""",
    # Diff against the vendored copies and expose changes_detected as a
    # step output for the workflow to branch on.
    "check_changes": """
set -euo pipefail
if ! diff -q /tmp/scales.txt fluxer_api/src/words/scales.txt > /dev/null 2>&1 || \
   ! diff -q /tmp/tails.txt fluxer_api/src/words/tails.txt > /dev/null 2>&1; then
    printf 'changes_detected=true\n' >> "$GITHUB_OUTPUT"
    echo "Changes detected in word lists"
else
    printf 'changes_detected=false\n' >> "$GITHUB_OUTPUT"
    echo "No changes detected in word lists"
fi
""",
    # Overwrite the vendored lists with the freshly downloaded ones.
    "update": """
set -euo pipefail
cp /tmp/scales.txt fluxer_api/src/words/scales.txt
cp /tmp/tails.txt fluxer_api/src/words/tails.txt
""",
    "no_changes": """
echo "Word lists are already up to date."
""",
}
|
||||
|
||||
|
||||
def main() -> int:
    """Dispatch the word-list step named on the command line."""
    cli = parse_step_env_args()
    run_step(STEPS, cli.step)
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
Reference in New Issue
Block a user