diff --git a/.bazelrc b/.bazelrc index 7d3c60255..0e66a3cd0 100644 --- a/.bazelrc +++ b/.bazelrc @@ -23,11 +23,6 @@ test --test_env=ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_DEV common --enable_bzlmod=true -# Currently a bug in rules_docker where the wrong platform is selected when building docker images -# disabling the transitions ensures that docker images are built for the right CPU (amd64 or arm64) -# See https://github.com/bazelbuild/rules_docker/issues/2052#issuecomment-1175585409 -build --@io_bazel_rules_docker//transitions:enable=false - # Enable builds without the bytes https://github.com/bazelbuild/bazel/issues/6862 build:buildbuddy --remote_download_minimal diff --git a/.changeset/bunq2ynab-small-dots-approve-2.md b/.changeset/bunq2ynab-small-dots-approve-2.md new file mode 100644 index 000000000..7a6cccf2c --- /dev/null +++ b/.changeset/bunq2ynab-small-dots-approve-2.md @@ -0,0 +1,5 @@ +--- +"bunq2ynab": minor +--- + +feat: Added support for specifying a sleep timeout at the end of running bunq2ynab to prevent rate limit issues with both Bunq and YNAB. diff --git a/.changeset/bunq2ynab-small-dots-approve.md b/.changeset/bunq2ynab-small-dots-approve.md new file mode 100644 index 000000000..9be6fd461 --- /dev/null +++ b/.changeset/bunq2ynab-small-dots-approve.md @@ -0,0 +1,5 @@ +--- +"bunq2ynab": minor +--- + +feat: Enabled support for both dev and prod environments loading secrets from relevant 1Password vaults. diff --git a/.changeset/provisioner-ninety-windows-shop-2.md b/.changeset/provisioner-ninety-windows-shop-2.md new file mode 100644 index 000000000..b213befce --- /dev/null +++ b/.changeset/provisioner-ninety-windows-shop-2.md @@ -0,0 +1,5 @@ +--- +"provisioner": major +--- + +BREAKING CHANGE: Remove microk8s from provisioner as it's not used and is only consuming resources. diff --git a/.changeset/provisioner-ninety-windows-shop.md b/.changeset/provisioner-ninety-windows-shop.md new file mode 100644 index 000000000..afc2d3435 --- /dev/null +++ b/.changeset/provisioner-ninety-windows-shop.md @@ -0,0 +1,5 @@ +--- +"provisioner": minor +--- + +feat: Deploy bunq2ynab image in dev, test and prod using different images and credentials! diff --git a/.changeset/rules_release-gentle-otters-try.md b/.changeset/rules_release-gentle-otters-try.md new file mode 100644 index 000000000..99b52bb50 --- /dev/null +++ b/.changeset/rules_release-gentle-otters-try.md @@ -0,0 +1,5 @@ +--- +"rules_release": minor +--- + +feat: Added `publish_oci_image` rule to publish OCI images to a registry. 
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 05b38e93f..2832557bd 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -5,6 +5,8 @@ on: push: branches: - "master" + paths: + - "provisioner/version.txt" jobs: deploy-provisioner: name: Deploy Provisioner diff --git a/.gitignore b/.gitignore index 08fd423e0..2710b777e 100755 --- a/.gitignore +++ b/.gitignore @@ -30,6 +30,8 @@ mutagen.yml.lock /home-assistant/zigbee2mqtt-data/* !/home-assistant/zigbee2mqtt-data/configuration.yaml +/home-assistant/matter-server-data/* + /home-assistant/z-stack-firmware/* /home-assistant/mosquitto-data/* diff --git a/BUILD.bazel b/BUILD.bazel index 584598efa..8915760ab 100644 --- a/BUILD.bazel +++ b/BUILD.bazel @@ -247,6 +247,7 @@ release_manager( ":push_git_changes", ], deps = [ + "//provisioner:release", "//tools/bunq2ynab:release", "@rules_release//:release", "@rules_task//:release", @@ -255,7 +256,7 @@ release_manager( alias( name = "regctl", - actual = "//tools/regctl", + actual = "@rules_release//tools/regctl", ) alias( diff --git a/MODULE.bazel b/MODULE.bazel index f74ba96b6..4da2e3a31 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -15,19 +15,40 @@ register_toolchains( ) # ------------------------------------ platforms ------------------------------------ # -bazel_dep(name = "platforms", version = "0.0.8") +bazel_dep( + name = "platforms", + version = "0.0.8", +) # ------------------------------------ rules_skylib ------------------------------------ # -bazel_dep(name = "bazel_skylib", version = "1.4.2") +bazel_dep( + name = "bazel_skylib", + version = "1.4.2", +) # ------------------------------------ aspect_bazel_lib ------------------------------------ # -bazel_dep(name = "aspect_bazel_lib", version = "2.0.1") +bazel_dep( + name = "aspect_bazel_lib", + version = "2.0.3", +) + +bazel_lib_toolchains = use_extension("@aspect_bazel_lib//lib:extensions.bzl", "toolchains") + +bazel_lib_toolchains.jq() + +use_repo(bazel_lib_toolchains, "jq_toolchains") # ------------------------------------ rules_pkg ------------------------------------ # -bazel_dep(name = "rules_pkg", version = "0.9.1") +bazel_dep( + name = "rules_pkg", + version = "0.9.1", +) # ------------------------------------ rules_task ------------------------------------ # -bazel_dep(name = "rules_task", version = "0.0.0") +bazel_dep( + name = "rules_task", + version = "0.0.0", +) local_path_override( module_name = "rules_task", @@ -35,7 +56,10 @@ local_path_override( ) # ------------------------------------ rules_python ------------------------------------ # -bazel_dep(name = "rules_python", version = "0.25.0") +bazel_dep( + name = "rules_python", + version = "0.25.0", +) python = use_extension("@rules_python//python/extensions:python.bzl", "python") @@ -48,10 +72,30 @@ python.toolchain( use_repo(python, "python_versions") # ------------------------------------ rules_oci ------------------------------------ # -bazel_dep(name = "rules_oci", version = "1.4.3") +bazel_dep( + name = "rules_oci", + version = "1.4.3", +) + +oci = use_extension("@rules_oci//oci:extensions.bzl", "oci") + +oci.pull( + name = "ubuntu_base", + digest = "sha256:dfd64a3b4296d8c9b62aa3309984f8620b98d87e47492599ee20739e8eb54fbf", + image = "index.docker.io/library/ubuntu", + platforms = [ + "linux/amd64", + "linux/arm64/v8", + ], +) + +use_repo(oci, "ubuntu_base") # ------------------------------------ rules_release ------------------------------------ # -bazel_dep(name = "rules_release", version = "0.0.0") +bazel_dep( 
+ name = "rules_release", + version = "0.0.0", +) local_path_override( module_name = "rules_release", diff --git a/WORKSPACE.bzlmod b/WORKSPACE.bzlmod index 73486a23c..0a10f3fb6 100644 --- a/WORKSPACE.bzlmod +++ b/WORKSPACE.bzlmod @@ -1,50 +1,6 @@ load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive", "http_file") load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository") -# ------------------------------------ rules_docker ------------------------------------ # -# https://github.com/bazelbuild/rules_docker/pull/2201 -http_archive( - name = "io_bazel_rules_docker", - sha256 = "b1e80761a8a8243d03ebca8845e9cc1ba6c82ce7c5179ce2b295cd36f7e394bf", - urls = [ - "https://github.com/bazelbuild/rules_docker/releases/download/v0.25.0/rules_docker-v0.25.0.tar.gz", - ], -) - -load( - "@io_bazel_rules_docker//repositories:repositories.bzl", - container_repositories = "repositories", -) - -container_repositories() - -load( - "@io_bazel_rules_docker//python3:image.bzl", - _py3_image_repos = "repositories", -) - -_py3_image_repos() - -load( - "@io_bazel_rules_docker//container:container.bzl", - "container_pull", -) - -# https://hub.docker.com/layers/library/python/3.10.8-bullseye/images/sha256-de501d4dbc12f846ee78ee76629b7524ec07f10c121832a190a1ecb7b9a119bb?context=explore -container_pull( - name = "python_base_arm64", - digest = "sha256:fa109a6d1100a4e6382950ab1f7f94563268e537b1619f1969b9b33a9c1a99fb", - registry = "index.docker.io", - repository = "library/python:3.10.8-bullseye", -) - -container_pull( - name = "python_base_amd64", - digest = "sha256:de501d4dbc12f846ee78ee76629b7524ec07f10c121832a190a1ecb7b9a119bb", - registry = "index.docker.io", - repository = "library/python:3.10.8-bullseye", -) - # ------------------------------------ rules_pycross ------------------------------------ # http_archive( name = "jvolkman_rules_pycross", @@ -78,37 +34,6 @@ load("@pdm-setup//:requirements.bzl", "install_deps") install_deps() -# ------------------------------------ workstation ------------------------------------ # - -# https://hub.docker.com/layers/library/ubuntu/jammy-20230425/images/sha256-ca5534a51dd04bbcebe9b23ba05f389466cf0c190f1f8f182d7eea92a9671d00?context=explore -container_pull( - name = "ubuntu_base_amd64", - digest = "sha256:ca5534a51dd04bbcebe9b23ba05f389466cf0c190f1f8f182d7eea92a9671d00", - registry = "index.docker.io", - repository = "library/ubuntu:jammy-20230425", -) - -container_pull( - name = "ubuntu_base_arm64", - digest = "sha256:6f8fe7bff0bee25c481cdc26e28bba984ebf72e6152005c18e1036983c01a28b", - registry = "index.docker.io", - repository = "library/ubuntu:jammy-20230425", -) - -http_file( - name = "inspec_arm64", - downloaded_file_path = "inspec.deb", - sha256 = "79a496d2467f579c6533bcf42c663d96d830af42ba2f32769ddf6ef879d7d3b5", - url = "https://packages.chef.io/files/stable/inspec/5.18.14/ubuntu/20.04/inspec_5.18.14-1_arm64.deb", -) - -http_file( - name = "inspec_amd64", - downloaded_file_path = "inspec.deb", - sha256 = "b4e8b11478cd2c930b24edcf5c24ef49fe83452f08f6cedc13deae5ce7b0c757", - url = "https://packages.chef.io/files/stable/inspec/5.18.14/ubuntu/20.04/inspec_5.18.14-1_amd64.deb", -) - # ------------------------------------ tilt ------------------------------------ # # From https://github.com/tilt-dev/tilt/releases @@ -228,34 +153,6 @@ http_archive( url = "https://cache.agilebits.com/dist/1P/op2/pkg/v2.22.0/op_darwin_arm64_v2.22.0.zip", ) -# ------------------------------------ regctl ------------------------------------ # - -# From 
https://github.com/regclient/regclient/releases - -http_file( - name = "regctl_linux_arm64", - downloaded_file_path = "regctl", - executable = True, - sha256 = "380105c05c6c69ea3d35a8efeec0ccfa1bdfc38a876bf7d473be06d7267bae99", - url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-linux-arm64", -) - -http_file( - name = "regctl_linux_amd64", - downloaded_file_path = "regctl", - executable = True, - sha256 = "5141569cd0ef6e52a9dc67391c432f1bdd0cfd2d3b82d3f22d56f94feab7203e", - url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-linux-amd64", -) - -http_file( - name = "regctl_darwin_arm64", - downloaded_file_path = "regctl", - executable = True, - sha256 = "4705d5068f946a75606494400c811b9f3f34c89dc495a372462eef02d372fe4d", - url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-darwin-arm64", -) - # ------------------------------------ mutagen ------------------------------------ # # From https://github.com/mutagen-io/mutagen/releases diff --git a/devbox.lock b/devbox.lock index b6e187dd6..27619de35 100644 --- a/devbox.lock +++ b/devbox.lock @@ -93,7 +93,7 @@ }, "python310Packages.pip@23.2.1": { "last_modified": "2023-10-25T20:49:13Z", - "plugin_version": "0.0.1", + "plugin_version": "0.0.2", "resolved": "github:NixOS/nixpkgs/75a52265bda7fd25e06e3a67dee3f0354e73243c#python310Packages.pip", "source": "devbox-search", "version": "23.2.1", diff --git a/home-assistant/config/automations.yaml b/home-assistant/config/automations.yaml index fccacf691..6d07d28ce 100644 --- a/home-assistant/config/automations.yaml +++ b/home-assistant/config/automations.yaml @@ -33,7 +33,6 @@ presence_entity_id: sensor.driveway_presence presence_indicator_entity_ids: - binary_sensor.driveway_doorbell_motion - - binary_sensor.hallway_door_contact presence_hint_entity_ids: - binary_sensor.driveway_shelly_input presence_timeout: 120 @@ -52,10 +51,11 @@ - binary_sensor.landing_motion_occupancy - binary_sensor.living_room_motion_occupancy - binary_sensor.toilet_motion_occupancy + - binary_sensor.hallway_motion_occupancy + - binary_sensor.hallway_motion_update_available presence_timeout: 120 presence_indicator_entity_ids: - binary_sensor.hallway_motion_occupancy - - binary_sensor.hallway_door_contact - id: "1655752934365" alias: "Laundry Room - Presence: Manage" description: "" @@ -305,7 +305,7 @@ - service: media_player.play_media data: media_content_type: music - media_content_id: http://hypervisor:8123/local/sounds_sounds_ring_button_Chime.mp3 + media_content_id: http://192.168.1.30:8123/local/sounds_sounds_ring_button_Chime.mp3 target: entity_id: - media_player.woonkamer @@ -313,7 +313,7 @@ - delay: hours: 0 minutes: 0 - seconds: 4 + seconds: 8 milliseconds: 0 - parallel: - service: sonos.restore @@ -459,8 +459,8 @@ illuminance_entity_id: sensor.living_room_illuminance away_timer_entity_id: timer.living_room_dining_table_away_timer away_timer_duration: - hours: 0 - minutes: 3 + hours: 1 + minutes: 0 seconds: 0 manual_timer_entity_id: timer.living_room_dining_table_manual_timer manual_timer_duration: @@ -480,8 +480,8 @@ illuminance_entity_id: sensor.living_room_illuminance away_timer_entity_id: timer.living_room_kitchen_away_timer away_timer_duration: - hours: 0 - minutes: 3 + hours: 1 + minutes: 0 seconds: 0 manual_timer_entity_id: timer.living_room_kitchen_manual_timer manual_timer_duration: @@ -502,7 +502,7 @@ away_timer_entity_id: timer.living_room_reading_away_timer away_timer_duration: hours: 0 - minutes: 3 + minutes: 15 seconds: 0 
manual_timer_entity_id: timer.living_room_reading_manual_timer manual_timer_duration: @@ -523,8 +523,8 @@ illuminance_entity_id: sensor.living_room_illuminance away_timer_entity_id: timer.living_room_sofa_away_timer away_timer_duration: - hours: 0 - minutes: 3 + hours: 1 + minutes: 0 seconds: 0 manual_timer_entity_id: timer.living_room_sofa_manual_timer manual_timer_duration: diff --git a/home-assistant/docker-compose.yml b/home-assistant/docker-compose.yml index ca3e09879..c5e36d4ab 100755 --- a/home-assistant/docker-compose.yml +++ b/home-assistant/docker-compose.yml @@ -49,7 +49,7 @@ services: home-assistant: container_name: homeassistant - image: homeassistant/home-assistant:2023.10 + image: homeassistant/home-assistant:2023.12 volumes: - ./config:/config - /etc/localtime:/etc/localtime:ro @@ -57,6 +57,17 @@ services: privileged: true network_mode: host + matter-server: + container_name: matter-server + image: ghcr.io/home-assistant-libs/python-matter-server:stable + restart: unless-stopped + security_opt: + - apparmor=unconfined + volumes: + - ./matter-server-data:/data + - /run/dbus:/run/dbus:ro + network_mode: host + esphome: container_name: esphome image: esphome/esphome:2023.9 diff --git a/provisioner/BUILD.bazel b/provisioner/BUILD.bazel index de859d33d..7192e0b31 100644 --- a/provisioner/BUILD.bazel +++ b/provisioner/BUILD.bazel @@ -1,15 +1,20 @@ load("//tools/pyinfra:defs.bzl", "pyinfra_run") load("@rules_task//task:defs.bzl", "cmd", "task", "task_test") -load("//tools/docker:docker.bzl", "docker_load") +load("//tools/docker:docker_load.bzl", "docker_load") +load("//tools/docker:docker_run_and_commit.bzl", "docker_run_and_commit") load("@pdm-setup//:requirements.bzl", "requirement") load("//tools/onepassword:defs.bzl", "secrets") +load("@rules_oci//oci:defs.bzl", "oci_image") +load("@rules_release//tools:defs.bzl", "publish_github_release", release = "bazel_diff_release") + +package(default_visibility = ["//visibility:public"]) pyinfra_run( name = "provision", srcs = [ "connectors/teleport.py", + "deploys/bunq2ynab/tasks/install_bunq2ynab.py", "deploys/docker/tasks/install_docker.py", - "deploys/microk8s/tasks/install_microk8s.py", "deploys/monitoring/tasks/install_monitoring.py", "deploys/network/tasks/install_network.py", "deploys/teleport/tasks/install_teleport.py", @@ -22,12 +27,12 @@ pyinfra_run( "--data install_network=True", "--data install_docker=True", "--data install_monitoring=True", - "--data install_microk8s=True", "--data install_teleport=True", + "--data install_bunq2ynab=True", ], data = [ + "deploys/bunq2ynab/files/docker-compose.yml.j2", "deploys/docker/files/daemon.json", - "deploys/microk8s/files/cmdline.txt", "deploys/monitoring/files/docker-compose.yml.j2", "deploys/monitoring/files/docker-logs-fluentbit.conf", "deploys/monitoring/files/docker-parser-fluentbit.conf", @@ -38,6 +43,8 @@ pyinfra_run( "deploys/monitoring/files/reboot.sh", "deploys/network/files/99_config.yaml", "deploys/teleport/files/teleport.yaml.j2", + "//tools/bunq2ynab:publish_oci_image.remote_tags", + "//tools/bunq2ynab:publish_oci_image-dev.remote_tags", ], deploy = "deploy.py", env = { @@ -173,10 +180,6 @@ task_test( name = "deploy_test", size = "large", cmds = [ - cmd.shell( - cmd.executable(":deploy_mark"), - "test-commit-sha", - ), cmd.executable(":deploy_provision"), cmd.shell( cmd.executable(":deploy_validate"), @@ -190,14 +193,37 @@ task_test( }, ) +docker_run_and_commit( + name = "install_deps", + cmd = """ + apt-get update + apt-get install -y snapd squashfuse fuse sudo 
lsb-release + systemctl enable snapd + useradd -m ubuntu -s /bin/bash + adduser ubuntu sudo + echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers + """, + image = "@ubuntu_base", +) + +oci_image( + name = "dev-image", + base = "@ubuntu_base", + env = { + "DEBIAN_FRONTEND": "noninteractive", + "LANG": "C.UTF-8", + "LC_ALL": "C.UTF-8", + "container": "docker", + }, + tars = [ + ":install_deps", + ], +) + docker_load( name = "load_dev_image", - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - image = "//tools/ubuntu:ubuntu_snap_base_image", + image = ":dev-image", + tag = "provisioner-dev:latest", ) task( @@ -233,7 +259,7 @@ task( task( name = "dev", cmds = [ - "docker rm -f provisioner_dev", + "docker rm -f provisioner-dev", "export CONTAINER_ID=$($run_dev_image)", {"defer": "docker rm -f $CONTAINER_ID"}, "docker logs -f $CONTAINER_ID", @@ -241,7 +267,7 @@ task( env = { "run_dev_image": cmd.shell( cmd.executable(":run_dev_image"), - "--name provisioner_dev", + "--name provisioner-dev", ), }, ) @@ -265,7 +291,7 @@ task( elif setup_env == 'prod': os.environ['VALIDATE_HOST'] = f"ssh://ubuntu@provisioner?timeout=0&controlpersist=0" else: - os.environ['VALIDATE_HOST'] = f"docker://root@provisioner_dev" + os.environ['VALIDATE_HOST'] = f"docker://root@provisioner-dev" """), cmd.python_entry_point("pytest:console_main", "--export-traces", "-vv", "-ra", "--hosts=\"$VALIDATE_HOST\"", "$tests"), ], @@ -289,6 +315,7 @@ task_test( cmds = [ "export CONTAINER_ID=$($run_dev_image)", {"defer": "docker rm -f $CONTAINER_ID"}, + "$bunq2ynab_publish_oci_image", "$provision", "$validate", ], @@ -296,6 +323,7 @@ task_test( "run_dev_image": cmd.executable(":run_dev_image"), "provision": cmd.executable(":provision"), "validate": cmd.executable(":validate"), + "bunq2ynab_publish_oci_image": cmd.executable("//tools/bunq2ynab:publish_oci_image-dev"), "SETUP_ENV": "test", }, exec_properties = { @@ -305,3 +333,30 @@ task_test( "include-secrets": "true", }, ) + +publish_github_release( + name = "publish_github_release", + before_cmds = [ + "export OP_SERVICE_ACCOUNT_TOKEN=${ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_PROD:-}", + "export GH_TOKEN=$($OP read op://vgijssel-prod/github-release/credential)", + ], + env = { + "OP": cmd.executable("//tools/onepassword:op"), + }, + release = ":release", +) + +release( + name = "release", + changelog_file = "CHANGELOG.md", + publish_cmds = [ + ":publish_github_release", + ], + release_name = "provisioner", + target = ":deploy", + version_file = "version.txt", + deps = [ + "//tools/bunq2ynab:release", + "@rules_task//:release", + ], +) diff --git a/provisioner/CHANGELOG.md b/provisioner/CHANGELOG.md new file mode 100644 index 000000000..e69de29bb diff --git a/provisioner/deploy.py b/provisioner/deploy.py index 7403d2025..d87287bb6 100644 --- a/provisioner/deploy.py +++ b/provisioner/deploy.py @@ -1,8 +1,8 @@ from provisioner.deploys.network.tasks.install_network import install_network -from provisioner.deploys.microk8s.tasks.install_microk8s import install_microk8s from provisioner.deploys.teleport.tasks.install_teleport import install_teleport from provisioner.deploys.monitoring.tasks.install_monitoring import install_monitoring from provisioner.deploys.docker.tasks.install_docker import install_docker +from provisioner.deploys.bunq2ynab.tasks.install_bunq2ynab import install_bunq2ynab from provisioner.utils import wait_for_reconnect from pyinfra import host @@ -34,5 +34,5 @@ if host.data.get("install_teleport"): 
install_teleport() -if host.data.get("install_microk8s"): - install_microk8s() +if host.data.get("install_bunq2ynab"): + install_bunq2ynab() diff --git a/provisioner/deploys/bunq2ynab/files/docker-compose.yml.j2 b/provisioner/deploys/bunq2ynab/files/docker-compose.yml.j2 new file mode 100644 index 000000000..14882dab8 --- /dev/null +++ b/provisioner/deploys/bunq2ynab/files/docker-compose.yml.j2 @@ -0,0 +1,20 @@ +name: bunq2ynab +version: '3.8' +services: + bunq2ynab: + container_name: bunq2ynab + image: {{ bunq2ynab_image_tag }} + restart: always + environment: + - OP_SERVICE_ACCOUNT_TOKEN={{ op_service_account_token }} + - SETUP_ENV={{ setup_env }} + # sleeping for 15 minutes to prevent rate limiting + - TIMEOUT=900 + deploy: + resources: + limits: + cpus: '0.5' + memory: '256M' + labels: + containerName: "bunq2ynab" + env: "{{ setup_env }}" \ No newline at end of file diff --git a/provisioner/deploys/bunq2ynab/tasks/install_bunq2ynab.py b/provisioner/deploys/bunq2ynab/tasks/install_bunq2ynab.py new file mode 100644 index 000000000..08c51fb1d --- /dev/null +++ b/provisioner/deploys/bunq2ynab/tasks/install_bunq2ynab.py @@ -0,0 +1,55 @@ +from pyinfra.api.deploy import deploy +from pyinfra.operations import files, server, apt, systemd +from pyinfra import host +from tools.onepassword.lib import get_item_path +from pyinfra.facts.server import LsbRelease +from pyinfra.facts.deb import DebArch + + +@deploy("Install bunq2ynab") +def install_bunq2ynab(): + files.directory( + name="Ensure bunq2ynab directory exists", + path="/opt/bunq2ynab", + user="root", + group="root", + mode="0700", + _sudo=True, + ) + + github_registry_username = get_item_path("github-container-registry.username") + github_registry_password = get_item_path("github-container-registry.credential") + + server.shell( + name="Login to the Github Container Registry", + commands=[ + f"docker login ghcr.io --username {github_registry_username} --password {github_registry_password}", + ], + _sudo=True, + ) + + op_service_account_token = get_item_path( + "1password_service_account_token.credential" + ) + + docker_compose = files.template( + name="Copy the docker-compose file", + src="provisioner/deploys/bunq2ynab/files/docker-compose.yml.j2", + dest="/opt/bunq2ynab/docker-compose.yml", + mode="0600", + _sudo=True, + user="root", + group="root", + op_service_account_token=op_service_account_token, + setup_env=host.data.setup_env, + bunq2ynab_image_tag=host.data.bunq2ynab_image_tag, + ) + + if docker_compose.changed: + server.shell( + name="Start the bunq2ynab service", + commands=[ + "docker compose -f /opt/bunq2ynab/docker-compose.yml up -d --force-recreate --remove-orphans", + ], + _sudo=True, + ) diff --git a/provisioner/deploys/microk8s/files/cmdline.txt b/provisioner/deploys/microk8s/files/cmdline.txt deleted file mode 100644 index c67e9172d..000000000 --- a/provisioner/deploys/microk8s/files/cmdline.txt +++ /dev/null @@ -1 +0,0 @@ -console=serial0,115200 dwc_otg.lpm_enable=0 console=tty1 root=LABEL=writable rootfstype=ext4 rootwait fixrtc quiet splash cgroup_enable=memory cgroup_memory=1 \ No newline at end of file diff --git a/provisioner/deploys/microk8s/tasks/install_microk8s.py b/provisioner/deploys/microk8s/tasks/install_microk8s.py deleted file mode 100644 index a48d97e6e..000000000 --- a/provisioner/deploys/microk8s/tasks/install_microk8s.py +++ /dev/null @@ -1,101 +0,0 @@ -from pyinfra import host -from pyinfra.api.deploy import deploy -from pyinfra.operations import snap, server, files, apt -from pyinfra.facts.server 
import Users - -MICROK8S_VERSION = "1.27" - - -# From https://microk8s.io/docs/getting-started -@deploy("Install Microk8s") -def install_microk8s(): - if not host.data.get("inside_docker"): - # From https://microk8s.io/docs/install-raspberry-pi - apt.packages( - name="Ensure all kernel modules are available", - packages=["linux-modules-extra-raspi"], - update=True, - present=True, - cache_time=24 * 60 * 60, - _sudo=True, - ) - - # From https://microk8s.io/docs/install-raspberry-pi - config_file = files.put( - name="Copy cmdline", - src="provisioner/deploys/microk8s/files/cmdline.txt", - dest="/boot/firmware/cmdline.txt", - create_remote_dir=True, - _sudo=True, - user="root", - group="root", - mode="0755", - ) - - if config_file.changed and not host.data.get("inside_docker"): - server.reboot( - name="Reboot the server and wait to reconnect", - delay=60, - reboot_timeout=600, - _sudo=True, - ) - - snap.package( - name="Install MicroK8S", - packages="microk8s", - classic=True, - present=True, - _sudo=True, - channel=f"{MICROK8S_VERSION}/stable", - ) - - if not host.data.get("inside_docker"): - server.shell( - name="Update firewall rules", - commands=[ - "ufw allow in on cni0", - "ufw allow out on cni0", - "ufw default allow routed", - ], - _sudo=True, - ) - - existing_groups = host.get_fact(Users)["ubuntu"]["groups"] - - if "microk8s" not in existing_groups: - server.shell( - name="Add ubuntu to microk8s group", - commands=[ - "usermod -a -G microk8s ubuntu", - ], - _sudo=True, - ) - - files.directory( - name="Create and own .kube directory", - present=True, - user="ubuntu", - group="ubuntu", - path="/home/ubuntu/.kube", - _sudo=True, - ) - - if not host.data.get("inside_docker"): - server.shell( - name="Start Microk8s", - commands=[ - "microk8s start", - ], - _sudo=True, - ) - - server.shell( - name="Enable DNS addon", - # From here https://microk8s.io/docs/addons - commands=[ - "microk8s enable dns", - "microk8s enable helm", - "microk8s enable hostpath-storage", - ], - _sudo=True, - ) diff --git a/provisioner/group_data/dev.py b/provisioner/group_data/dev.py index 853e3d363..cfad086b4 100644 --- a/provisioner/group_data/dev.py +++ b/provisioner/group_data/dev.py @@ -5,3 +5,9 @@ teleport_acme_email = "" teleport_acme_enabled = "no" new_relic_display_name = "provisioner_dev" +bunq2ynab_image_tag = ( + "ghcr.io/vgijssel/setup/bunq2ynab-dev:" + + open("./tools/bunq2ynab/publish_oci_image-dev.remote_tags.txt", "r") + .read() + .strip() +) diff --git a/provisioner/group_data/prod.py b/provisioner/group_data/prod.py index 354fc8ce5..3c64f8bfd 100644 --- a/provisioner/group_data/prod.py +++ b/provisioner/group_data/prod.py @@ -5,3 +5,7 @@ teleport_acme_email = "haves_borzoi_0o@icloud.com" teleport_acme_enabled = "yes" new_relic_display_name = "provisioner" +bunq2ynab_image_tag = ( + "ghcr.io/vgijssel/setup/bunq2ynab:" + + open("./tools/bunq2ynab/publish_oci_image.remote_tags.txt", "r").read().strip() +) diff --git a/provisioner/group_data/test.py b/provisioner/group_data/test.py index b8a3f8d6e..ea4e27995 100644 --- a/provisioner/group_data/test.py +++ b/provisioner/group_data/test.py @@ -5,3 +5,9 @@ teleport_acme_email = "" teleport_acme_enabled = "no" new_relic_display_name = "provisioner_test" +bunq2ynab_image_tag = ( + "ghcr.io/vgijssel/setup/bunq2ynab-dev:" + + open("./tools/bunq2ynab/publish_oci_image-dev.remote_tags.txt", "r") + .read() + .strip() +) diff --git a/provisioner/inventory.py b/provisioner/inventory.py index 5ae8f3f79..bd28ae211 100644 --- a/provisioner/inventory.py +++ 
b/provisioner/inventory.py @@ -52,7 +52,7 @@ def patched_get_all_connectors(): ] else: - container_id = "provisioner_dev" + container_id = "provisioner-dev" dev = [ (f"@docker/{container_id}", {}), ] diff --git a/provisioner/test_provisioner.py b/provisioner/test_provisioner.py index 6b15319c1..8a47826c8 100644 --- a/provisioner/test_provisioner.py +++ b/provisioner/test_provisioner.py @@ -28,14 +28,6 @@ def test_hostname(host): assert host.check_output("hostname -s") == "provisioner" -def test_cmdline(host): - cmdline = host.file("/boot/firmware/cmdline.txt") - assert cmdline.contains("root") - assert cmdline.user == "root" - assert cmdline.group == "root" - assert cmdline.mode == 0o755 - - def test_ubuntu_focal(host): assert host.system_info.type == "linux" assert host.system_info.distribution == "ubuntu" @@ -143,27 +135,6 @@ def test_otel_collector_health(host): # "0 1 * * * /opt/monitoring/reboot.sh" in host.check_output("crontab -l") -def test_microk8s_installed(host): - assert "microk8s" in host.check_output("snap list") - - -def test_microk8s_version(host): - with host.sudo(): - assert host.check_output("microk8s version").startswith("MicroK8s v1.27") - - -def test_user_added_to_microk8s_group(host): - assert "microk8s" in host.user("ubuntu").groups - - -def test_kube_config_permissions(host): - kube_config = host.file("/home/ubuntu/.kube") - assert kube_config.is_directory - assert kube_config.user == "ubuntu" - assert kube_config.group == "ubuntu" - assert kube_config.mode == 0o755 - - def test_passwd_file(host): passwd = host.file("/etc/passwd") assert passwd.contains("root") @@ -205,3 +176,8 @@ def test_https_port_is_open(host): def test_teleport_diag_port_is_open(host): assert host.socket("tcp://127.0.0.1:3000").is_listening assert not host.socket("tcp://0.0.0.0:3000").is_listening + + +def test_bunq2ynab_service(host): + bunq2ynab = host.docker("bunq2ynab") + assert bunq2ynab.is_running diff --git a/provisioner/version.txt b/provisioner/version.txt new file mode 100644 index 000000000..bd52db81d --- /dev/null +++ b/provisioner/version.txt @@ -0,0 +1 @@ +0.0.0 \ No newline at end of file diff --git a/rules/rules_release/MODULE.bazel b/rules/rules_release/MODULE.bazel index f9b9a8ea7..8f2208331 100644 --- a/rules/rules_release/MODULE.bazel +++ b/rules/rules_release/MODULE.bazel @@ -20,7 +20,7 @@ bazel_dep( ) # ------------------------------------ aspect_bazel_lib ------------------------------------ # -bazel_dep(name = "aspect_bazel_lib", version = "2.0.1") +bazel_dep(name = "aspect_bazel_lib", version = "2.0.3") # ------------------------------------ rules_java ------------------------------------ # bazel_dep(name = "rules_java", version = "7.0.6") @@ -28,6 +28,9 @@ bazel_dep(name = "rules_java", version = "7.0.6") # ------------------------------------ bazel-diff ------------------------------------ # use_repo(non_module_dependencies, "bazel_diff") +# ------------------------------------ rules_oci ------------------------------------ # +bazel_dep(name = "rules_oci", version = "1.4.3") + # ------------------------------------ github_cli ------------------------------------ # use_repo(non_module_dependencies, "github_cli_linux_arm64") @@ -42,6 +45,13 @@ use_repo(non_module_dependencies, "onepassword_linux_amd64") use_repo(non_module_dependencies, "onepassword_darwin_arm64") +# ------------------------------------ regctl ------------------------------------ # +use_repo(non_module_dependencies, "regctl_linux_arm64") + +use_repo(non_module_dependencies, "regctl_linux_amd64") + 
+use_repo(non_module_dependencies, "regctl_darwin_arm64") + # ------------------------------------ examples ------------------------------------ # use_repo(non_module_dependencies, "examples_workspace") diff --git a/rules/rules_release/release/repositories.bzl b/rules/rules_release/release/repositories.bzl index cc9d64b0c..46fe52268 100644 --- a/rules/rules_release/release/repositories.bzl +++ b/rules/rules_release/release/repositories.bzl @@ -1,4 +1,4 @@ -load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive", _http_jar = "http_jar") +load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive", _http_file = "http_file", _http_jar = "http_jar") load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # Copied from https://groups.google.com/g/bazel-discuss/c/xpsg3mWQPZg @@ -26,20 +26,8 @@ def http_archive(**kwargs): def http_jar(**kwargs): maybe(_http_jar, **kwargs) -# load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") - -# http_archive( -# name = "bazel_skylib", -# sha256 = "66ffd9315665bfaafc96b52278f57c7e2dd09f5ede279ea6d39b2be471e7e3aa", -# urls = [ -# "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz", -# "https://github.com/bazelbuild/bazel-skylib/releases/download/1.4.2/bazel-skylib-1.4.2.tar.gz", -# ], -# ) - -# load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace") - -# bazel_skylib_workspace() +def http_file(**kwargs): + maybe(_http_file, **kwargs) def rules_release_bazel_dependencies(): http_archive( @@ -79,6 +67,13 @@ def rules_release_bazel_dependencies(): url = "https://github.com/bazelbuild/rules_python/releases/download/0.21.0/rules_python-0.21.0.tar.gz", ) + http_archive( + name = "rules_oci", + sha256 = "d41d0ba7855f029ad0e5ee35025f882cbe45b0d5d570842c52704f7a47ba8668", + strip_prefix = "rules_oci-1.4.3", + url = "https://github.com/bazel-contrib/rules_oci/releases/download/v1.4.3/rules_oci-v1.4.3.tar.gz", + ) + def rules_release_dependencies(): http_jar( name = "bazel_diff", @@ -131,6 +126,32 @@ def rules_release_dependencies(): url = "https://cache.agilebits.com/dist/1P/op2/pkg/v2.22.0/op_darwin_arm64_v2.22.0.zip", ) + # ------------------------------------ regctl ------------------------------------ # + # From https://github.com/regclient/regclient/releases + http_file( + name = "regctl_linux_arm64", + downloaded_file_path = "regctl", + executable = True, + sha256 = "380105c05c6c69ea3d35a8efeec0ccfa1bdfc38a876bf7d473be06d7267bae99", + url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-linux-arm64", + ) + + http_file( + name = "regctl_linux_amd64", + downloaded_file_path = "regctl", + executable = True, + sha256 = "5141569cd0ef6e52a9dc67391c432f1bdd0cfd2d3b82d3f22d56f94feab7203e", + url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-linux-amd64", + ) + + http_file( + name = "regctl_darwin_arm64", + downloaded_file_path = "regctl", + executable = True, + sha256 = "4705d5068f946a75606494400c811b9f3f34c89dc495a372462eef02d372fe4d", + url = "https://github.com/regclient/regclient/releases/download/v0.5.3/regctl-darwin-arm64", + ) + starlarkified_local_repository( name = "examples_workspace", path = "examples/workspace", diff --git a/rules/rules_release/release/repository_primary_deps.bzl b/rules/rules_release/release/repository_primary_deps.bzl index 5f04901a8..a7ad104ba 100644 --- a/rules/rules_release/release/repository_primary_deps.bzl +++ 
b/rules/rules_release/release/repository_primary_deps.bzl @@ -5,6 +5,7 @@ load("@aspect_rules_js//npm:repositories.bzl", "npm_translate_lock") load("@rules_python//python:repositories.bzl", "py_repositories") load("@rules_python//python:pip.bzl", "pip_parse") load("@bazel_skylib//:workspace.bzl", "bazel_skylib_workspace") +load("@rules_oci//oci:dependencies.bzl", "rules_oci_dependencies") def install_primary_deps(): # ------------------------------------ bazel_features ------------------------------------ # @@ -35,3 +36,6 @@ def install_primary_deps(): pnpm_lock = "@rules_release//:pnpm-lock.yaml", verify_node_modules_ignored = "@rules_release//:.bazelignore", ) + + # ------------------------------------ rules_oci ------------------------------------ # + rules_oci_dependencies() diff --git a/rules/rules_release/release/repository_secondary_deps.bzl b/rules/rules_release/release/repository_secondary_deps.bzl index 8f565b2aa..88339afc4 100644 --- a/rules/rules_release/release/repository_secondary_deps.bzl +++ b/rules/rules_release/release/repository_secondary_deps.bzl @@ -1,6 +1,7 @@ load("@rules_nodejs//nodejs:repositories.bzl", "DEFAULT_NODE_VERSION", "nodejs_register_toolchains") load("@pip//:requirements.bzl", install_pip_deps = "install_deps") load("@npm//:repositories.bzl", "npm_repositories") +load("@rules_oci//oci:repositories.bzl", "LATEST_CRANE_VERSION", "oci_register_toolchains") def install_secondary_deps(): # ------------------------------------ aspect_rules_js ------------------------------------ # @@ -14,3 +15,9 @@ def install_secondary_deps(): # ------------------------------------ rules_release ------------------------------------ # npm_repositories() + + # ------------------------------------ rules_oci ------------------------------------ # + oci_register_toolchains( + name = "oci", + crane_version = LATEST_CRANE_VERSION, + ) diff --git a/rules/rules_release/tools/defs.bzl b/rules/rules_release/tools/defs.bzl index b3a10bcba..8ff998b1f 100644 --- a/rules/rules_release/tools/defs.bzl +++ b/rules/rules_release/tools/defs.bzl @@ -1,5 +1,7 @@ load("//tools/private:publish_github_release.bzl", _publish_github_release = "publish_github_release") +load("//tools/private:publish_oci_image.bzl", _publish_oci_image = "publish_oci_image") load("//tools/private:bazel_diff_release.bzl", _bazel_diff_release = "bazel_diff_release") publish_github_release = _publish_github_release bazel_diff_release = _bazel_diff_release +publish_oci_image = _publish_oci_image diff --git a/rules/rules_release/tools/private/publish_oci_image.bzl b/rules/rules_release/tools/private/publish_oci_image.bzl new file mode 100644 index 000000000..b4744651a --- /dev/null +++ b/rules/rules_release/tools/private/publish_oci_image.bzl @@ -0,0 +1,44 @@ +load("@rules_task//task:defs.bzl", "cmd", "task") +load("@rules_oci//oci:defs.bzl", "oci_push") +load("@aspect_bazel_lib//lib:copy_file.bzl", "copy_file") + +def publish_oci_image(name, image, repository, remote_tags, before_cmds = [], env = {}): + oci_push_name = "{}.push_oci".format(name) + remote_tags_name = "{}.remote_tags".format(name) + + oci_push( + name = oci_push_name, + image = image, + remote_tags = remote_tags, + repository = repository, + ) + + target_env = { + "REGCTL": cmd.executable(Label("//tools/regctl")), + "REMOTE_TAGS_FILE": cmd.file(remote_tags), + "PUSH_IMAGE": cmd.executable(oci_push_name), + } + + for k, v in env.items(): + if k in target_env: + fail("env key {} already exists".format(k)) + + target_env[k] = v + + task( + name = name, + cmds = 
before_cmds + [ + "export IMAGE_TAG=$(cat $REMOTE_TAGS_FILE)", + "export IMAGE={}:$IMAGE_TAG".format(repository), + "export CONTAINER_EXISTS=$($REGCTL image inspect $IMAGE > /dev/null 2>&1 && echo true || echo false)", + "if [ $CONTAINER_EXISTS = true ]; then echo 'Image already exists, exiting.'; exit 0; fi", + "$PUSH_IMAGE", + ], + env = target_env, + ) + + copy_file( + name = remote_tags_name, + src = remote_tags, + out = "{}.txt".format(remote_tags_name), + ) diff --git a/tools/regctl/BUILD.bazel b/rules/rules_release/tools/regctl/BUILD.bazel similarity index 100% rename from tools/regctl/BUILD.bazel rename to rules/rules_release/tools/regctl/BUILD.bazel diff --git a/tools/bunq2ynab/BUILD.bazel b/tools/bunq2ynab/BUILD.bazel index b514b82a4..a5570707c 100644 --- a/tools/bunq2ynab/BUILD.bazel +++ b/tools/bunq2ynab/BUILD.bazel @@ -2,8 +2,8 @@ load("@rules_python//python:defs.bzl", "py_binary") load("@rules_task//task:defs.bzl", "cmd", "task", "task_test") load("//tools/python:defs.bzl", "py_image") load("@pdm-setup//:requirements.bzl", "requirement") -load("@rules_release//tools:defs.bzl", "publish_github_release", release = "bazel_diff_release") -load("@rules_oci//oci:defs.bzl", "oci_push") +load("@rules_release//tools:defs.bzl", "publish_github_release", "publish_oci_image", release = "bazel_diff_release") +load("@aspect_bazel_lib//lib:jq.bzl", "jq") package(default_visibility = ["//visibility:public"]) @@ -51,7 +51,7 @@ task( env = { "BINARY": cmd.executable("list_user_binary"), "OP": cmd.executable("//tools/onepassword:op"), - "CONFIG_FILE_TPL": cmd.file("config.json.tpl"), + "CONFIG_FILE_TPL": cmd.file("config-dev.json.tpl"), }, ) @@ -74,22 +74,31 @@ task( env = { "BINARY": cmd.executable("list_budget_binary"), "OP": cmd.executable("//tools/onepassword:op"), - "CONFIG_FILE_TPL": cmd.file("config.json.tpl"), + "CONFIG_FILE_TPL": cmd.file("config-dev.json.tpl"), }, ) task( name = "bunq2ynab", cmds = [ + "export SETUP_ENV=${SETUP_ENV:-dev}", + "export TIMEOUT=${TIMEOUT:-0}", + 'export SETUP_ENV="${SETUP_ENV^^}"', + "echo Running in env: $SETUP_ENV", "export CONFIG_FILE=$(pwd)/config.json", - "$OP inject --in-file=$CONFIG_FILE_TPL --out-file=$CONFIG_FILE --force", + 'export CONFIG_TEMPLATE="CONFIG_${SETUP_ENV}_FILE_TPL"', + "echo Using config template: $CONFIG_TEMPLATE", + "$OP inject --in-file=${!CONFIG_TEMPLATE} --out-file=$CONFIG_FILE --force", {"defer": "rm -rf $CONFIG_FILE"}, "$BINARY --config $CONFIG_FILE", + "echo Sleeping for $TIMEOUT to prevent rate limiting", + "sleep $TIMEOUT", ], env = { "BINARY": cmd.executable("bunq2ynab_binary"), "OP": cmd.executable("//tools/onepassword:op"), - "CONFIG_FILE_TPL": cmd.file("config.json.tpl"), + "CONFIG_DEV_FILE_TPL": cmd.file("config-dev.json.tpl"), + "CONFIG_PROD_FILE_TPL": cmd.file("config-prod.json.tpl"), }, ) @@ -107,8 +116,8 @@ py_binary( task( name = "generate_oauth_token", cmds = [ - "export CLIENT_ID=$($OP read op://vgijssel-dev/bunq2ynab_oauth/username)", - "export CLIENT_SECRET=$($OP read op://vgijssel-dev/bunq2ynab_oauth/credential)", + "export CLIENT_ID=$($OP read op://vgijssel-prod/bunq2ynab_oauth/username)", + "export CLIENT_SECRET=$($OP read op://vgijssel-prod/bunq2ynab_oauth/credential)", "$BINARY --oauth-client-id $CLIENT_ID --oauth-client-secret $CLIENT_SECRET", ], env = { @@ -121,6 +130,7 @@ py_image( name = "bunq2ynab_image", base = "//tools/python:python_base_image_file", binary = ":bunq2ynab", + labels = "container-labels.txt", platforms = [ "//:python_container_linux_amd64", "//:python_container_linux_arm64", ], @@
-140,7 +150,7 @@ task_test( name = "bunq2ynab_image_test", cmds = [ cmd.executable("bunq2ynab_image.load"), - "docker run --rm --env OP_SERVICE_ACCOUNT_TOKEN=$ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_DEV localhost/bunq2ynab:latest", + "docker run --rm --env OP_SERVICE_ACCOUNT_TOKEN=$ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_DEV --env TIMEOUT=5 localhost/bunq2ynab:latest", ], exec_properties = { "workload-isolation-type": "firecracker", @@ -161,45 +171,63 @@ publish_github_release( release = ":release", ) -release( - name = "release", - changelog_file = "CHANGELOG.md", - publish_cmds = [ - ":bunq2ynab_image_push", - ":publish_github_release", - ], - release_name = "bunq2ynab", - target = ":bunq2ynab_image", - version_file = "version.txt", - deps = [ - "@rules_task//:release", - ], +# Copied from https://github.com/bazel-contrib/rules_oci/blob/375e5dbf3b2c50e83c9df0d0dc16a027984dab77/examples/env_inheritance/BUILD.bazel#L28C1-L34 +genrule( + name = "hash", + srcs = [":bunq2ynab_image"], + outs = ["hash.txt"], + cmd = """ + prefix_to_remove="sha256:" + digest=$$($(JQ_BIN) -r '.manifests[0].digest' $(location :bunq2ynab_image)/index.json) + digest_without_prefix=$$(echo $$digest | sed "s/$$prefix_to_remove//") + echo "dev-$$digest_without_prefix" > $@ + """, + toolchains = ["@jq_toolchains//:resolved_toolchain"], ) -oci_push( - name = "bunq2ynab_image_push_oci", +publish_oci_image( + name = "publish_oci_image", + before_cmds = [ + "export OP_SERVICE_ACCOUNT_TOKEN=${ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_PROD:-}", + "export REGISTRY_LOGIN=$($OP read op://vgijssel-prod/github-container-registry/username)", + "export REGISTRY_PASSWORD=$($OP read op://vgijssel-prod/github-container-registry/credential)", + "docker login ghcr.io --username $REGISTRY_LOGIN --password $REGISTRY_PASSWORD", + ], + env = { + "OP": cmd.executable("//tools/onepassword:op"), + }, image = ":bunq2ynab_image", remote_tags = "version.txt", repository = "ghcr.io/vgijssel/setup/bunq2ynab", ) -task( - name = "bunq2ynab_image_push", - cmds = [ +publish_oci_image( + name = "publish_oci_image-dev", + before_cmds = [ "export OP_SERVICE_ACCOUNT_TOKEN=${ONEPASSWORD_SERVICE_ACCOUNT_TOKEN_PROD:-}", - "export IMAGE_TAG=$(cat $VERSION_FILE)", - "export IMAGE=ghcr.io/vgijssel/setup/bunq2ynab:$IMAGE_TAG", "export REGISTRY_LOGIN=$($OP read op://vgijssel-prod/github-container-registry/username)", "export REGISTRY_PASSWORD=$($OP read op://vgijssel-prod/github-container-registry/credential)", "docker login ghcr.io --username $REGISTRY_LOGIN --password $REGISTRY_PASSWORD", - "export CONTAINER_EXISTS=$($REGCTL image inspect $IMAGE > /dev/null 2>&1 && echo true || echo false)", - "if [ $CONTAINER_EXISTS = true ]; then echo 'Image already exists, exitting.'; exit 0; fi", - "$PUSH_IMAGE", ], env = { "OP": cmd.executable("//tools/onepassword:op"), - "REGCTL": cmd.executable("//tools/regctl"), - "VERSION_FILE": cmd.file("version.txt"), - "PUSH_IMAGE": cmd.executable(":bunq2ynab_image_push_oci"), }, + image = ":bunq2ynab_image", + remote_tags = ":hash", + repository = "ghcr.io/vgijssel/setup/bunq2ynab-dev", +) + +release( + name = "release", + changelog_file = "CHANGELOG.md", + publish_cmds = [ + ":publish_oci_image", + ":publish_github_release", + ], + release_name = "bunq2ynab", + target = ":bunq2ynab_image", + version_file = "version.txt", + deps = [ + "@rules_task//:release", + ], ) diff --git a/tools/bunq2ynab/config.json.tpl b/tools/bunq2ynab/config-dev.json.tpl similarity index 58% rename from tools/bunq2ynab/config.json.tpl rename to 
tools/bunq2ynab/config-dev.json.tpl index 9d2fe3b31..7a681d4e7 100644 --- a/tools/bunq2ynab/config.json.tpl +++ b/tools/bunq2ynab/config-dev.json.tpl @@ -1,4 +1,4 @@ { - "api_token": "op://vgijssel-dev/bunq_api_key/credential", + "api_token": "op://vgijssel-dev/bunq_api_token/credential", "personal_access_token": "op://vgijssel-dev/ynab_personal_access_token/credential" } diff --git a/tools/bunq2ynab/config-prod.json.tpl b/tools/bunq2ynab/config-prod.json.tpl new file mode 100644 index 000000000..b46145e1f --- /dev/null +++ b/tools/bunq2ynab/config-prod.json.tpl @@ -0,0 +1,4 @@ +{ + "api_token": "op://vgijssel-prod/bunq_api_token/credential", + "personal_access_token": "op://vgijssel-prod/ynab_personal_access_token/credential" +} diff --git a/tools/bunq2ynab/container-labels.txt b/tools/bunq2ynab/container-labels.txt new file mode 100644 index 000000000..f27e885d9 --- /dev/null +++ b/tools/bunq2ynab/container-labels.txt @@ -0,0 +1 @@ +org.opencontainers.image.source=https://github.com/vgijssel/setup \ No newline at end of file diff --git a/tools/docker/docker.bzl b/tools/docker/docker.bzl deleted file mode 100644 index 63a9d404b..000000000 --- a/tools/docker/docker.bzl +++ /dev/null @@ -1,36 +0,0 @@ -""" -For quickly loading and running docker images built by Bazel. -""" - -load("@rules_task//task:defs.bzl", "cmd", "task") - -def docker_load(name, image, **kwargs): - """ - Loads a docker image and return the image name. - """ - image_label = "{}.tar".format(image) - image_sha_label = "{}.json.sha256".format(image) - - task( - name = name, - cmds = [ - """ - DOCKER_DIGEST_FILE=$image_sha_label - DOCKER_DIGEST=$(cat $DOCKER_DIGEST_FILE) - DOCKER_LOAD_FILE=$image_label - - if ! docker image inspect $DOCKER_DIGEST > /dev/null 2>&1 ; then - docker load --input $DOCKER_LOAD_FILE >&2 - else - echo Image already exists >&2 - fi - - echo $DOCKER_DIGEST - """, - ], - env = { - "image_label": cmd.file(image_label), - "image_sha_label": cmd.file(image_sha_label), - }, - **kwargs - ) diff --git a/tools/docker/docker_load.bzl b/tools/docker/docker_load.bzl new file mode 100644 index 000000000..862b43e7c --- /dev/null +++ b/tools/docker/docker_load.bzl @@ -0,0 +1,37 @@ +""" +For quickly loading and running docker images built by Bazel. 
+""" + +load("@rules_task//task:defs.bzl", "cmd", "task") +load(":docker_local_tar.bzl", "docker_local_tar") + +def docker_load(name, tag, image, format = "docker"): + local_tar_name = "{}.local_tar".format(name) + + docker_local_tar( + name = local_tar_name, + image = image, + tag = tag, + format = format, + ) + + # From https://stackoverflow.com/questions/72945407/how-do-i-import-and-run-a-multi-platform-oci-image-in-docker-for-macos + # We need to load the multi-arch image using regctl + # Export the platform specific digest into a tar + # And load that tar into the daemon + task( + name = name, + cmds = [ + "docker load < $LOCAL_TAR 1>&2", + "echo localhost/{}".format(tag), + ], + env = { + "LOCAL_TAR": cmd.file(local_tar_name), + "REGCTL": cmd.executable("@rules_release//tools/regctl"), + }, + exec_properties = { + "workload-isolation-type": "firecracker", + "init-dockerd": "true", + "recycle-runner": "true", + }, + ) diff --git a/tools/docker/docker_local_tar.bzl b/tools/docker/docker_local_tar.bzl new file mode 100644 index 000000000..9b034f518 --- /dev/null +++ b/tools/docker/docker_local_tar.bzl @@ -0,0 +1,45 @@ +load("@rules_oci//oci:defs.bzl", "oci_tarball") + +# From https://stackoverflow.com/questions/72945407/how-do-i-import-and-run-a-multi-platform-oci-image-in-docker-for-macos +# We need to load the multi-arch image using regctl +# Export the platform specific digest into a tar +# And load that tar into the daemon +def docker_local_tar(name, image, tag, format = "docker"): + tarball_name = "{}.oci_tarball".format(name) + out_file = "{}.tar".format(name) + + oci_tarball( + name = tarball_name, + image = image, + repo_tags = [tag], + format = format, + ) + + native.genrule( + name = name, + srcs = [ + tarball_name, + ], + outs = [ + out_file, + ], + tools = [ + "@rules_release//tools/regctl", + ], + cmd = """ + regctl=$(location @rules_release//tools/regctl) + tarball=$(location {tarball_name}) + $$regctl image import ocidir://{tag} $$tarball + digest=$$($$regctl image digest --platform local ocidir://{tag}) + export local_tarball=$@ + $$regctl image export ocidir://{tag}@$$digest $$local_tarball + """.format( + tarball_name = tarball_name, + tag = tag, + ), + exec_properties = { + "workload-isolation-type": "firecracker", + "init-dockerd": "true", + "recycle-runner": "true", + }, + ) diff --git a/tools/docker/docker_run_and_commit.bzl b/tools/docker/docker_run_and_commit.bzl new file mode 100644 index 000000000..6b3030421 --- /dev/null +++ b/tools/docker/docker_run_and_commit.bzl @@ -0,0 +1,57 @@ +""" +For quickly loading and running docker images built by Bazel. 
+""" + +load(":docker_local_tar.bzl", "docker_local_tar") +load("@bazel_skylib//rules:write_file.bzl", "write_file") + +def docker_run_and_commit(name, cmd, image, format = "docker", timeout = 300): + local_tar_name = "{}.local_tar".format(name) + script_name = "{}.script".format(name) + script_file = "{}.sh".format(script_name) + out_file = "{}.tar".format(name) + tag = "{}:latest".format(name.replace("_", "-")) + + write_file( + name = script_name, + out = script_file, + content = [ + "#!/usr/bin/env bash", + "set -euo pipefail", + cmd, + ], + is_executable = True, + ) + + docker_local_tar( + name = local_tar_name, + image = image, + tag = name, + format = format, + ) + + native.genrule( + name = name, + srcs = [script_name, local_tar_name], + outs = [out_file], + cmd = """ + script=$$(pwd)/$(location {script_name}) + local_tar=$(location {local_tar_name}) + docker load -i $$local_tar + container_id=$$(docker run --rm -v $$script:/tmp/script.sh --detach --entrypoint="" localhost/{tag} sleep {timeout}) + docker exec $$container_id /tmp/script.sh + docker export $$container_id --output $@ + docker rm -f $$container_id + """.format( + script_name = script_name, + local_tar_name = local_tar_name, + tag = tag, + cmd = cmd, + timeout = timeout, + ), + exec_properties = { + "workload-isolation-type": "firecracker", + "init-dockerd": "true", + "recycle-runner": "true", + }, + ) diff --git a/tools/python/defs.bzl b/tools/python/defs.bzl index 1da8d42a9..780de947f 100644 --- a/tools/python/defs.bzl +++ b/tools/python/defs.bzl @@ -1,8 +1,7 @@ load("@aspect_bazel_lib//lib:tar.bzl", "mtree_spec", "tar") -load("@rules_oci//oci:defs.bzl", "oci_image", "oci_image_index", "oci_tarball") +load("@rules_oci//oci:defs.bzl", "oci_image", "oci_image_index") load("@aspect_bazel_lib//lib:transitions.bzl", "platform_transition_filegroup") -load("@rules_task//task:defs.bzl", "cmd", "task") -load("@local_config_platform//:constraints.bzl", "HOST_CONSTRAINTS") +load("//tools/docker:docker_load.bzl", "docker_load") def py_image_layer(name, binary, prefix = "", **kwargs): mtree_spec_name = "{}_mtree".format(name) @@ -28,7 +27,7 @@ def py_image_layer(name, binary, prefix = "", **kwargs): mtree = prefixed_mtree_spec_name, ) -def py_image(name, base, binary, platforms, prefix = ""): +def py_image(name, base, binary, platforms, prefix = "", labels = None): binary_name = Label(binary).name package_name = native.package_name() entrypoint = ["/{}{}/{}".format(prefix, package_name, binary_name)] @@ -37,11 +36,6 @@ def py_image(name, base, binary, platforms, prefix = ""): image_name = "{}.image".format(image_index_name) image_load_name = "{}.load".format(image_index_name) image_python_layer_name = "{}_python_layer".format(image_index_name) - tarball_name = "{}.tarball".format(image_index_name) - - repo_tags = [ - "{}:{}".format(binary_name, "latest"), - ] py_image_layer( name = image_python_layer_name, @@ -56,6 +50,7 @@ def py_image(name, base, binary, platforms, prefix = ""): tars = [ image_python_layer_name, ], + labels = labels, ) transitioned_images = [] @@ -77,34 +72,9 @@ def py_image(name, base, binary, platforms, prefix = ""): images = transitioned_images, ) - oci_tarball( - name = tarball_name, + docker_load( + name = image_load_name, + tag = "{}:latest".format(binary_name), image = image_index_name, - repo_tags = repo_tags, format = "oci", ) - - # From https://stackoverflow.com/questions/72945407/how-do-i-import-and-run-a-multi-platform-oci-image-in-docker-for-macos - # We need to load the multi-arch image using 
regctl - # Export the platform specific digest into a tar - # And load that tar into the daemon - task( - name = image_load_name, - cmds = [ - "$REGCTL image import ocidir://{} $TARBALL".format(binary_name), - "digest=$($REGCTL image digest --platform local ocidir://{})".format(binary_name), - "export LOCAL_TARBALL=$(pwd)/{}.tar".format(binary_name), - "$REGCTL image export ocidir://{}@$digest $LOCAL_TARBALL".format(binary_name), - {"defer": "rm -f $LOCAL_TARBALL"}, - "docker load < $LOCAL_TARBALL", - ], - env = { - "TARBALL": cmd.file(tarball_name), - "REGCTL": cmd.executable("//tools/regctl:regctl"), - }, - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - ) diff --git a/tools/ubuntu/BUILD.bazel b/tools/ubuntu/BUILD.bazel deleted file mode 100644 index 4f3ce7fe3..000000000 --- a/tools/ubuntu/BUILD.bazel +++ /dev/null @@ -1,51 +0,0 @@ -load("@io_bazel_rules_docker//container:container.bzl", "container_image") -load("@io_bazel_rules_docker//docker/util:run.bzl", "container_run_and_commit_layer") - -package(default_visibility = ["//visibility:public"]) - -ubuntu_base_image = select({ - "@platforms//cpu:aarch64": "@ubuntu_base_arm64//image", - "@platforms//cpu:x86_64": "@ubuntu_base_amd64//image", -}) - -container_image( - name = "ubuntu_base_image", - base = ubuntu_base_image, - env = { - "LANG": "C.UTF-8", - "LC_ALL": "C.UTF-8", - "container": "docker", - }, -) - -container_run_and_commit_layer( - name = "setup_snap", - commands = [ - "apt-get update", - "apt-get install -y snapd squashfuse fuse sudo lsb-release", - "systemctl enable snapd", - "useradd -m ubuntu -s /bin/bash", - "adduser ubuntu sudo", - "echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers", - ], - env = { - "DEBIAN_FRONTEND": "noninteractive", - }, - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - image = ":ubuntu_base_image.tar", -) - -container_image( - name = "ubuntu_snap_base_image", - base = ":ubuntu_base_image.tar", - env = { - "PATH": "/snap/bin:$$PATH", - }, - layers = [ - ":setup_snap", - ], -) diff --git a/workstation/BUILD.bazel b/workstation/BUILD.bazel index 54702c8ce..98397997e 100644 --- a/workstation/BUILD.bazel +++ b/workstation/BUILD.bazel @@ -1,140 +1,136 @@ -load("//tools/pyinfra:defs.bzl", "pyinfra_run") -load("@io_bazel_rules_docker//docker/package_managers:install_pkgs.bzl", "install_pkgs") -load("@io_bazel_rules_docker//docker/package_managers:download_pkgs.bzl", "download_pkgs") -load("@io_bazel_rules_docker//container:container.bzl", "container_image") -load("@io_bazel_rules_docker//docker/util:run.bzl", "container_run_and_commit_layer") -load("@rules_pkg//:pkg.bzl", "pkg_tar") -load("@rules_task//task:defs.bzl", "cmd", "task") +# load("//tools/pyinfra:defs.bzl", "pyinfra_run") +# load("@rules_pkg//:pkg.bzl", "pkg_tar") +# load("@rules_task//task:defs.bzl", "cmd", "task") -pyinfra_run( - name = "provision", - srcs = [ - "deploys/editor/tasks/install_editor.py", - "deploys/languages/tasks/install_languages.py", - "deploys/ssh/tasks/install_ssh.py", - "deploys/terminal/tasks/install_terminal.py", - "deploys/utilities/tasks/install_utilities.py", - "deploys/workflow/tasks/install_workflow.py", - "helpers/home_link.py", - "helpers/macos.py", - "helpers/onepassword.py", - ], - args = [ - "--data install_editor=True", - "--data install_terminal=True", - "--data install_languages=True", - "--data install_ssh=True", - "--data install_utilities=True", - 
"--data install_workflow=True", - ], - data = [ - "deploys/editor/files/keybindings.json", - "deploys/editor/files/settings.json", - "deploys/languages/files/tool-versions", - "deploys/ssh/files/config/1Password/ssh/agent.toml", - "deploys/ssh/files/gitconfig", - "deploys/ssh/files/ssh/config", - "deploys/terminal/files/bash_profile", - "deploys/terminal/files/bashrc", - "deploys/terminal/files/config/atuin/config.toml", - "deploys/terminal/files/profile", - "deploys/terminal/files/sheldon/plugins.lock", - "deploys/terminal/files/sheldon/plugins.toml", - "deploys/terminal/files/shell_snippets/benchmark.sh", - "deploys/terminal/files/terminal_env", - "deploys/terminal/files/zprofile", - "deploys/terminal/files/zshrc", - ], - deploy = "deploy.py", - inventory = "inventory.py", -) - -inspec_files = select({ - "@platforms//cpu:aarch64": ["@inspec_arm64//file"], - "@platforms//cpu:x86_64": ["@inspec_amd64//file"], -}) +# pyinfra_run( +# name = "provision", +# srcs = [ +# "deploys/editor/tasks/install_editor.py", +# "deploys/languages/tasks/install_languages.py", +# "deploys/ssh/tasks/install_ssh.py", +# "deploys/terminal/tasks/install_terminal.py", +# "deploys/utilities/tasks/install_utilities.py", +# "deploys/workflow/tasks/install_workflow.py", +# "helpers/home_link.py", +# "helpers/macos.py", +# "helpers/onepassword.py", +# ], +# args = [ +# "--data install_editor=True", +# "--data install_terminal=True", +# "--data install_languages=True", +# "--data install_ssh=True", +# "--data install_utilities=True", +# "--data install_workflow=True", +# ], +# data = [ +# "deploys/editor/files/keybindings.json", +# "deploys/editor/files/settings.json", +# "deploys/languages/files/tool-versions", +# "deploys/ssh/files/config/1Password/ssh/agent.toml", +# "deploys/ssh/files/gitconfig", +# "deploys/ssh/files/ssh/config", +# "deploys/terminal/files/bash_profile", +# "deploys/terminal/files/bashrc", +# "deploys/terminal/files/config/atuin/config.toml", +# "deploys/terminal/files/profile", +# "deploys/terminal/files/sheldon/plugins.lock", +# "deploys/terminal/files/sheldon/plugins.toml", +# "deploys/terminal/files/shell_snippets/benchmark.sh", +# "deploys/terminal/files/terminal_env", +# "deploys/terminal/files/zprofile", +# "deploys/terminal/files/zshrc", +# ], +# deploy = "deploy.py", +# inventory = "inventory.py", +# ) -container_image( - name = "loose_debs_image", - base = "//tools/ubuntu:ubuntu_base_image.tar", - files = inspec_files, -) +# inspec_files = select({ +# "@platforms//cpu:aarch64": ["@inspec_arm64//file"], +# "@platforms//cpu:x86_64": ["@inspec_amd64//file"], +# }) -container_run_and_commit_layer( - name = "loose_debs_layer", - commands = [ - "dpkg -i /inspec.deb", - "inspec --chef-license=accept", - ], - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - image = ":loose_debs_image.tar", -) +# container_image( +# name = "loose_debs_image", +# base = "//tools/ubuntu:ubuntu_base_image.tar", +# files = inspec_files, +# ) -download_pkgs( - name = "debs", - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - image_tar = "//tools/ubuntu:ubuntu_base_image.tar", - packages = [ - "openssh-client", - ], -) +# container_run_and_commit_layer( +# name = "loose_debs_layer", +# commands = [ +# "dpkg -i /inspec.deb", +# "inspec --chef-license=accept", +# ], +# exec_properties = { +# "workload-isolation-type": "firecracker", +# "init-dockerd": "true", +# 
"recycle-runner": "true", +# }, +# image = ":loose_debs_image.tar", +# ) -install_pkgs( - name = "debs_installed", - exec_properties = { - "workload-isolation-type": "firecracker", - "init-dockerd": "true", - "recycle-runner": "true", - }, - image_tar = "//tools/ubuntu:ubuntu_base_image.tar", - installables_tar = ":debs.tar", - output_image_name = "debs_installed", -) +# download_pkgs( +# name = "debs", +# exec_properties = { +# "workload-isolation-type": "firecracker", +# "init-dockerd": "true", +# "recycle-runner": "true", +# }, +# image_tar = "//tools/ubuntu:ubuntu_base_image.tar", +# packages = [ +# "openssh-client", +# ], +# ) -pkg_tar( - name = "inspec_tar", - srcs = [ - "controls/workstation.rb", - "inspec.yml", - ], - package_dir = "/project", - # Otherwise all directories are flattened: - # - strip_prefix = ".", -) +# install_pkgs( +# name = "debs_installed", +# exec_properties = { +# "workload-isolation-type": "firecracker", +# "init-dockerd": "true", +# "recycle-runner": "true", +# }, +# image_tar = "//tools/ubuntu:ubuntu_base_image.tar", +# installables_tar = ":debs.tar", +# output_image_name = "debs_installed", +# ) -container_image( - name = "inspec_image", - base = ":debs_installed.tar", - layers = [":loose_debs_layer"], - tars = [ - ":inspec_tar", - ], - workdir = "/project", -) +# pkg_tar( +# name = "inspec_tar", +# srcs = [ +# "controls/workstation.rb", +# "inspec.yml", +# ], +# package_dir = "/project", +# # Otherwise all directories are flattened: +# # +# strip_prefix = ".", +# ) -# docker_load_and_run( -# name = "inspec_runner", -# command = "inspec exec . -t ssh://$$USER@host.docker.internal -i /root/.ssh/id_rsa --shell --shell-command='/bin/sh' --shell-options='--login'", -# docker_args = [ -# "--volume=$$BUILD_WORKSPACE_DIRECTORY/tmp/remote_key/id_rsa:/root/.ssh/id_rsa", +# container_image( +# name = "inspec_image", +# base = ":debs_installed.tar", +# layers = [":loose_debs_layer"], +# tars = [ +# ":inspec_tar", # ], -# image = ":inspec_image", +# workdir = "/project", # ) -task( - name = "test", - cmds = [ - cmd.executable(":provision"), - cmd.executable("//tools/macos:macos-remote-setup"), - # cmd.executable(":inspec_runner"), - ], -) +# # docker_load_and_run( +# # name = "inspec_runner", +# # command = "inspec exec . -t ssh://$$USER@host.docker.internal -i /root/.ssh/id_rsa --shell --shell-command='/bin/sh' --shell-options='--login'", +# # docker_args = [ +# # "--volume=$$BUILD_WORKSPACE_DIRECTORY/tmp/remote_key/id_rsa:/root/.ssh/id_rsa", +# # ], +# # image = ":inspec_image", +# # ) + +# task( +# name = "test", +# cmds = [ +# cmd.executable(":provision"), +# cmd.executable("//tools/macos:macos-remote-setup"), +# # cmd.executable(":inspec_runner"), +# ], +# )