Initial work on Mac patching #12

Open: wants to merge 22 commits into base: master

Commits (22)
a5a29e2
Initial work on Mac patching
colincornaby Oct 16, 2023
8439911
Apply suggestions from code review
colincornaby Oct 15, 2023
3067891
Update urumanifest/github.py
colincornaby Oct 15, 2023
57e2584
OS is needed for pathname manipulation
colincornaby Oct 16, 2023
261367d
Removing magic number in bundle path setup
colincornaby Oct 16, 2023
2f250cd
Switching to rglob
colincornaby Oct 16, 2023
5bee7f0
Converting to named tuple
colincornaby Oct 16, 2023
9d2fc93
Feedback from code review
colincornaby Oct 23, 2023
392222a
Finishing implementing code review feedback
colincornaby Oct 23, 2023
92ed098
Syncing with latest Mac self patching changes
colincornaby May 26, 2024
620bb5a
Apply suggestions from code review
colincornaby Jun 2, 2024
87e533c
Fixing the script yielding the file name to unpack instead of the pat…
colincornaby Jun 30, 2024
38a32a9
Properly setting permissions of archived app bundles
colincornaby Jun 30, 2024
5d25b28
Cleaning up bundles adding multiple name entries
colincornaby Jun 30, 2024
927bf38
Adding Mac thin client manifests
colincornaby Jul 1, 2024
abbc6fe
Fixing bundle flag
colincornaby Jul 1, 2024
4fc98e2
Initial work on macBundleExternal/ Internal keys
colincornaby Jul 7, 2024
ca85d8d
Eliminating dir checks for bundle
colincornaby Jul 7, 2024
28e387d
Apply suggestions from code review
colincornaby Jul 10, 2024
f956772
Apply suggestions from code review
colincornaby Jul 15, 2024
8e14b72
Fixing typo in string literal
colincornaby Jul 15, 2024
c1ac1ed
Eliminating manual folder extension check
colincornaby Jul 15, 2024
33 changes: 27 additions & 6 deletions urumanifest/commit.py
@@ -23,6 +23,7 @@
import logging
from pathlib import Path
import shutil
import tarfile
from typing import Dict, NamedTuple, Optional, Set, Tuple, Union

from assets import Asset, AssetDatabase, AssetError, AssetEntry
@@ -32,8 +33,20 @@

_BUFFER_SIZE = 10 * 1024 * 1024

def _compress_asset(source_path: Path, output_path: Path):
def _add_to_tar_archive(info: tarfile.TarInfo):
    info.mode = 0o777
    return info

def _compress_asset(source_path: Path, output_path: Path, bundle: bool):
    output_path.parent.mkdir(parents=True, exist_ok=True)
    if bundle:
        with tarfile.TarFile.open(output_path, "w:gz") as tar:
            tar.gettarinfo(source_path, arcname="")
            tar.add(source_path, arcname="", filter=_add_to_tar_archive)
        with output_path.open("rb") as in_stream:
            h = hashlib.md5()
            _io_loop(in_stream, h.update)
        return h.hexdigest(), output_path.stat().st_size
    with source_path.open("rb") as in_stream:
        with gzip.open(output_path, "wb") as gz_stream:
            _io_loop(in_stream, gz_stream.write)
@@ -42,10 +55,15 @@ def _compress_asset(source_path: Path, output_path: Path):
        _io_loop(in_stream, h.update)
    return h.hexdigest(), output_path.stat().st_size

def _hash_asset(args: Tuple[Path, Path]) -> Tuple[Path, str, int]:
    server_path, source_path = args
def _hash_asset(args: Tuple[Path, Path, bool]) -> Tuple[Path, str, int]:
    server_path, source_path, mac_app_bundle = args
    # One day, we will not use such a vulnerable hashing algo...
    h = hashlib.md5()
    if mac_app_bundle:
        bundle_name = source_path.stem
        # Guess the executable path - we could get the exact name if we could unpack the Info.plist
        # in a cross platform way.
        source_path = source_path.joinpath("Contents", "MacOS", bundle_name)
    with source_path.open("rb") as in_stream:
        _io_loop(in_stream, h.update)
    return server_path, h.hexdigest(), source_path.stat().st_size
@@ -109,7 +127,9 @@ def on_compress(asset: manifest.ManifestEntry, future: concurrent.futures.Future
                      for i in compressed_assets)
        for server_path, staged_asset, source_asset, cached_asset in asset_iter:
            assert server_path.parent.name
            asset_output_path = output_path.joinpath(server_path).with_suffix(f"{server_path.suffix}.gz")

            output_suffix = "tgz" if staged_asset.flags & ManifestFlags.bundle else "gz"
            asset_output_path = output_path.joinpath(server_path).with_suffix(f"{server_path.suffix}.{output_suffix}")
            staged_asset.download_name = asset_output_path.relative_to(output_path)

            # While the old, sucky manifest generator was picky about what it compressed, we're not
@@ -120,7 +140,8 @@ def on_compress(asset: manifest.ManifestEntry, future: concurrent.futures.Future
            if staged_asset.flags & ManifestFlags.dirty or force:
                future = executor.submit(_compress_asset,
                                         source_asset.source_path,
                                         asset_output_path)
                                         asset_output_path,
                                         staged_asset.flags & ManifestFlags.bundle)
                future.add_done_callback(functools.partial(on_compress, staged_asset))
            else:
                staged_asset.download_hash = cached_asset.download_hash
@@ -282,7 +303,7 @@ def hash_staged_assets(source_assets: Dict[Path, Asset], staged_assets: Dict[Pat
                       ncpus: Optional[int] = None) -> None:
    logging.info("Hashing all staged assets...")
    with concurrent.futures.ProcessPoolExecutor(max_workers=ncpus) as executor:
        args = ((sp, source_assets[sp].source_path) for sp, sa in staged_assets.items() if not sa.flags & ManifestFlags.consumable)
        args = ((sp, source_assets[sp].source_path, sa.flags & ManifestFlags.bundle) for sp, sa in staged_assets.items() if not sa.flags & ManifestFlags.consumable)
        for server_path, h, sz in executor.map(_hash_asset, args, chunksize=64):
            logging.trace(f"{server_path}: {h}")
            staged_asset = staged_assets[server_path]
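
A side note on the inline comment above about Info.plist: Python's standard plistlib module can parse both XML and binary property lists on any platform, so the executable name would not have to be guessed from the bundle stem. A minimal sketch of such a helper (hypothetical, not part of this PR):

import plistlib
from pathlib import Path

def _bundle_executable(bundle_path: Path) -> Path:
    # Hypothetical helper: resolve the main executable of a .app bundle from its
    # Info.plist instead of assuming it matches the bundle stem.
    info_plist = bundle_path.joinpath("Contents", "Info.plist")
    with info_plist.open("rb") as fp:
        info = plistlib.load(fp)
    # Fall back to the stem, matching the guess made in _hash_asset above.
    executable = info.get("CFBundleExecutable", bundle_path.stem)
    return bundle_path.joinpath("Contents", "MacOS", executable)
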
12 changes: 12 additions & 0 deletions urumanifest/constants.py
@@ -45,6 +45,7 @@

# All gather sections that list installer prerequisites
gather_installers = frozenset(("prereq", "prereq64"))
mac_bundles = frozenset(("macBundleInternal", "macBundleExternal"))

class _manifests(NamedTuple):
    thin: str
@@ -63,6 +64,10 @@ class _manifests(NamedTuple):

    # Legacy -- to be deleted??? -- TransGaming Cider Wrapper (macOS)
    "mac": _manifests(None, None, "macExternal"),
    "macInternal": _manifests("MacThinInternal", None, "MacInternal"),
    "macExternal": _manifests("MacThinExternal", None, "MacExternal"),
Comment on lines +67 to +68

Owner: Are these legacy manifests?

Contributor Author: macInternal and macExternal are current. Should they be declared as something else?

Owner: They should probably be listed before the comment that says legacy manifests follow.

"macBundleInternal": _manifests("MacThinInternal", None, "MacInternal"),
"macBundleExternal": _manifests("MacThinExternal", None, "MacExternal"),
}

class _directorytuple(NamedTuple):
@@ -81,6 +86,10 @@ class _directorytuple(NamedTuple):
"external64": _directorytuple("", "client/windows_amd64/external"),
"internal": _directorytuple("", "client/windows_ia32/internal"),
"internal64": _directorytuple("", "client/windows_amd64/internal"),
"macexternal": _directorytuple("", "client/mac/external"),
"macinternal": _directorytuple("", "client/mac/internal"),
Comment on lines +89 to +90

Owner: The filename case doesn't match above. Also, just to make sure: we are assuming that Mac clients are universal binaries?

Contributor Author: Yes, that's the assumption. It wouldn't really make sense to have it be a universal binary locally and then a processor-specific binary.
That said, that will make things messy when Apple eventually gets around to cutting Intel support from their tools. But that eventuality could be dealt with like any other platform we discontinue support for.

Contributor: Note that currently CI is not generating universal binaries (because of vcpkg limitations). In theory we could figure out a way to lipo them together into a universal binary when we attach them to the published release.

Owner: Note to self: the gather_lut keys are expected to be all lowercase.

"macbundleexternal": _directorytuple("", "client/mac/external"),
"macbundleinternal": _directorytuple("", "client/mac/internal"),
"mac": _directorytuple("", "client/macos_ia32/external"),
"prereq": _directorytuple("", "dependencies/windows_ia32"),
"prereq64": _directorytuple("", "dependencies/windows_amd64"),
Expand All @@ -100,6 +109,7 @@ class ManifestFlags(enum.IntFlag):
    sound_cache_stereo = (1<<2)
    file_gzipped = (1<<3)
    installer = (1<<4)
    bundle = (1<<5)

    # Internal flags
    python_file_mod = (1<<16)
@@ -153,4 +163,6 @@ class PyToolsResultCodes(enum.IntEnum):
"plasma-windows-x64-external-release": "external64",
"plasma-windows-x86-internal-release": "internal",
"plasma-windows-x64-internal-release": "internal64",
"plasma-macos-x64-internal-release": "macInternal",
"plasma-macos-x64-external-release": "macExternal",
Comment on lines +166 to +167

Contributor: Just noting that these are Intel x86_64 binaries (which is probably the best option for the moment, but they are not universal because they do not include arm64).

Contributor Author: Yeah, that still needs to be done on the CI side. For now, I'm kind of assuming whatever the platforms should be is what is provided by CI.

}
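
On the universal-binary discussion above: if CI ever produces separate x86_64 and arm64 builds, they could in principle be merged with Apple's lipo tool before being attached to a release. A rough sketch under that assumption (macOS-only, hypothetical helper, not part of this PR):

import subprocess
from pathlib import Path

def make_universal(x86_64_exe: Path, arm64_exe: Path, output_exe: Path) -> None:
    # Hypothetical sketch: merge two single-architecture Mach-O executables into
    # one universal binary. Requires Apple's command line tools, so macOS only.
    output_exe.parent.mkdir(parents=True, exist_ok=True)
    subprocess.run(
        ["lipo", "-create", str(x86_64_exe), str(arm64_exe), "-output", str(output_exe)],
        check=True,
    )
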
1 change: 1 addition & 0 deletions urumanifest/dependencies.py
@@ -191,6 +191,7 @@ def track_client_dependency(client_path: Path, server_path: Path):

def track_manifest_dependency(client_path: Path, server_path: Path, category: str, manifest_names):
    flags = ManifestFlags.installer if category in gather_installers else 0
    flags |= ManifestFlags.bundle if category in mac_bundles else 0
    track_dependency(client_path, server_path, flags)
    for name in manifest_names:
        if name:
41 changes: 33 additions & 8 deletions urumanifest/github.py
@@ -21,6 +21,7 @@
import logging
import math
import json
import os
from pathlib import Path, PurePosixPath
import subprocess
import sys
@@ -372,6 +373,13 @@ def nuke_recurse(nuke: Path):
    database.current_sha = rev
    database.valid = True

class ArtifactInfo(NamedTuple):
    path: Path
    zipinfo: zipfile.ZipInfo
    name: str
    bundle: bool = False


def _unpack_artifact(staging_path: Path, database: _WorkflowDatabase, rev: str, name: str, artifact: tempfile.NamedTemporaryFile):
    logging.debug(f"Decompressing {name}.zip from {artifact.name}")

@@ -381,10 +389,19 @@ def _unpack_artifact(staging_path: Path, database: _WorkflowDatabase, rev: str,
    output_path.mkdir(parents=True, exist_ok=True)

    def iter_client_dir():
        for i in filter(lambda x: not x.is_dir(), archive.infolist()):
        for i in archive.infolist():
            member_path = PurePosixPath(i.filename)
            if len(member_path.parts) == 2 and member_path.parts[0] == "client":
                yield i
            client_folder_name = "client"
            if len(member_path.parts) >= 2 and member_path.parts[0] == client_folder_name:
                is_mac_app_bundle = member_path.parts[1].endswith(".app")
                if not i.is_dir() and len(member_path.parts) == 2:
                    yield ArtifactInfo(path=member_path.name, zipinfo=i, name=member_path.name)
                elif is_mac_app_bundle and not i.is_dir():
                    # remove "client/"
                    path = member_path.relative_to(client_folder_name)
                    yield ArtifactInfo(path=path, zipinfo=i, name=member_path.parts[1], bundle=True)



    desired_members = list(iter_client_dir())
    with tqdm_logging.tqdm_logging_redirect(
@@ -394,19 +411,27 @@ def iter_client_dir():
        leave=False
    ) as progress:
        for info in progress:
            logging.trace(f"Extracting {info.filename}")
            _unpack_member(output_path, archive, info)
            logging.trace(f"Extracting {info.zipinfo.filename}")
            _unpack_member(output_path, info.path, archive, info.zipinfo)

    gather_key = workflow_lut[name]
    gather_package = { gather_key: [PurePosixPath(i.filename).name for i in desired_members] }
    desired_files = [i.name for i in filter(lambda x: x.bundle == False, desired_members)]
    # Bundles might add multiple members with the same name - clean that up
    desired_bundles = list(set([i.name for i in list(filter(lambda x: x.bundle == True, desired_members))]))
    gather_package = { gather_key: desired_files }
    if desired_bundles:
        key = "macBundleExternal" if gather_key == "macExternal" else "macBundleInternal"
        gather_package[key] = desired_bundles
    with output_path.joinpath("control.json").open("w") as fp:
        json.dump(gather_package, fp, indent=2)

    database.workflow_gathers[name] = subdir_name

def _unpack_member(output_path: Path, archive: zipfile.ZipFile, info: zipfile.ZipInfo):
def _unpack_member(output_path: Path, output_subpath: Path, archive: zipfile.ZipFile, info: zipfile.ZipInfo):
    with archive.open(info, "r") as infile:
        with output_path.joinpath(Path(info.filename).name).open("wb") as outfile:
        full_path = output_path.joinpath(output_subpath)
        full_path.parent.mkdir(parents=True, exist_ok=True)
        with full_path.open("wb") as outfile:
            block = 1024 * 8
            while True:
                buf = infile.read(block)
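
To make the control.json change concrete: for a hypothetical macExternal artifact containing one loose file and one app bundle (names invented for illustration), the gather package written by _unpack_artifact would be shaped roughly like this:

# Hypothetical example of the gather package serialized to control.json.
# The .app name appears once even though the zip holds many members under it,
# because duplicate bundle names are de-duplicated via set() above.
gather_package = {
    "macExternal": ["resource.dat"],           # loose files directly under client/
    "macBundleExternal": ["UruExplorer.app"],  # .app bundles found under client/
}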