flash extra.json manifest #165

Closed · wants to merge 44 commits into master from agnos-extra
Commits (44)
All 44 commits are by incognitojam.

f9320a6  optimize system image (Aug 11, 2023)
deda4ea  tool path (Aug 11, 2023)
07cf8be  cleanup script (Aug 11, 2023)
9758d8f  fix (Aug 11, 2023)
eadc225  . (Aug 11, 2023)
259addc  extras script (Aug 11, 2023)
66ddae5  cleanup (Aug 11, 2023)
8c4ffdf  fix (Aug 11, 2023)
1981231  remove (Aug 11, 2023)
1d46372  OTA_DIR (Aug 11, 2023)
a34157a  bash is easier (Aug 11, 2023)
c4135a4  TODO (Aug 13, 2023)
0137907  checksum (Aug 13, 2023)
cc64f40  fix (Aug 13, 2023)
a91a7be  fixes (Aug 13, 2023)
288cf53  . (Aug 13, 2023)
d920b1d  cleanup (Aug 13, 2023)
12c577b  more fixes (Aug 13, 2023)
d395fa1  extras (Aug 13, 2023)
acdc83e  cleanup (Aug 13, 2023)
354bc1e  system size is weird (Aug 13, 2023)
d3f44aa  cleanup (Aug 13, 2023)
9d7a403  push (Aug 13, 2023)
8af8d4f  calculate optimized hash (Aug 13, 2023)
8efc6a6  cleanup (Aug 13, 2023)
fa1ad25  Merge remote-tracking branch 'origin/master' into agnos-extra (Aug 16, 2023)
2a80242  cleanup (Aug 16, 2023)
8dd26f6  better (Aug 16, 2023)
ec75b98  . (Aug 16, 2023)
94e1e81  more cleanup (Aug 16, 2023)
5d6b75a  fixes (Aug 16, 2023)
c6a2583  add -optimized suffix to file name (Aug 16, 2023)
7d95523  fixes (Aug 16, 2023)
a2399bd  cleanup (Aug 16, 2023)
a3a1223  CI (Aug 16, 2023)
c97d7ad  mv (Aug 16, 2023)
903f0e7  . (Aug 16, 2023)
456b6d7  consistent (Aug 17, 2023)
9430673  remove TODO (Aug 17, 2023)
34de4ce  cleaner (Aug 17, 2023)
8159d16  Merge remote-tracking branch 'origin/master' into agnos-extra (Aug 17, 2023)
4403c5e  not quite long enough (Aug 17, 2023)
5ab471c  less (Aug 18, 2023)
58b3dae  name (Aug 20, 2023)
14 changes: 14 additions & 0 deletions .github/workflows/tests.yaml
@@ -15,3 +15,17 @@ jobs:
        python-version: '3.11.4'
    - run: 'pip install pre-commit'
    - run: 'pre-commit run --all'

  extra_ota:
    name: check extra manifest is valid JSON
    runs-on: ubuntu-20.04
    timeout-minutes: 9
    steps:
    - uses: actions/checkout@v3
    - uses: actions/setup-python@v4
      with:
        python-version: '3.11.4'
    - run: |
        internal/package_extra.sh https://raw.githubusercontent.com/commaai/openpilot/master/system/hardware/tici/agnos.json
        cat output/ota/extra.json | jq empty
        cat output/ota/extra-staging.json | jq empty
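
For reference, this CI check can be reproduced locally; a minimal sketch, assuming jq, wget, xz, gzip, and Python are available:

# Build the extra manifests from the upstream agnos.json, then check that both parse as JSON.
internal/package_extra.sh https://raw.githubusercontent.com/commaai/openpilot/master/system/hardware/tici/agnos.json
jq empty output/ota/extra.json
jq empty output/ota/extra-staging.json
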
24 changes: 24 additions & 0 deletions internal/extra_push.sh
@@ -0,0 +1,24 @@
#!/bin/bash -e

# Make sure we're in the correct directory
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
cd $DIR

# Constants
OTA_OUTPUT_DIR="$DIR/../output/ota"

source upload.sh

# Liftoff!
for NAME in $(cat $EXTRA_JSON | jq -r '.[] | .name'); do
HASH_RAW=$(cat $EXTRA_JSON | jq -r ".[] | select(.name == \"$NAME\") | .hash_raw")
SPARSE=$(cat $EXTRA_JSON | jq -r ".[] | select(.name == \"$NAME\") | .sparse")

if [ "$SPARSE" == "true" ]; then
upload_file "$NAME-$HASH_RAW-skip-chunks.img.gz"
else
upload_file "$NAME-$HASH_RAW.img.gz"
fi
done

echo "Done!"
48 changes: 6 additions & 42 deletions internal/ota_push.sh
@@ -5,38 +5,12 @@ DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
cd $DIR

# Constants
OTA_DIR="$DIR/../output/ota"
DATA_ACCOUNT="commadist"
OTA_OUTPUT_DIR="$DIR/../output/ota"

# Parse input
FOUND=0
if [ "$1" == "production" ]; then
OTA_JSON="$OTA_DIR/ota.json"
DATA_CONTAINER="agnosupdate"
FOUND=1
fi
if [ "$1" == "staging" ]; then
OTA_JSON="$OTA_DIR/ota-staging.json"
DATA_CONTAINER="agnosupdate-staging"
FOUND=1
fi
source upload.sh

if [ $FOUND == 0 ]; then
echo "Supply either 'production' or 'staging' as first argument!"
exit 1
fi

upload_file() {
local FILE_NAME=$1
local CLOUD_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER/$FILE_NAME"

echo "Copying $FILE_NAME to the cloud..."
azcopy cp --overwrite=false $OTA_DIR/$FILE_NAME "$CLOUD_PATH?$DATA_SAS_TOKEN"
echo " $CLOUD_PATH"
}

process_file() {
local NAME=$1
# Liftoff!
for NAME in $(cat $OTA_JSON | jq -r ".[] .name"); do
local HASH_RAW=$(cat $OTA_JSON | jq -r ".[] | select(.name == \"$NAME\") | .hash_raw")
upload_file "$NAME-$HASH_RAW.img.xz"

@@ -46,24 +20,14 @@ process_file() {

# echo "Copying system.caibx to the cloud..."
# local SYSTEM_CAIBX_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER/$CAIBX_FILE_NAME"
# azcopy cp --overwrite=false $OTA_DIR/$CAIBX_FILE_NAME "$SYSTEM_CAIBX_PATH?$DATA_SAS_TOKEN"
# azcopy cp --overwrite=false $OTA_OUTPUT_DIR/$CAIBX_FILE_NAME "$SYSTEM_CAIBX_PATH?$DATA_SAS_TOKEN"
# echo " $SYSTEM_CAIBX_PATH"

# echo "Copying system chunks to the cloud..."
# local SYSTEM_CHUNKS_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER"
# azcopy cp --recursive --overwrite=false $OTA_DIR/$CHUNKS_FOLDER "$SYSTEM_CHUNKS_PATH?$DATA_SAS_TOKEN"
# azcopy cp --recursive --overwrite=false $OTA_OUTPUT_DIR/$CHUNKS_FOLDER "$SYSTEM_CHUNKS_PATH?$DATA_SAS_TOKEN"
# echo " $SYSTEM_CHUNKS_PATH"
# fi
}

# Generate token
echo "Logging in..."
SAS_EXPIRY=$(date -u '+%Y-%m-%dT%H:%M:%SZ' -d '+1 hour')
DATA_SAS_TOKEN=$(az storage container generate-sas --as-user --auth-mode login --account-name $DATA_ACCOUNT --name $DATA_CONTAINER --https-only --permissions wr --expiry $SAS_EXPIRY --output tsv)

# Liftoff!
for name in $(cat $OTA_JSON | jq -r ".[] .name"); do
process_file $name
done

echo "Done!"
115 changes: 115 additions & 0 deletions internal/package_extra.sh
@@ -0,0 +1,115 @@
#!/bin/bash -e

# Make sure we're in the correct directory
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
cd $DIR

# Constants
ROOT="$DIR/../"
OUTPUT_DIR="$ROOT/output"
OTA_OUTPUT_DIR="$OUTPUT_DIR/ota"
TOOLS_DIR="$DIR/../tools"

AGNOS_UPDATE_URL=${AGNOS_UPDATE_URL:-https://commadist.azureedge.net/agnosupdate}
AGNOS_STAGING_UPDATE_URL=${AGNOS_STAGING_UPDATE_URL:-https://commadist.azureedge.net/agnosupdate-staging}
OTA_JSON="$OTA_OUTPUT_DIR/ota.json"
EXTRA_JSON="$OTA_OUTPUT_DIR/extra.json"
EXTRA_STAGING_JSON="$OTA_OUTPUT_DIR/extra-staging.json"

process_file() {
Contributor review comment: "seems like this file has a ton of duplication" (a possible refactor is sketched after this file's diff).

local NAME=$1
echo "Processing $NAME..."

local IMAGE_CONFIG=$(cat $OTA_JSON | jq -r ".[] | select(.name == \"$NAME\")")
local URL=$(echo $IMAGE_CONFIG | jq -r ".url")
local HASH=$(echo $IMAGE_CONFIG | jq -r ".hash")
local HASH_RAW=$(echo $IMAGE_CONFIG | jq -r ".hash_raw")
local SPARSE=$(echo $IMAGE_CONFIG | jq -r ".sparse")

local FILE_NAME=$NAME-$HASH_RAW.img
local IMAGE_FILE=$OTA_OUTPUT_DIR/$FILE_NAME
if [ ! -f $IMAGE_FILE ]; then
local XZ_FILE=$IMAGE_FILE.xz
echo " downloading..."
wget -O $XZ_FILE $URL &> /dev/null

echo " decompressing..."
xz --decompress --stdout $XZ_FILE > $IMAGE_FILE
fi

echo " hashing..."
local ACTUAL_HASH=$(sha256sum $IMAGE_FILE | cut -c 1-64)
if [ $ACTUAL_HASH != $HASH ]; then
echo "$NAME hash mismatch!" >&2
echo " Expected: $HASH" >&2
echo " Actual: $ACTUAL_HASH" >&2
exit 1
fi

if [ $SPARSE == "true" ]; then
local SKIP_CHUNKS_IMAGE_FILE=${IMAGE_FILE%.img}-skip-chunks.img
if [ ! -f $SKIP_CHUNKS_IMAGE_FILE ]; then
echo " optimizing..."
$TOOLS_DIR/simg2dontcare.py $IMAGE_FILE $SKIP_CHUNKS_IMAGE_FILE
fi
IMAGE_FILE=$SKIP_CHUNKS_IMAGE_FILE
FILE_NAME=${FILE_NAME%.img}-skip-chunks.img
HASH=$(sha256sum $IMAGE_FILE | cut -c 1-64)
fi

local GZ_FILE_NAME=$FILE_NAME.gz
local GZ_FILE=$OTA_OUTPUT_DIR/$GZ_FILE_NAME
if [ ! -f $GZ_FILE ]; then
echo " compressing..."
gzip -c $IMAGE_FILE > $GZ_FILE
fi

local SIZE=$(wc -c < $IMAGE_FILE)
cat <<EOF | tee -a $EXTRA_JSON $EXTRA_STAGING_JSON > /dev/null
{
"name": "$NAME",
"hash": "$HASH",
"hash_raw": "$HASH_RAW",
"size": $SIZE,
"sparse": $SPARSE,
EOF

cat <<EOF >> $EXTRA_JSON
"url": "$AGNOS_UPDATE_URL/$GZ_FILE_NAME"
EOF
cat <<EOF >> $EXTRA_STAGING_JSON
"url": "$AGNOS_STAGING_UPDATE_URL/$GZ_FILE_NAME"
EOF

cat <<EOF | tee -a $EXTRA_JSON $EXTRA_STAGING_JSON > /dev/null
},
EOF
}

cd $ROOT
mkdir -p $OTA_OUTPUT_DIR

# If given a manifest URL, download and use that
if [ ! -z "$1" ]; then
OTA_JSON=$(mktemp)
echo "Using provided manifest URL..."
wget -O $OTA_JSON $1 &> /dev/null
elif [ ! -f $OTA_JSON ]; then
echo "OTA file does not exist! Either provide a manifest URL to use or run package_ota.sh to create one" >&2
exit 1
fi

echo "[" > $EXTRA_JSON
echo "[" > $EXTRA_STAGING_JSON

for NAME in $(cat $OTA_JSON | jq -r '.[] | .name'); do
process_file $NAME
done

# remove trailing comma
sed -i "$ s/.$//" $EXTRA_JSON $EXTRA_STAGING_JSON

echo "]" >> $EXTRA_JSON
echo "]" >> $EXTRA_STAGING_JSON

echo "Done!"
3 changes: 1 addition & 2 deletions internal/package_ota.sh
@@ -32,15 +32,14 @@ process_file() {
local HASH_RAW=$HASH
if [ "$NAME" == "system" ]; then
echo "Converting system to raw..."
local FILE_RAW=/tmp/system.img.raw
local FILE_RAW=$TMP_DIR/system.img.raw
simg2img $FILE $FILE_RAW

echo "Hashing system raw..."
HASH_RAW=$(sha256sum $FILE_RAW | cut -c 1-64)
SIZE=$(wc -c < $FILE_RAW)
echo " $HASH_RAW ($SIZE bytes) (raw)"


# echo "Creating system casync files"
# casync make --compression=xz --store $OTA_OUTPUT_DIR/system-$HASH_RAW $OTA_OUTPUT_DIR/system-$HASH_RAW.caibx $FILE_RAW

41 changes: 41 additions & 0 deletions internal/upload.sh
@@ -0,0 +1,41 @@
if [ -z "$OTA_OUTPUT_DIR" ]; then
echo "OTA_OUTPUT_DIR is not set!" >&2
exit 1
fi

# Constants
DATA_ACCOUNT="commadist"

# Parse input
FOUND=0
if [ "$1" == "production" ]; then
OTA_JSON="$OTA_OUTPUT_DIR/ota.json"
EXTRA_JSON="$OTA_OUTPUT_DIR/extra.json"
DATA_CONTAINER="agnosupdate"
FOUND=1
fi
if [ "$1" == "staging" ]; then
OTA_JSON="$OTA_OUTPUT_DIR/ota-staging.json"
EXTRA_JSON="$OTA_OUTPUT_DIR/extra-staging.json"
DATA_CONTAINER="agnosupdate-staging"
FOUND=1
fi

if [ $FOUND == 0 ]; then
echo "Supply either 'production' or 'staging' as first argument!"
exit 1
fi

upload_file() {
local FILE_NAME=$1
local CLOUD_PATH="https://$DATA_ACCOUNT.blob.core.windows.net/$DATA_CONTAINER/$FILE_NAME"

echo "Copying $FILE_NAME to the cloud..."
azcopy cp --overwrite=false $OTA_OUTPUT_DIR/$FILE_NAME "$CLOUD_PATH?$DATA_SAS_TOKEN"
echo " $CLOUD_PATH"
}

# Generate token
echo "Logging in..."
SAS_EXPIRY=$(date -u '+%Y-%m-%dT%H:%M:%SZ' -d '+1 hour')
DATA_SAS_TOKEN=$(az storage container generate-sas --as-user --auth-mode login --account-name $DATA_ACCOUNT --name $DATA_CONTAINER --https-only --permissions wr --expiry $SAS_EXPIRY --output tsv)
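
Note that upload.sh is meant to be sourced rather than executed: it reads the caller's first argument to pick the container, requires OTA_OUTPUT_DIR to be set beforehand, and mints a short-lived SAS token (read/write, one-hour expiry). A minimal consumer sketch under those assumptions (the file name below is a placeholder):

#!/bin/bash -e
OTA_OUTPUT_DIR="$(pwd)/output/ota"    # must be set before sourcing, or the guard at the top aborts
source internal/upload.sh             # expects $1 to be 'production' or 'staging'; defines upload_file and DATA_SAS_TOKEN
upload_file "boot-1234abcd.img.gz"    # placeholder file name; it must already exist in $OTA_OUTPUT_DIR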