Skip to content

Make it build with mtl 2.3 (#131) #384

Make it build with mtl 2.3 (#131)

Make it build with mtl 2.3 (#131) #384

Workflow file for this run

# CI workflow: lint Haskell sources with HLint, build the Nix flake (runs the
# test suite), then exercise inferno-ml's TorchScript loader against a trained
# MNIST model and a downhole-autoencoder example model.
name: build
on:
  workflow_dispatch:
  push:
    branches: [main]
  pull_request:
    branches: [main]
# Cancel superseded in-flight runs for the same ref to save CI minutes.
concurrency:
  group: "${{ github.workflow }}-${{ github.ref }}"
  cancel-in-progress: true
jobs:
  build:
    runs-on: ubuntu-latest
    timeout-minutes: 300
    env:
      # Path where the trained MNIST TorchScript model is written/restored.
      MNIST_FNAME: /tmp/mnist/mnist.ts.pt
      # Used as the cache key below — bump to invalidate the cached model.
      MNIST_COMMIT: "94b288a631362aa44edc219eb8f54a7c39891169"
    steps:
      - uses: actions/checkout@v4
      # Lint code with HLint
      - name: Set up HLint
        uses: haskell-actions/hlint-setup@v2
        with:
          version: "3.8"
      - name: Run HLint
        uses: haskell-actions/hlint-run@v2
        with:
          path: '["inferno-core/", "inferno-lsp/", "inferno-ml/", "inferno-ml-server-types/", "inferno-types/", "inferno-vc/"]'
          fail-on: error
      # NOTE(review): install-nix-action@v18 and cachix-action@v12 are pinned
      # to old majors — consider upgrading, but verify cache behaviour first.
      - uses: cachix/install-nix-action@v18
        with:
          install_url: https://releases.nixos.org/nix/nix-2.13.3/install
          extra_nix_config: |
            fallback = true
            substituters = https://cache.nixos.org https://cache.iog.io
            trusted-public-keys = cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY= hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=
            narinfo-cache-negative-ttl = 60
      - uses: cachix/cachix-action@v12
        with:
          name: inferno
          authToken: "${{ secrets.CACHIX_TOKEN }}"
      - uses: DeterminateSystems/magic-nix-cache-action@main
      # Build inferno and run all tests
      - run: |
          nix build -L .#
      # Download and train MNIST model to test Inferno's torchscript model loader
      - name: Restore cached MNIST model
        id: cache-mnist
        uses: actions/cache/restore@v3
        with:
          path: ${{ env.MNIST_FNAME }}
          key: ${{ env.MNIST_COMMIT }}
      - name: Download and train MNIST (if not cached)
        if: steps.cache-mnist.outputs.cache-hit != 'true'
        run: |
          nix develop .#pytorch -c .github/torch.sh
      - name: Cache MNIST model
        uses: actions/cache/save@v3
        with:
          path: ${{ env.MNIST_FNAME }}
          key: ${{ steps.cache-mnist.outputs.cache-primary-key }}
      - name: Run inferno on MNIST
        working-directory: inferno-ml/test/
        run: |
          cp ${MNIST_FNAME} ./
          nix run .#inferno-ml -- mnist.inferno
      # Downhole autoencoder example
      - name: Download and run downhole autoencoder model
        working-directory: inferno-ml/test/
        run: |
          wget 'https://www.dropbox.com/s/gshxebydlwqvspj/downhole_autoencoder.ts.pt?dl=1' -O downhole_autoencoder.ts.pt
          nix run .#inferno-ml -- downhole-autoencoder.inferno