Commit

Merge pull request #903 from Xilinx/feature/fix_packages
Deprecate pkg_resources
auphelia authored Oct 6, 2023
2 parents 1bcf6d3 + d8a4048 commit 52d7fc2
Showing 19 changed files with 51 additions and 72 deletions.
2 changes: 1 addition & 1 deletion .isort.cfg
@@ -2,7 +2,7 @@
line_length=88
indent=' '
skip=.tox,.venv,build,dist
known_standard_library=setuptools,pkg_resources
known_standard_library=setuptools
known_test=pytest
known_first_party=finn
sections=FUTURE,STDLIB,TEST,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
8 changes: 4 additions & 4 deletions docker/quicktest.sh
@@ -6,16 +6,16 @@ cd $FINN_ROOT
# check if command line argument is empty or not present
if [ -z $1 ]; then
echo "Running quicktest: not (vivado or slow or board) with pytest-xdist"
python setup.py test --addopts "-m 'not (vivado or slow or vitis or board or notebooks)' --dist=loadfile -n $PYTEST_PARALLEL"
pytest -m 'not (vivado or slow or vitis or board or notebooks)' --dist=loadfile -n $PYTEST_PARALLEL
elif [ $1 = "main" ]; then
echo "Running main test suite: not (rtlsim or end2end) with pytest-xdist"
python setup.py test --addopts "-k 'not (rtlsim or end2end)' --dist=loadfile -n $PYTEST_PARALLEL"
pytest -k 'not (rtlsim or end2end)' --dist=loadfile -n $PYTEST_PARALLEL
elif [ $1 = "rtlsim" ]; then
echo "Running rtlsim test suite with pytest-parallel"
python setup.py test --addopts "-k rtlsim --workers $PYTEST_PARALLEL"
pytest -k rtlsim --workers $PYTEST_PARALLEL
elif [ $1 = "end2end" ]; then
echo "Running end2end test suite with no parallelism"
python setup.py test --addopts "-k end2end"
pytest -k end2end
elif [ $1 = "full" ]; then
echo "Running full test suite, each step with appropriate parallelism"
$0 main;
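With "python setup.py test" removed, the quicktest targets invoke pytest directly. A rough sketch (not part of this commit) of the same default selection driven through pytest's public API, assuming pytest-xdist and the PYTEST_PARALLEL variable exactly as in the script above:

import os
import pytest  # pytest-xdist assumed installed, as quicktest.sh already requires

# Programmatic equivalent of the default quicktest invocation above
n_workers = os.environ.get("PYTEST_PARALLEL", "auto")
exit_code = pytest.main([
    "-m", "not (vivado or slow or vitis or board or notebooks)",
    "--dist=loadfile",
    "-n", n_workers,
])
raise SystemExit(exit_code)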
11 changes: 6 additions & 5 deletions notebooks/end2end_example/bnn-pynq/cnv_end2end_example.ipynb
@@ -516,12 +516,13 @@
"metadata": {},
"outputs": [],
"source": [
"import pkg_resources as pk\n",
"import importlib_resources\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"\n",
"fn = pk.resource_filename(\"finn.qnn-data\", \"cifar10/cifar10-test-data-class3.npz\")\n",
"x = np.load(fn)[\"arr_0\"]\n",
"ref = importlib_resources.files(\"finn.qnn-data\") / \"cifar10/cifar10-test-data-class3.npz\"\n",
"with importlib_resources.as_file(ref) as fn:\n",
" x = np.load(fn)[\"arr_0\"]\n",
"x = x.reshape(3, 32,32).transpose(1, 2, 0)\n",
"plt.imshow(x)"
]
@@ -640,9 +641,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.6"
"version": "3.10.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}
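The notebook cell above shows the resource-access pattern adopted throughout this commit: importlib_resources.files() returns a Traversable for the packaged file, and as_file() yields a real filesystem path (extracted to a temporary file if the package were zipped), which np.load() needs. A minimal sketch of that pattern, with load_cifar10_sample as a hypothetical helper name that does not exist in FINN:

import importlib_resources
import numpy as np

def load_cifar10_sample():
    # hypothetical helper mirroring the notebook cell above
    ref = importlib_resources.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
    with importlib_resources.as_file(ref) as fn:
        # fn is guaranteed to exist on disk only inside this block
        return np.load(fn)["arr_0"]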
2 changes: 2 additions & 0 deletions requirements.txt
@@ -2,6 +2,7 @@ bitstring==3.1.7
clize==5.0.1
dataclasses-json==0.5.7
gspread==3.6.0
importlib-resources==6.1.0
ipython==8.12.2
numpy==1.24.1
onnx==1.13.0
@@ -13,6 +14,7 @@ psutil==5.9.4
pyscaffold==4.4
scipy==1.10.1
setupext-janitor>=1.1.2
setuptools==68.2.2
sigtools==4.0.1
toposort==1.7.0
vcdvcd==1.0.5
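requirements.txt now pins the importlib-resources backport and an explicit setuptools version. As a hedged aside (FINN itself simply pins the backport), code that wants to prefer the standard library on newer interpreters commonly falls back like this:

try:
    import importlib_resources  # backport, as pinned in requirements.txt
except ImportError:
    import importlib.resources as importlib_resources  # stdlib equivalent on Python 3.9+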
2 changes: 1 addition & 1 deletion run-docker.sh
@@ -102,7 +102,7 @@ DOCKER_INTERACTIVE=""

if [ "$1" = "test" ]; then
gecho "Running test suite (all tests)"
DOCKER_CMD="python setup.py test"
DOCKER_CMD="pytest"
elif [ "$1" = "quicktest" ]; then
gecho "Running test suite (non-Vivado, non-slow tests)"
DOCKER_CMD="quicktest.sh"
2 changes: 0 additions & 2 deletions setup.cfg
@@ -56,8 +56,6 @@ packages = find_namespace:
include_package_data = True
package_dir =
=src
# DON'T CHANGE THE FOLLOWING LINE! IT WILL BE UPDATED BY PYSCAFFOLD!
setup_requires = pyscaffold>=3.2a0,<3.3a0
# The usage of test_requires is discouraged, see `Dependency Management` docs
# tests_require = pytest; pytest-cov
# Require a specific Python version, e.g. Python 2.7 or >= 3.4
12 changes: 1 addition & 11 deletions setup.py
@@ -35,17 +35,7 @@
PyScaffold helps you to put up the scaffold of your new Python project.
Learn more under: https://pyscaffold.org/
"""
from pkg_resources import VersionConflict, require
from setuptools import setup

import sys

try:
require("setuptools>=38.3")
except VersionConflict:
print("Error: version of setuptools is too old (<38.3)!")
sys.exit(1)


if __name__ == "__main__":
setup(use_pyscaffold=True)
setup()
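setup.py drops the pkg_resources-based setuptools version guard rather than replacing it. If such a runtime check were still wanted, one pkg_resources-free sketch (an assumption, not part of this commit; it relies on the third-party packaging library) would be:

from importlib.metadata import version  # stdlib since Python 3.8
from packaging.version import Version   # assumed available; not a FINN requirement

if Version(version("setuptools")) < Version("38.3"):
    raise SystemExit("Error: version of setuptools is too old (<38.3)!")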
4 changes: 1 addition & 3 deletions src/finn/transformation/fpgadataflow/create_stitched_ip.py
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import json
import multiprocessing as mp
import os
@@ -499,7 +497,7 @@ def apply(self, model):
"[ipx::get_file_groups xilinx_simulationcheckpoint]" % block_name
)
# add a rudimentary driver mdd to get correct ranges in xparameters.h later on
example_data_dir = pk.resource_filename("finn.qnn-data", "mdd-data/")
example_data_dir = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/mdd-data"
copytree(example_data_dir, vivado_stitch_proj_dir + "/data")

#####
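Where a real on-disk directory is required (shutil.copytree cannot consume a zipped package resource), the commit resolves package data relative to the FINN_ROOT environment variable instead of pkg_resources. A minimal sketch of that pattern, assuming FINN_ROOT is set as it is inside the FINN Docker container:

import os

finn_root = os.environ["FINN_ROOT"]  # raises KeyError if the variable is not set
mdd_data_dir = os.path.join(finn_root, "src", "finn", "qnn-data", "mdd-data")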
11 changes: 5 additions & 6 deletions src/finn/transformation/fpgadataflow/make_pynq_driver.py
@@ -26,9 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


import pkg_resources as pk

import numpy as np
import os
import qonnx
@@ -89,8 +86,8 @@ def apply(self, model):
model.set_metadata_prop("pynq_driver_dir", pynq_driver_dir)

# create the base FINN driver -- same for all accels
driver_base_template = pk.resource_filename(
"finn.qnn-data", "templates/driver/driver_base.py"
driver_base_template = (
os.environ["FINN_ROOT"] + "/src/finn/qnn-data/templates/driver/driver_base.py"
)
driver_base_py = pynq_driver_dir + "/driver_base.py"
shutil.copy(driver_base_template, driver_base_py)
@@ -268,7 +265,9 @@ def apply(self, model):

# add validate.py to run full top-1 test (only for suitable networks)
validate_py = pynq_driver_dir + "/validate.py"
validate_template = pk.resource_filename("finn.qnn-data", "templates/driver/validate.py")
validate_template = (
os.environ["FINN_ROOT"] + "/src/finn/qnn-data/templates/driver/validate.py"
)
shutil.copy(validate_template, validate_py)

# generate weight files for runtime-writable layers
6 changes: 2 additions & 4 deletions src/finn/util/pyverilator.py
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import numpy as np
import os
import shutil
@@ -94,7 +92,7 @@ def file_to_basename(x):

# use custom version of axis infrastructure vh
# to enable Verilator to simulate AMD/Xilinx components (e.g DWC)
custom_vh = pk.resource_filename("finn.qnn-data", "verilog/custom_axis_infrastructure.vh")
custom_vh = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/verilog/custom_axis_infrastructure.vh"
shutil.copy(custom_vh, verilog_header_dir + "/axis_infrastructure_v1_1_0.vh")
for fn in all_verilog_srcs:
if fn.endswith(".vh"):
@@ -131,7 +129,7 @@ def verilator_fifosim(model, n_inputs, max_iters=100000000):
vivado_stitch_proj_dir = prepare_stitched_ip_for_verilator(model)
verilog_header_dir = vivado_stitch_proj_dir + "/pyverilator_vh"
build_dir = make_build_dir("verilator_fifosim_")
fifosim_cpp_fname = pk.resource_filename("finn.qnn-data", "cpp/verilator_fifosim.cpp")
fifosim_cpp_fname = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/cpp/verilator_fifosim.cpp"
with open(fifosim_cpp_fname, "r") as f:
fifosim_cpp_template = f.read()
assert len(model.graph.input) == 1, "Only a single input stream is supported"
8 changes: 4 additions & 4 deletions src/finn/util/test.py
@@ -26,10 +26,9 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import onnx
import onnx.numpy_helper as nph
@@ -137,8 +136,9 @@ def get_example_input(topology):
onnx_tensor = onnx.load_tensor_from_string(raw_i)
return nph.to_array(onnx_tensor)
elif topology == "cnv":
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
return input_tensor
else:
raise Exception("Unknown topology, can't return example input")
8 changes: 4 additions & 4 deletions tests/brevitas/test_brevitas_cnv.py
@@ -26,10 +26,9 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import os
import torch
@@ -65,8 +64,9 @@ def test_brevitas_cnv_export_exec(wbits, abits):
model = model.transform(RemoveStaticGraphInputs())
assert len(model.graph.input) == 1
assert len(model.graph.output) == 1
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
input_tensor = input_tensor / 255
assert input_tensor.shape == (1, 3, 32, 32)
# run using FINN-based execution
4 changes: 1 addition & 3 deletions tests/end2end/test_end2end_cybsec_mlp.py
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import json
@@ -83,7 +81,7 @@ def forward(self, x):

@pytest.mark.end2end
def test_end2end_cybsec_mlp_export():
assets_dir = pk.resource_filename("finn.qnn-data", "cybsec-mlp/")
assets_dir = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/cybsec-mlp"
# load up trained net in Brevitas
input_size = 593
hidden1 = 64
7 changes: 2 additions & 5 deletions tests/end2end/test_ext_weights.py
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import os
@@ -84,9 +82,8 @@ def test_end2end_ext_weights_build():
model_file = get_checkpoint_name("download")
load_test_checkpoint_or_skip(model_file)
build_env = get_build_env(build_kind, target_clk_ns)
folding_config_file = pk.resource_filename(
"finn.qnn-data", "test_ext_weights/tfc-w1a1-extw.json"
)
test_data = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/test_ext_weights"
folding_config_file = test_data + "/tfc-w1a1-extw.json"
output_dir = make_build_dir("test_end2end_ext_weights_build")
cfg = build.DataflowBuildConfig(
output_dir=output_dir,
8 changes: 4 additions & 4 deletions tests/fpgadataflow/test_convert_to_hls_layers_cnv.py
@@ -26,10 +26,9 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import os
import torch
@@ -86,8 +85,9 @@ def test_convert_to_hls_layers_cnv_w1a1(fused_activation):
model = model.transform(Streamline())
model = model.transform(InferDataLayouts())
# load one of the test vectors
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
input_tensor = input_tensor / 255
assert input_tensor.shape == (1, 3, 32, 32)
# generate expected value from streamlined net
8 changes: 4 additions & 4 deletions tests/transformation/streamline/test_streamline_cnv.py
@@ -26,10 +26,9 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import torch
from brevitas.export import export_qonnx
@@ -78,8 +77,9 @@ def test_streamline_cnv(size, wbits, abits):
model = model.transform(GiveReadableTensorNames())
model = model.transform(RemoveStaticGraphInputs())
# load one of the test vectors
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
input_tensor = input_tensor / 255
assert input_tensor.shape == (1, 3, 32, 32)
# run using FINN-based execution
8 changes: 4 additions & 4 deletions tests/transformation/test_batchnorm_to_affine_bnn_pynq.py
@@ -26,10 +26,9 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import onnx
import onnx.numpy_helper as nph
@@ -59,8 +58,9 @@ def test_batchnorm_to_affine_cnv_w1a1():
model = model.transform(ConvertQONNXtoFINN())
model = model.transform(InferShapes())
model = model.transform(FoldConstants())
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
input_tensor = input_tensor / 255
assert input_tensor.shape == (1, 3, 32, 32)
input_dict = {"0": input_tensor}
8 changes: 4 additions & 4 deletions tests/transformation/test_qonnx_to_finn.py
@@ -27,10 +27,9 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


import pkg_resources as pk

import pytest

import importlib_resources as importlib
import numpy as np
import onnx
import onnx.numpy_helper as nph
@@ -55,8 +54,9 @@ def get_brev_model_and_sample_inputs(model_name, wbits, abits):
brev_model = get_test_model_trained(model_name, wbits, abits)
elif model_name == "CNV":
in_shape = (1, 3, 32, 32)
fn = pk.resource_filename("finn.qnn-data", "cifar10/cifar10-test-data-class3.npz")
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
ref = importlib.files("finn.qnn-data") / "cifar10/cifar10-test-data-class3.npz"
with importlib.as_file(ref) as fn:
input_tensor = np.load(fn)["arr_0"].astype(np.float32)
input_tensor = input_tensor / 255
brev_model = get_test_model_trained(model_name, wbits, abits)
elif model_name == "mobilenet":
4 changes: 1 addition & 3 deletions tests/util/test_build_dataflow.py
@@ -26,8 +26,6 @@
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import pkg_resources as pk

import pytest

import numpy as np
@@ -44,7 +42,7 @@
def test_end2end_build_dataflow_directory():
test_dir = make_build_dir("test_build_dataflow_directory_")
target_dir = test_dir + "/build_dataflow"
example_data_dir = pk.resource_filename("finn.qnn-data", "build_dataflow/")
example_data_dir = os.environ["FINN_ROOT"] + "/src/finn/qnn-data/build_dataflow"
copytree(example_data_dir, target_dir)
build_dataflow_directory(target_dir)
# check the generated files
