From 5769521d03436abc0747c674087df9e523aa4cf3 Mon Sep 17 00:00:00 2001 From: PeterMeisrimelModelon <92585725+PeterMeisrimelModelon@users.noreply.github.com> Date: Mon, 12 Feb 2024 17:01:50 +0100 Subject: [PATCH 1/6] Replacing nose by pytest Resolving some Warnings; Adding pytest.ini Fixing deprecationWarnings cleanup Apply suggestions from code review Co-authored-by: Robin Andersson More review comment fixes Adding pytest.ini to test folder for automatic detection rebase fix replacing nose --- .github/workflows/build.yml | 4 +- setup.cfg | 2 +- setup.py | 3 +- src/pyfmi/__init__.py | 14 -- src/pyfmi/master.pyx | 12 +- tests/pytest.ini | 3 + tests/test_fmi.py | 358 ++++++++++++++---------------------- tests/test_fmi_coupled.py | 63 +++---- tests/test_fmi_estimate.py | 5 +- tests/test_fmi_extended.py | 3 - tests/test_fmi_master.py | 55 ++---- tests/test_fmi_util.py | 4 - tests/test_io.py | 343 ++++++++++------------------------ tests/test_log.py | 19 +- tests/test_stream.py | 43 ++--- 15 files changed, 319 insertions(+), 612 deletions(-) create mode 100644 tests/pytest.ini diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d9676ec1..8b973249 100755 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: python-version: '3.11.x' - name: Setup Python run: | - python3 -m pip install Cython numpy scipy matplotlib nose-py3 setuptools==69.1.0 + python3 -m pip install Cython numpy scipy matplotlib pytest setuptools==69.1.0 - name: Install system run: | sudo apt-get -y install cmake liblapack-dev libsuitesparse-dev libhypre-dev @@ -60,4 +60,4 @@ jobs: run: | rm src/pyfmi/__init__.py cp -rv src/pyfmi/tests/files tests - python3 -m nose --verbose tests/*.py + pytest --verbose tests/ diff --git a/setup.cfg b/setup.cfg index 477bf0bc..c6e2d84f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -14,4 +14,4 @@ install_requires = assimulo >= 3.5.0 tests_require = - nose-py3 >= 1.6.3 + pytest >= 7.4.4 diff --git a/setup.py 
b/setup.py index d40dd4d1..1e7c91b1 100644 --- a/setup.py +++ b/setup.py @@ -349,7 +349,8 @@ def check_extensions(): 'version.txt', 'LICENSE', 'CHANGELOG', - 'util/*'] + extra_package_data}, + 'util/*'] + extra_package_data, + 'pyfmi.tests': ['pytest.ini']}, script_args=copy_args ) diff --git a/src/pyfmi/__init__.py b/src/pyfmi/__init__.py index df5e3f7f..e523c0d6 100644 --- a/src/pyfmi/__init__.py +++ b/src/pyfmi/__init__.py @@ -30,20 +30,6 @@ import sys import time -def testattr(**kwargs): - """Add attributes to a test function/method/class. - - This function is needed to be able to add - @attr(slow = True) - for functions. - - """ - def wrap(func): - func.__dict__.update(kwargs) - return func - return wrap - - try: curr_dir = os.path.dirname(os.path.abspath(__file__)) _fpath=os.path.join(curr_dir,'version.txt') diff --git a/src/pyfmi/master.pyx b/src/pyfmi/master.pyx index 98aa760c..efd6e39f 100644 --- a/src/pyfmi/master.pyx +++ b/src/pyfmi/master.pyx @@ -782,7 +782,7 @@ cdef class Master: index_end = index_start + self.models_dict[model]["local_output_len"] local_output_vref_array = (model).get_real(self.models_dict[model]["local_output_vref_array"]) for i, index in enumerate(range(index_start, index_end)): - y[index] = local_output_vref_array[i] + y[index] = local_output_vref_array[i].item() return y.reshape(-1,1) cpdef np.ndarray get_connection_outputs_discrete(self): @@ -794,7 +794,7 @@ cdef class Master: index_end = index_start + self.models_dict[model]["local_output_discrete_len"] local_output_discrete = model.get(self.models_dict[model]["local_output_discrete"]) for i, index in enumerate(range(index_start, index_end)): - y[index] = local_output_discrete[i] + y[index] = local_output_discrete[i].item() return y.reshape(-1,1) cpdef np.ndarray _get_derivatives(self): @@ -810,7 +810,7 @@ cdef class Master: index_end = index_start + self.models_dict[model]["local_derivative_len"] local_derivative_vref_array = 
(model).get_real(self.models_dict[model]["local_derivative_vref_array"]) for i, index in enumerate(range(index_start, index_end)): - xd[index] = local_derivative_vref_array[i] + xd[index] = local_derivative_vref_array[i].item() return xd.reshape(-1,1) @@ -819,7 +819,7 @@ cdef class Master: ytmp = model.get(np.array(self.models_dict[model]["local_output_discrete"])[mask]) for i, flag in enumerate(mask): if flag: - yout[i+self.models_dict[model]["global_index_outputs_discrete"]] = ytmp[j] + yout[i+self.models_dict[model]["global_index_outputs_discrete"]] = ytmp[j].item() j = j + 1 cpdef np.ndarray get_specific_connection_outputs(self, model, np.ndarray mask, np.ndarray yout): @@ -827,7 +827,7 @@ cdef class Master: cdef np.ndarray ytmp = (model).get_real(self.models_dict[model]["local_output_vref_array"][mask]) for i, flag in enumerate(mask): if flag: - yout[i+self.models_dict[model]["global_index_outputs"]] = ytmp[j] + yout[i+self.models_dict[model]["global_index_outputs"]] = ytmp[j].item() j = j + 1 cpdef get_connection_derivatives(self, np.ndarray y_cur): @@ -1847,5 +1847,3 @@ cdef class Master: last_has_outputs = has_outputs """ return order, blocks,compressed_blocks - - diff --git a/tests/pytest.ini b/tests/pytest.ini new file mode 100644 index 00000000..9cce0b8d --- /dev/null +++ b/tests/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +filterwarnings = + ignore:.*does not support directional derivatives.*:UserWarning diff --git a/tests/test_fmi.py b/tests/test_fmi.py index 79784aa5..91054d14 100644 --- a/tests/test_fmi.py +++ b/tests/test_fmi.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import nose +import pytest import os import numpy as np from zipfile import ZipFile @@ -24,7 +24,6 @@ import logging from io import StringIO -from pyfmi import testattr from pyfmi.fmi import FMUException, InvalidOptionException, InvalidXMLException, InvalidBinaryException, InvalidVersionException, FMUModelME1, FMUModelCS1, load_fmu, FMUModelCS2, FMUModelME2 import pyfmi.fmi as fmi from pyfmi.fmi_algorithm_drivers import AssimuloFMIAlg, AssimuloFMIAlgOptions, \ @@ -70,12 +69,11 @@ def _helper_unzipped_fmu_exception_invalid_dir(fmu_loader): """ err_msg = "Specified fmu path '.*\\' needs to contain a modelDescription.xml according to the FMI specification" with tempfile.TemporaryDirectory() as temp_dir: - with np.testing.assert_raises_regex(FMUException, err_msg): + with pytest.raises(FMUException, match = err_msg): fmu = fmu_loader(temp_dir, allow_unzipped_fmu = True) if assimulo_installed: class Test_FMUModelME1_Simulation: - @testattr(stddist = True) def test_simulate_with_debug_option_no_state(self): """ Verify that an instance of CVodeDebugInformation is created """ model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False) @@ -90,7 +88,6 @@ def test_simulate_with_debug_option_no_state(self): from pyfmi.debug import CVodeDebugInformation debug = CVodeDebugInformation("NoState_Example1_debug.txt") - @testattr(stddist = True) def test_no_result(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -98,7 +95,8 @@ def test_no_result(self): opts["result_handling"] = None res = model.simulate(options=opts) - nose.tools.assert_raises(Exception,res._get_result_data) + with pytest.raises(Exception): + res._get_result_data() model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -106,9 +104,9 @@ def test_no_result(self): opts["return_result"] 
= False res = model.simulate(options=opts) - nose.tools.assert_raises(Exception,res._get_result_data) + with pytest.raises(Exception): + res._get_result_data() - @testattr(stddist = True) def test_custom_result_handler(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -120,11 +118,14 @@ def get_result(self): opts = model.simulate_options() opts["result_handling"] = "hejhej" - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handling"] = "custom" - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handler"] = A() - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handler"] = B() res = model.simulate(options=opts) @@ -136,7 +137,6 @@ def setup_atol_auto_update_test_base(self): opts["solver"] = "CVode" return model, opts - @testattr(stddist = True) def test_atol_auto_update1(self): """ Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". @@ -148,7 +148,6 @@ def test_atol_auto_update1(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - @testattr(stddist = True) def test_atol_auto_update2(self): """ Tests that atol doesn't get auto-updated when heuristic fails. @@ -160,7 +159,6 @@ def test_atol_auto_update2(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - @testattr(stddist = True) def test_atol_auto_update3(self): """ Tests that atol doesn't get auto-updated when nominals are never retrieved. 
@@ -178,8 +176,6 @@ def test_atol_auto_update3(self): class Test_FMUModelME1: - - @testattr(stddist = True) def test_unzipped_fmu_exception_invalid_dir(self): """ Verify that we get an exception if unzipped FMU does not contain modelDescription.xml, which it should according to the FMI specification. """ _helper_unzipped_fmu_exception_invalid_dir(FMUModelME1) @@ -200,33 +196,28 @@ def _test_unzipped_bouncing_ball(self, fmu_loader, tmp_dir = None): value = np.abs(res.final('h') - (0.0424044)) assert value < tol, "Assertion failed, value={} is not less than {}.".format(value, tol) - @testattr(stddist = True) def test_unzipped_fmu1(self): """ Test load and simulate unzipped ME FMU 1.0 using FMUModelME1 """ self._test_unzipped_bouncing_ball(FMUModelME1) self._test_unzipped_bouncing_ball(FMUModelME1, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_unzipped_fmu2(self): """ Test load and simulate unzipped ME FMU 1.0 using load_fmu """ self._test_unzipped_bouncing_ball(load_fmu) self._test_unzipped_bouncing_ball(load_fmu, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." 
fmu = os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu") - with nose.tools.assert_raises_regex(InvalidBinaryException, err_msg): + with pytest.raises(InvalidBinaryException, match = err_msg): model = FMUModelME1(fmu, _connect_dll=True) - @testattr(stddist = True) def test_invalid_version(self): err_msg = "This class only supports FMI 1.0" fmu = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu") - with nose.tools.assert_raises_regex(InvalidVersionException, err_msg): + with pytest.raises(InvalidVersionException, match = err_msg): model = FMUModelME1(fmu, _connect_dll=True) - @testattr(stddist = True) def test_get_time_varying_variables(self): model = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=False) @@ -237,7 +228,6 @@ def test_get_time_varying_variables(self): assert len(i) == len(i_f) assert len(b) == len(b_f) - @testattr(stddist = True) def test_get_time_varying_variables_with_alias(self): model = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "Alias1.fmu"), _connect_dll=False) @@ -246,20 +236,18 @@ def test_get_time_varying_variables_with_alias(self): assert len(r) == 1 assert r[0] == model.get_variable_valueref("y") - @testattr(stddist = True) def test_get_variable_by_valueref(self): bounce = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) assert "der(v)" == bounce.get_variable_by_valueref(3) assert "v" == bounce.get_variable_by_valueref(2) - nose.tools.assert_raises(FMUException, bounce.get_variable_by_valueref,7) + with pytest.raises(FMUException): + bounce.get_variable_by_valueref(7) - @testattr(stddist = True) def test_get_variable_nominal_valueref(self): bounce = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) assert bounce.get_variable_nominal("v") == bounce.get_variable_nominal(valueref=2) 
- @testattr(windows_full = True) def test_default_experiment(self): model = FMUModelME1(FMU_PATHS.ME1.coupled_clutches, _connect_dll=False) @@ -268,14 +256,12 @@ def test_default_experiment(self): assert np.abs(model.get_default_experiment_tolerance()-0.0001) < 1e-4 - @testattr(stddist = True) def test_log_file_name(self): model = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) assert os.path.exists("bouncingBall_log.txt") model = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False, log_file_name="Test_log.txt") assert os.path.exists("Test_log.txt") - @testattr(stddist = True) def test_ode_get_sizes(self): bounce = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) dq = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "dq.fmu"), _connect_dll=False) @@ -288,7 +274,6 @@ def test_ode_get_sizes(self): assert nCont == 1 assert nEvent == 0 - @testattr(stddist = True) def test_get_name(self): bounce = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) dq = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "dq.fmu"), _connect_dll=False) @@ -296,14 +281,13 @@ def test_get_name(self): assert bounce.get_name() == 'bouncingBall' assert dq.get_name() == 'dq' - @testattr(stddist = True) def test_instantiate_jmu(self): """ Test that FMUModelME1 can not be instantiated with a JMU file. """ - nose.tools.assert_raises(FMUException,FMUModelME1,'model.jmu') + with pytest.raises(FMUException): + FMUModelME1('model.jmu') - @testattr(stddist = True) def test_get_fmi_options(self): """ Test that simulate_options on an FMU returns the correct options @@ -312,7 +296,6 @@ class instance. 
bounce = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) assert isinstance(bounce.simulate_options(), AssimuloFMIAlgOptions) - @testattr(stddist = True) def test_get_xxx_empty(self): """ Test that get_xxx([]) do not calls do not trigger calls to FMU. """ model = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) @@ -323,8 +306,6 @@ def test_get_xxx_empty(self): assert len(model.get_string([])) == 0, "get_string ([]) has non-empty return" class Test_FMUModelCS1: - - @testattr(stddist = True) def test_unzipped_fmu_exception_invalid_dir(self): """ Verify that we get an exception if unzipped FMU does not contain modelDescription.xml, which it should according to the FMI specification. """ _helper_unzipped_fmu_exception_invalid_dir(FMUModelCS1) @@ -345,33 +326,28 @@ def _test_unzipped_bouncing_ball(self, fmu_loader, tmp_dir = None): value = np.abs(res.final('h') - (0.0424044)) assert value < tol, "Assertion failed, value={} is not less than {}.".format(value, tol) - @testattr(stddist = True) def test_unzipped_fmu1(self): """ Test load and simulate unzipped CS FMU 1.0 using FMUModelCS1 """ self._test_unzipped_bouncing_ball(FMUModelCS1) self._test_unzipped_bouncing_ball(FMUModelCS1, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_unzipped_fmu2(self): """ Test load and simulate unzipped CS FMU 1.0 using load_fmu """ self._test_unzipped_bouncing_ball(load_fmu) self._test_unzipped_bouncing_ball(load_fmu, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." 
fmu = os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu") - with nose.tools.assert_raises_regex(InvalidBinaryException, err_msg): + with pytest.raises(InvalidBinaryException, match = err_msg): model = FMUModelCS1(fmu, _connect_dll=True) - @testattr(stddist = True) def test_invalid_version(self): err_msg = "This class only supports FMI 1.0" fmu = os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu") - with nose.tools.assert_raises_regex(InvalidVersionException, err_msg): + with pytest.raises(InvalidVersionException, match = err_msg): model = FMUModelCS1(fmu, _connect_dll=True) - @testattr(stddist = True) def test_custom_result_handler(self): model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -383,15 +359,17 @@ def get_result(self): opts = model.simulate_options() opts["result_handling"] = "hejhej" - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handling"] = "custom" - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handler"] = A() - nose.tools.assert_raises(Exception, model.simulate, options=opts) + with pytest.raises(Exception): + model.simulate(options=opts) opts["result_handler"] = B() res = model.simulate(options=opts) - @testattr(stddist = True) def test_no_result(self): model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -399,7 +377,8 @@ def test_no_result(self): opts["result_handling"] = None res = model.simulate(options=opts) - nose.tools.assert_raises(Exception,res._get_result_data) + with pytest.raises(Exception): + res._get_result_data() model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu"), _connect_dll=False) 
@@ -407,9 +386,9 @@ def test_no_result(self): opts["return_result"] = False res = model.simulate(options=opts) - nose.tools.assert_raises(Exception,res._get_result_data) + with pytest.raises(Exception): + res._get_result_data() - @testattr(stddist = True) def test_result_name_file(self): model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -427,7 +406,6 @@ def test_result_name_file(self): assert res.result_file == "CoupledClutches_result_test.txt" assert os.path.exists(res.result_file) - @testattr(stddist = True) def test_default_experiment(self): model = FMUModelCS1(os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -435,25 +413,24 @@ def test_default_experiment(self): assert np.abs(model.get_default_experiment_stop_time()-1.5) < 1e-4 assert np.abs(model.get_default_experiment_tolerance()-0.0001) < 1e-4 - @testattr(stddist = True) def test_log_file_name(self): model = FMUModelCS1(os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "bouncingBall.fmu", ), _connect_dll=False) assert os.path.exists("bouncingBall_log.txt") model = FMUModelCS1(os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "bouncingBall.fmu"), _connect_dll=False, log_file_name="Test_log.txt") assert os.path.exists("Test_log.txt") - @testattr(stddist = True) def test_erreneous_ncp(self): model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu"), _connect_dll=False) opts = model.simulate_options() opts["ncp"] = 0 - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) opts["ncp"] = -1 - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) class Test_FMUModelBase: - @testattr(stddist = True) def test_unicode_description(self): full_path = 
os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "Description.fmu") model = FMUModelME1(full_path, _connect_dll=False) @@ -462,14 +439,12 @@ def test_unicode_description(self): assert desc == "Test symbols '' ‘’" - @testattr(stddist = True) def test_get_erronous_nominals(self): model = FMUModelME1(FMU_PATHS.ME1.nominal_test4, _connect_dll=False) - nose.tools.assert_almost_equal(model.get_variable_nominal("x"), 2.0) - nose.tools.assert_almost_equal(model.get_variable_nominal("y"), 1.0) + assert model.get_variable_nominal("x") == pytest.approx(2.0) + assert model.get_variable_nominal("y") == pytest.approx(1.0) - @testattr(stddist = True) def test_caching(self): negated_alias = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -497,7 +472,6 @@ def test_caching(self): vars_6 = negated_alias.get_model_variables() assert id(vars_1) != id(vars_6) - @testattr(stddist = True) def test_get_scalar_variable(self): negated_alias = FMUModelME1(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -510,26 +484,27 @@ def test_get_scalar_variable(self): assert sc_x.causality == fmi.FMI_INTERNAL assert sc_x.alias == fmi.FMI_NO_ALIAS - nose.tools.assert_raises(FMUException, negated_alias.get_scalar_variable, "not_existing") + with pytest.raises(FMUException): + negated_alias.get_scalar_variable("not_existing") - @testattr(stddist = True) def test_get_variable_description(self): model = FMUModelME1(FMU_PATHS.ME1.coupled_clutches, _connect_dll=False) assert model.get_variable_description("J1.phi") == "Absolute rotation angle of component" - @testattr(stddist = True) def test_simulation_without_initialization(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["initialize"] = False - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + 
with pytest.raises(FMUException): + model.simulate(options=opts) model = Dummy_FMUModelCS1([], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["initialize"] = False - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) def test_get_erroneous_nominals_capi_fmi1(self): """ Tests that erroneous nominals returned from getting nominals of continuous states get auto-corrected. """ @@ -559,30 +534,26 @@ def test_get_erroneous_nominals_capi_fmi1(self): expected_msg2 = "The nominal value for J4.w is 0.0 which is illegal according to the " \ + "FMI specification. Setting the nominal to 1.0." log = str(log_stream.getvalue()) - nose.tools.assert_in(expected_msg1, log) # First warning of 6. - nose.tools.assert_in(expected_msg2, log) # Last warning of 6. + assert expected_msg1 in log # First warning of 6. + assert expected_msg2 in log # Last warning of 6. 
# Check values are auto-corrected: - nose.tools.assert_almost_equal(xn[0], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[1], 1.0) # 0.0 - nose.tools.assert_almost_equal(xn[2], 2.0) # 2.0 - nose.tools.assert_almost_equal(xn[3], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[4], 1.0) # 0.0 - nose.tools.assert_almost_equal(xn[5], 2.0) # 2.0 - nose.tools.assert_almost_equal(xn[6], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[7], 1.0) # 0,0 + assert xn[0] == pytest.approx(2.0) + assert xn[1] == pytest.approx(1.0) + assert xn[2] == pytest.approx(2.0) + assert xn[3] == pytest.approx(2.0) + assert xn[4] == pytest.approx(1.0) + assert xn[5] == pytest.approx(2.0) + assert xn[6] == pytest.approx(2.0) + assert xn[7] == pytest.approx(1.0) class Test_LoadFMU: - - @testattr(stddist = True) def test_unzipped_fmu_exception_invalid_dir(self): """ Verify that we get an exception if unzipped FMU does not contain modelDescription.xml, which it should according to the FMI specification. """ _helper_unzipped_fmu_exception_invalid_dir(load_fmu) class Test_FMUModelCS2: - - @testattr(stddist = True) def test_unzipped_fmu_exception_invalid_dir(self): """ Verify that we get an exception if unzipped FMU does not contain modelDescription.xml, which it should according to the FMI specification. 
""" _helper_unzipped_fmu_exception_invalid_dir(FMUModelCS2) @@ -603,19 +574,16 @@ def _test_unzipped_bouncing_ball(self, fmu_loader, tmp_dir = None): value = np.abs(res.final('h') - (0.0424044)) assert value < tol, "Assertion failed, value={} is not less than {}.".format(value, tol) - @testattr(stddist = True) def test_unzipped_fmu1(self): """ Test load and simulate unzipped CS FMU 2.0 using FMUModelCS2 """ self._test_unzipped_bouncing_ball(FMUModelCS2) self._test_unzipped_bouncing_ball(FMUModelCS2, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_unzipped_fmu2(self): """ Test load and simulate unzipped CS FMU 2.0 using load_fmu """ self._test_unzipped_bouncing_ball(load_fmu) self._test_unzipped_bouncing_ball(load_fmu, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_log_file_name(self): full_path = os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu") model = FMUModelCS2(full_path, _connect_dll=False) @@ -623,36 +591,34 @@ def test_log_file_name(self): path, file_name = os.path.split(full_path) assert model.get_log_filename() == file_name.replace(".","_")[:-4]+"_log.txt" - @testattr(stddist = True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." 
fmu = os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu") - with nose.tools.assert_raises_regex(InvalidBinaryException, err_msg): + with pytest.raises(InvalidBinaryException, match = err_msg): model = FMUModelCS2(fmu, _connect_dll=True) - @testattr(stddist = True) def test_invalid_version(self): err_msg = "The FMU version is not supported" fmu = os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "CoupledClutches.fmu") - with nose.tools.assert_raises_regex(InvalidVersionException, err_msg): + with pytest.raises(InvalidVersionException, match = err_msg): model = FMUModelCS2(fmu, _connect_dll=True) - @testattr(stddist = True) def test_unzipped_fmu_exceptions(self): """ Verify exception is raised if 'fmu' is a file and allow_unzipped_fmu is set to True, with FMUModelCS2. """ err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True." - with nose.tools.assert_raises_regex(FMUException, err_msg): + with pytest.raises(FMUException, match = err_msg): model = FMUModelCS2(os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "LinearStability.SubSystem1.fmu"), _connect_dll=False, allow_unzipped_fmu=True) - @testattr(stddist = True) - def test_erroneous_ncp(self): + def test_erreneous_ncp(self): model = FMUModelCS2(os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) opts = model.simulate_options() opts["ncp"] = 0 - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) opts["ncp"] = -1 - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) def _verify_downsample_result(self, ref_traj, test_traj, ncp, factor): """Auxiliary function for result_downsampling_factor testing. 
@@ -739,7 +705,6 @@ def test_error_check_invalid_value(self): if assimulo_installed: class Test_FMUModelME2_Simulation: - @testattr(stddist = True) def test_basicsens1(self): #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) @@ -756,11 +721,10 @@ def f(*args, **kwargs): opts["sensitivities"] = ["d"] res = model.simulate(options=opts) - nose.tools.assert_almost_equal(res.final('dx/dd'), 0.36789, 3) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) assert res.solver.statistics["nsensfcnfcns"] > 0 - @testattr(stddist = True) def test_basicsens1dir(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) @@ -788,12 +752,11 @@ def d(*args, **kwargs): opts["sensitivities"] = ["d"] res = model.simulate(options=opts) - nose.tools.assert_almost_equal(res.final('dx/dd'), 0.36789, 3) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) assert res.solver.statistics["nsensfcnfcns"] > 0 assert res.solver.statistics["nfcnjacs"] == 0 - @testattr(stddist = True) def test_basicsens2(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False) @@ -821,11 +784,10 @@ def d(*args, **kwargs): opts["sensitivities"] = ["d"] res = model.simulate(options=opts) - nose.tools.assert_almost_equal(res.final('dx/dd'), 0.36789, 3) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) assert res.solver.statistics["nsensfcnfcns"] == 0 - @testattr(stddist = True) def test_relative_tolerance(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) @@ -836,7 +798,6 @@ def test_relative_tolerance(self): assert res.options["CVode_options"]["atol"] == 1e-10 - 
@testattr(stddist = True) def test_simulate_with_debug_option_no_state(self): """ Verify that an instance of CVodeDebugInformation is created """ model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) @@ -851,7 +812,6 @@ def test_simulate_with_debug_option_no_state(self): from pyfmi.debug import CVodeDebugInformation debug = CVodeDebugInformation("NoState_Example1_debug.txt") - @testattr(stddist = True) def test_maxord_is_set(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() @@ -862,7 +822,6 @@ def test_maxord_is_set(self): assert res.solver.maxord == 1 - @testattr(stddist = True) def test_with_jacobian_option(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() @@ -891,7 +850,6 @@ def run_case(expected, default="Default"): opts["with_jacobian"] = True run_case(True, True) - @testattr(stddist = True) def test_sparse_option(self): def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False): @@ -917,13 +875,11 @@ def get_derivatives_dependencies(self): run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}) run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True) - @testattr(stddist = True) def test_ncp_option(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() assert opts["ncp"] == 500, opts["ncp"] - @testattr(stddist = True) def test_solver_options(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = 
model.simulate_options() @@ -938,7 +894,6 @@ def test_solver_options(self): assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] - @testattr(stddist = True) def test_solver_options_using_defaults(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() @@ -951,7 +906,6 @@ def test_solver_options_using_defaults(self): assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"] assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"] - @testattr(stddist = True) def test_deepcopy_option(self): opts = AssimuloFMIAlgOptions() opts["CVode_options"]["maxh"] = 2.0 @@ -962,7 +916,6 @@ def test_deepcopy_option(self): assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..." - @testattr(stddist = True) def test_maxh_option(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() @@ -993,7 +946,6 @@ def run_case(tstart, tstop, solver, ncp="Default"): run_case(0,1,"LSODAR") run_case(0,1,"LSODAR") - @testattr(stddist = True) def test_rtol_auto_update(self): """ Test that default rtol picks up the unbounded attribute. 
""" model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False) @@ -1005,13 +957,12 @@ def test_rtol_auto_update(self): if res.solver.supports.get('rtol_as_vector', False): # automatic construction of rtol vector if model.get_variable_unbounded(state): - nose.tools.assert_equal(res.solver.rtol[i], 0) + assert res.solver.rtol[i] == 0 else: - nose.tools.assert_greater(res.solver.rtol[i], 0) + assert res.solver.rtol[i] > 0 else: # no support: scalar rtol - nose.tools.assert_true(isinstance(res.solver.rtol, float)) + assert isinstance(res.solver.rtol, float) - @testattr(stddist = True) def test_rtol_vector_manual_valid(self): """ Tests manual valid rtol vector works; if supported. """ @@ -1023,12 +974,11 @@ def test_rtol_vector_manual_valid(self): try: res = model.simulate(options=opts) # solver support - nose.tools.assert_equal(res.solver.rtol[0], 1e-5) - nose.tools.assert_equal(res.solver.rtol[1], 0.) + assert res.solver.rtol[0] == 1e-5 + assert res.solver.rtol[1] == 0. except InvalidOptionException as e: # if no solver support - nose.tools.assert_true(str(e).startswith("Failed to set the solver option 'rtol'")) + assert str(e).startswith("Failed to set the solver option 'rtol'") - @testattr(stddist = True) def test_rtol_vector_manual_size_mismatch(self): """ Tests invalid rtol vector: size mismatch. """ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) @@ -1037,10 +987,9 @@ def test_rtol_vector_manual_size_mismatch(self): opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5] err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states." - with nose.tools.assert_raises_regex(InvalidOptionException, err_msg): + with pytest.raises(InvalidOptionException, match = err_msg): model.simulate(options=opts) - @testattr(stddist = True) def test_rtol_vector_manual_invalid(self): """ Tests invalid rtol vector: different nonzero values. 
""" @@ -1050,10 +999,9 @@ def test_rtol_vector_manual_invalid(self): opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0] err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros." - with nose.tools.assert_raises_regex(InvalidOptionException, err_msg): + with pytest.raises(InvalidOptionException, match = err_msg): model.simulate(options=opts) - @testattr(stddist = True) def test_rtol_vector_manual_scalar_conversion(self): """ Test automatic scalar conversion of trivial rtol vector. """ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) @@ -1063,9 +1011,8 @@ def test_rtol_vector_manual_scalar_conversion(self): #Verify no exception is raised as the rtol vector should be treated as a scalar res = model.simulate(options=opts) - nose.tools.assert_equal(res.solver.rtol, 1e-5) + assert res.solver.rtol == 1e-5 - @testattr(stddist = True) def test_rtol_vector_unsupported(self): """ Test that rtol as a vector triggers exceptions for unsupported solvers. """ model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) @@ -1081,10 +1028,10 @@ def run_case(solver): try: res = model.simulate(options=opts) # solver support; check tolerances - nose.tools.assert_equal(res.solver.rtol[0], 1e-5) - nose.tools.assert_equal(res.solver.rtol[1], 0.0) + assert res.solver.rtol[0] == 1e-5 + assert res.solver.rtol[1] == 0.0 except InvalidOptionException as e: - nose.tools.assert_true(str(e).startswith("Failed to set the solver option 'rtol'")) + assert str(e).startswith("Failed to set the solver option 'rtol'") return # OK run_case("CVode") @@ -1101,7 +1048,6 @@ def setup_atol_auto_update_test_base(self): opts["solver"] = "CVode" return model, opts - @testattr(stddist = True) def test_atol_auto_update1(self): """ Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". 
@@ -1113,7 +1059,6 @@ def test_atol_auto_update1(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - @testattr(stddist = True) def test_atol_auto_update2(self): """ Tests that atol doesn't get auto-updated when heuristic fails. @@ -1125,7 +1070,6 @@ def test_atol_auto_update2(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - @testattr(stddist = True) def test_atol_auto_update3(self): """ Tests that atol doesn't get auto-updated when nominals are never retrieved. @@ -1137,7 +1081,6 @@ def test_atol_auto_update3(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - @testattr(stddist = True) def test_atol_auto_update4(self): """ Tests that atol is not auto-updated when it's set the "correct" way (post initialization). @@ -1152,7 +1095,6 @@ def test_atol_auto_update4(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - @testattr(stddist = True) def test_atol_auto_update5(self): """ Tests that atol is automatically set and depends on rtol. @@ -1163,7 +1105,6 @@ def test_atol_auto_update5(self): model.simulate(options=opts, algorithm=NoSolveAlg) np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8]) - @testattr(stddist = True) def test_atol_auto_update6(self): """ Tests that rtol doesn't affect explicitly set atol. @@ -1178,8 +1119,6 @@ def test_atol_auto_update6(self): class Test_FMUModelME2: - - @testattr(stddist = True) def test_unzipped_fmu_exception_invalid_dir(self): """ Verify that we get an exception if unzipped FMU does not contain modelDescription.xml, which it should according to the FMI specification. 
""" _helper_unzipped_fmu_exception_invalid_dir(FMUModelME2) @@ -1200,38 +1139,32 @@ def _test_unzipped_bouncing_ball(self, fmu_loader, tmp_dir = None): value = np.abs(res.final('h') - (0.0424044)) assert value < tol, "Assertion failed, value={} is not less than {}.".format(value, tol) - @testattr(stddist = True) def test_unzipped_fmu1(self): """ Test load and simulate unzipped ME FMU 2.0 using FMUModelME2 """ self._test_unzipped_bouncing_ball(FMUModelME2) self._test_unzipped_bouncing_ball(FMUModelME2, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_unzipped_fmu2(self): """ Test load and simulate unzipped ME FMU 2.0 using load_fmu """ self._test_unzipped_bouncing_ball(load_fmu) self._test_unzipped_bouncing_ball(load_fmu, tmp_dir = tempfile.TemporaryDirectory(dir = "./").name) - @testattr(stddist = True) def test_unzipped_fmu_exceptions(self): """ Verify exception is raised if 'fmu' is a file and allow_unzipped_fmu is set to True, with FMUModelME2. """ err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True." - with nose.tools.assert_raises_regex(FMUException, err_msg): + with pytest.raises(FMUException, match = err_msg): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True) - @testattr(stddist = True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." 
- with nose.tools.assert_raises_regex(InvalidBinaryException, err_msg): + with pytest.raises(InvalidBinaryException, match = err_msg): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True) - @testattr(stddist = True) def test_invalid_version(self): err_msg = "The FMU version is not supported by this class" - with nose.tools.assert_raises_regex(InvalidVersionException, err_msg): + with pytest.raises(InvalidVersionException, match = err_msg): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True) - @testattr(stddist = True) def test_estimate_directional_derivatives_linearstate(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStateSpace.fmu"), _connect_dll=False) @@ -1261,7 +1194,6 @@ def f(*args, **kwargs): assert np.allclose(Cs, C.toarray()), str(Cs)+' '+str(C.toarray()) assert np.allclose(Ds, D.toarray()), str(Ds)+' '+str(D.toarray()) - @testattr(stddist = True) def test_estimate_directional_derivatives_without_structure_info(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu"), _connect_dll=False) @@ -1289,7 +1221,6 @@ def f(*args, **kwargs): assert np.allclose(Cs, C.toarray()), str(Cs)+' '+str(C.toarray()) assert np.allclose(Ds, D.toarray()), str(Ds)+' '+str(D.toarray()) - @testattr(stddist = True) def test_estimate_directional_derivatives_BCD(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "OutputTest2.fmu"), _connect_dll=False) @@ -1345,7 +1276,6 @@ def f(*args, **kwargs): assert np.allclose(C, np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0]])), str(C.toarray()) assert np.allclose(D, np.array([[-1.0], [0.0], [1.0]])), str(D.toarray()) - @testattr(stddist = True) def test_output_dependencies(self): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", 
"ME2.0", "OutputTest2.fmu"), _connect_dll=False) @@ -1359,7 +1289,6 @@ def test_output_dependencies(self): assert input_dep["y3"][0] == "u1" assert len(input_dep["y2"]) == 0 - @testattr(stddist = True) def test_output_dependencies_2(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) @@ -1368,7 +1297,6 @@ def test_output_dependencies_2(self): assert len(state_dep.keys()) == 0, len(state_dep.keys()) assert len(input_dep.keys()) == 0, len(input_dep.keys()) - @testattr(stddist = True) def test_derivative_dependencies(self): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) @@ -1377,7 +1305,6 @@ def test_derivative_dependencies(self): assert len(state_dep.keys()) == 0, len(state_dep.keys()) assert len(input_dep.keys()) == 0, len(input_dep.keys()) - @testattr(stddist = True) def test_exception_with_load_fmu(self): """ Verify exception is raised. """ err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True." 
@@ -1387,16 +1314,15 @@ def test_exception_with_load_fmu(self): with open(test_file, 'w') as fh: fh.write('') rm_file = True - with nose.tools.assert_raises_regex(FMUException, err_msg): + with pytest.raises(FMUException, match = err_msg): fmu = load_fmu(test_file, allow_unzipped_fmu = True) if rm_file: os.remove(test_file) - @testattr(stddist = True) def test_malformed_xml(self): - nose.tools.assert_raises(InvalidXMLException, load_fmu, os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "MalFormed.fmu")) + with pytest.raises(InvalidXMLException): + load_fmu(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "MalFormed.fmu")) - @testattr(stddist = True) def test_log_file_name(self): full_path = FMU_PATHS.ME2.coupled_clutches @@ -1405,25 +1331,26 @@ def test_log_file_name(self): path, file_name = os.path.split(full_path) assert model.get_log_filename() == file_name.replace(".","_")[:-4]+"_log.txt" - @testattr(stddist = True) def test_units(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) assert model.get_variable_unit("J1.w") == "rad/s", model.get_variable_unit("J1.w") assert model.get_variable_unit("J1.phi") == "rad", model.get_variable_unit("J1.phi") - nose.tools.assert_raises(FMUException, model.get_variable_unit, "clutch1.useHeatPort") - nose.tools.assert_raises(FMUException, model.get_variable_unit, "clutch1.sss") - nose.tools.assert_raises(FMUException, model.get_variable_unit, "clutch1.sss") + with pytest.raises(FMUException): + model.get_variable_unit("clutch1.useHeatPort") + with pytest.raises(FMUException): + model.get_variable_unit("clutch1.sss") + with pytest.raises(FMUException): + model.get_variable_unit("clutch1.sss") - @testattr(stddist = True) def test_display_units(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) assert model.get_variable_display_unit("J1.phi") == "deg", model.get_variable_display_unit("J1.phi") - nose.tools.assert_raises(FMUException, 
model.get_variable_display_unit, "J1.w") + with pytest.raises(FMUException): + model.get_variable_display_unit("J1.w") - @testattr(stddist = True) def test_get_xxx_empty(self): """ Test that get_xxx([]) do not calls do not trigger calls to FMU. """ model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) @@ -1434,8 +1361,6 @@ def test_get_xxx_empty(self): assert len(model.get_string([])) == 0, "get_string ([]) has non-empty return" class Test_FMUModelBase2: - - @testattr(stddist = True) def test_relative_quantity(self): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "test_type_definitions.fmu"), _connect_dll=False) @@ -1450,9 +1375,9 @@ def test_relative_quantity(self): rel = model.get_variable_relative_quantity("real_with_typedef") assert rel is True, "Relative quantity should be True" - nose.tools.assert_raises(FMUException, model.get_variable_relative_quantity, "int_with_attr") + with pytest.raises(FMUException): + model.get_variable_relative_quantity("int_with_attr") - @testattr(stddist = True) def test_unbounded_attribute(self): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "test_type_definitions.fmu"), _connect_dll=False) @@ -1467,9 +1392,9 @@ def test_unbounded_attribute(self): unbounded = model.get_variable_unbounded("real_with_typedef") assert unbounded is True, "Unbounded should be True" - nose.tools.assert_raises(FMUException, model.get_variable_unbounded, "int_with_attr") + with pytest.raises(FMUException): + model.get_variable_unbounded("int_with_attr") - @testattr(stddist = True) def test_unicode_description(self): model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Description.fmu"), _connect_dll=False) @@ -1477,7 +1402,6 @@ def test_unicode_description(self): assert desc == "Test symbols '' ‘’" - @testattr(stddist = True) def test_declared_enumeration_type(self): model = 
FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Enumerations.Enumeration3.fmu"), _connect_dll=False) @@ -1490,26 +1414,26 @@ def test_declared_enumeration_type(self): assert enum.name == "Enumerations.Enumeration3.cities", "Got: " + enum.name assert enum.description == "", "Got: " + enum.description - nose.tools.assert_raises(FMUException, model.get_variable_declared_type, "z") + with pytest.raises(FMUException): + model.get_variable_declared_type("z") - @testattr(stddist = True) def test_get_erroneous_nominals_xml(self): model = FMUModelME2(FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - nose.tools.assert_almost_equal(model.get_variable_nominal("x"), 2.0) - nose.tools.assert_almost_equal(model.get_variable_nominal("y"), 1.0) + assert model.get_variable_nominal("x") == pytest.approx(2.0) + assert model.get_variable_nominal("y") == pytest.approx(1.0) - nose.tools.assert_almost_equal(model.get_variable_nominal("x", _override_erroneous_nominal=False), -2.0) - nose.tools.assert_almost_equal(model.get_variable_nominal("y", _override_erroneous_nominal=False), 0.0) + assert model.get_variable_nominal("x", _override_erroneous_nominal=False) == pytest.approx(-2.0) + assert model.get_variable_nominal("y", _override_erroneous_nominal=False) == pytest.approx(0.0) x_vref = model.get_variable_valueref("x") y_vref = model.get_variable_valueref("y") - nose.tools.assert_almost_equal(model.get_variable_nominal(valueref=x_vref), 2.0) - nose.tools.assert_almost_equal(model.get_variable_nominal(valueref=y_vref), 1.0) + assert model.get_variable_nominal(valueref=x_vref) == pytest.approx(2.0) + assert model.get_variable_nominal(valueref=y_vref) == pytest.approx(1.0) - nose.tools.assert_almost_equal(model.get_variable_nominal(valueref=x_vref, _override_erroneous_nominal=False), -2.0) - nose.tools.assert_almost_equal(model.get_variable_nominal(valueref=y_vref, _override_erroneous_nominal=False), 0.0) + assert model.get_variable_nominal(valueref=x_vref, 
_override_erroneous_nominal=False) == pytest.approx(-2.0) + assert model.get_variable_nominal(valueref=y_vref, _override_erroneous_nominal=False) == pytest.approx(0.0) def test_get_erroneous_nominals_capi(self): """ Tests that erroneous nominals returned from GetNominalsOfContinuousStates get auto-corrected. """ @@ -1538,20 +1462,19 @@ def test_get_erroneous_nominals_capi(self): expected_msg2 = "The nominal value for J4.w is 0.0 which is illegal according to the " \ + "FMI specification. Setting the nominal to 1.0." log = str(log_stream.getvalue()) - nose.tools.assert_in(expected_msg1, log) # First warning of 6. - nose.tools.assert_in(expected_msg2, log) # Last warning of 6. + assert expected_msg1 in log # First warning of 6. + assert expected_msg2 in log # Last warning of 6. # Check that values are auto-corrected: - nose.tools.assert_almost_equal(xn[0], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[1], 1.0) # 0.0 - nose.tools.assert_almost_equal(xn[2], 2.0) # 2.0 - nose.tools.assert_almost_equal(xn[3], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[4], 1.0) # 0.0 - nose.tools.assert_almost_equal(xn[5], 2.0) # 2.0 - nose.tools.assert_almost_equal(xn[6], 2.0) # -2.0 - nose.tools.assert_almost_equal(xn[7], 1.0) # 0,0 - - @testattr(stddist = True) + assert xn[0] == pytest.approx(2.0) + assert xn[1] == pytest.approx(1.0) + assert xn[2] == pytest.approx(2.0) + assert xn[3] == pytest.approx(2.0) + assert xn[4] == pytest.approx(1.0) + assert xn[5] == pytest.approx(2.0) + assert xn[6] == pytest.approx(2.0) + assert xn[7] == pytest.approx(1.0) + def test_get_time_varying_variables(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) @@ -1570,38 +1493,39 @@ def test_get_time_varying_variables(self): [r,i,b] = model.get_model_time_varying_value_references(filter=list(vars.keys())) assert len(r) == 1, len(r) - @testattr(stddist = True) def test_get_directional_derivative_capability(self): bounce = Dummy_FMUModelME2([], os.path.join(file_path, 
"files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) bounce.setup_experiment() bounce.initialize() # Bouncing ball don't have the capability, check that this is handled - nose.tools.assert_raises(FMUException, bounce.get_directional_derivative, [1], [1], [1]) + with pytest.raises(FMUException): + bounce.get_directional_derivative([1], [1], [1]) bounce = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "bouncingBall.fmu"), _connect_dll=False) bounce.setup_experiment() bounce.initialize() # Bouncing ball don't have the capability, check that this is handled - nose.tools.assert_raises(FMUException, bounce.get_directional_derivative, [1], [1], [1]) + with pytest.raises(FMUException): + bounce.get_directional_derivative([1], [1], [1]) - @testattr(stddist = True) def test_simulation_without_initialization(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["initialize"] = False - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["initialize"] = False - nose.tools.assert_raises(FMUException, model.simulate, options=opts) + with pytest.raises(FMUException): + model.simulate(options=opts) - @testattr(stddist = True) - def test_simulation_with_syncronization_exception_ME(self): + def test_simulation_with_synchronization_exception_ME(self): """ Verifies the allowed values for the option to synchronize simulations (ME) """ @@ -1609,16 +1533,17 @@ def test_simulation_with_syncronization_exception_ME(self): opts = model.simulate_options() opts["synchronize_simulation"] = "Hej" - nose.tools.assert_raises(InvalidOptionException, model.simulate, options=opts) + with 
pytest.raises(InvalidOptionException): + model.simulate(options=opts) model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["synchronize_simulation"] = -1.0 - nose.tools.assert_raises(InvalidOptionException, model.simulate, options=opts) + with pytest.raises(InvalidOptionException): + model.simulate(options=opts) - @testattr(stddist = True) - def test_simulation_with_syncronization_exception_CS(self): + def test_simulation_with_synchronization_exception_CS(self): """ Verifies the allowed values for the option to synchronize simulations (CS) """ @@ -1626,16 +1551,17 @@ def test_simulation_with_syncronization_exception_CS(self): opts = model.simulate_options() opts["synchronize_simulation"] = "Hej" - nose.tools.assert_raises(InvalidOptionException, model.simulate, options=opts) + with pytest.raises(InvalidOptionException): + model.simulate(options=opts) model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "bouncingBall.fmu"), _connect_dll=False) opts = model.simulate_options() opts["synchronize_simulation"] = -1.0 - nose.tools.assert_raises(InvalidOptionException, model.simulate, options=opts) + with pytest.raises(InvalidOptionException): + model.simulate(options=opts) - @testattr(stddist = True) - def test_simulation_with_syncronization_ME(self): + def test_simulation_with_synchronization_ME(self): """ Verifies that the option synchronize simulation works as intended in the most basic test for ME FMUs. 
""" @@ -1653,11 +1579,10 @@ def test_simulation_with_syncronization_ME(self): res = model.simulate(final_time=0.1, options=opts) tsyn = res.detailed_timings["computing_solution"] - assert tsyn > t, "Syncronization does not work: %d, %d"%(t, tsyn) + assert tsyn > t, f"synchronization does not work: Expected {tsyn} > {t}" - @testattr(stddist = True) - def test_simulation_with_syncronization_CS(self): + def test_simulation_with_synchronization_CS(self): """ Verifies that the option synchronize simulation works as intended in the most basic test for CS FMUs. """ @@ -1675,9 +1600,8 @@ def test_simulation_with_syncronization_CS(self): res = model.simulate(final_time=0.1, options=opts) tsyn = res.detailed_timings["computing_solution"] - assert tsyn > t, "Syncronization does not work: %d, %d"%(t, tsyn) + assert tsyn > t, f"synchronization does not work: Expected {tsyn} > {t}" - @testattr(stddist = True) def test_caching(self): negated_alias = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -1705,7 +1629,6 @@ def test_caching(self): vars_6 = negated_alias.get_model_variables() assert id(vars_1) != id(vars_6) - @testattr(stddist = True) def test_get_scalar_variable(self): negated_alias = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -1718,37 +1641,32 @@ def test_get_scalar_variable(self): assert sc_x.causality == fmi.FMI2_LOCAL, sc_x.causality assert sc_x.initial == fmi.FMI2_INITIAL_APPROX, sc_x.initial - nose.tools.assert_raises(FMUException, negated_alias.get_scalar_variable, "not_existing") + with pytest.raises(FMUException): + negated_alias.get_scalar_variable("not_existing") - @testattr(stddist = True) def test_get_variable_description(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) assert model.get_variable_description("J1.phi") == "Absolute rotation angle of component" class Test_load_fmu_only_XML: - - 
@testattr(stddist = True) def test_loading_xml_me1(self): model = FMUModelME1(FMU_PATHS.ME1.coupled_clutches, _connect_dll=False) assert model.get_name() == "CoupledClutches", model.get_name() - @testattr(stddist = True) def test_loading_xml_cs1(self): model = FMUModelCS1(os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "CoupledClutches.fmu"), _connect_dll=False) assert model.get_name() == "CoupledClutches", model.get_name() - @testattr(stddist = True) def test_loading_xml_me2(self): model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) assert model.get_name() == "CoupledClutches", model.get_name() - @testattr(stddist = True) def test_loading_xml_cs2(self): model = FMUModelCS2(os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_coupled.py b/tests/test_fmi_coupled.py index b1b622a8..08899f8a 100644 --- a/tests/test_fmi_coupled.py +++ b/tests/test_fmi_coupled.py @@ -15,10 +15,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import nose +import pytest import os -from pyfmi import testattr from pyfmi.fmi import FMUModelME2 from pyfmi.fmi_coupled import CoupledFMUModelME2 import pyfmi.fmi as fmi @@ -36,7 +35,6 @@ if assimulo_installed: class Test_CoupledFMUModelME2_Simulation: - @testattr(stddist = True) def test_linear_example(self): model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ -76,20 +74,18 @@ def sub2(*args, **kwargs): res = coupled.simulate(options=opts) - nose.tools.assert_almost_equal(res.final("First.x1"),0.08597302307099872) - nose.tools.assert_almost_equal(res.final("Second.x2"),0.0083923348082567) - nose.tools.assert_almost_equal(res.initial("First.x1"),1.0) - nose.tools.assert_almost_equal(res.initial("Second.x2"),1.0) + assert res.final("First.x1") == pytest.approx(0.08597302307099872) + assert res.final("Second.x2") == pytest.approx(0.0083923348082567) + assert res.initial("First.x1") == pytest.approx(1.0) + assert res.initial("Second.x2") == pytest.approx(1.0) - nose.tools.assert_almost_equal(res.final("First.u1"),-0.25909975860402856) - nose.tools.assert_almost_equal(res.final("Second.u2"),-0.0011806893910324295) - nose.tools.assert_almost_equal(res.initial("First.u1"),-17.736842105263158) - nose.tools.assert_almost_equal(res.initial("Second.u2"),-14.73684210526316) + assert res.final("First.u1") == pytest.approx(-0.25909975860402856) + assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295) + assert res.initial("First.u1") == pytest.approx(-17.736842105263158) + assert res.initial("Second.u2") == pytest.approx(-14.73684210526316) class Test_CoupledFMUModelME2: - - @testattr(stddist = True) def test_reversed_connections(self): model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = 
FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ -99,16 +95,16 @@ def test_reversed_connections(self): connections = [(model_sub_2,"y1",model_sub_1,"u2"), (model_sub_1,"y2",model_sub_2,"u1")] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) connections = [(model_sub_2,"u2",model_sub_1,"y1"), (model_sub_1,"u1",model_sub_2,"y2")] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) - @testattr(stddist = True) def test_inputs_list(self): - model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False) @@ -128,7 +124,6 @@ def test_inputs_list(self): assert "First.u1" in vars assert "Second.u2" in vars - @testattr(stddist = True) def test_alias(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -142,7 +137,6 @@ def test_alias(self): assert "First.J4.phi" in aliases.keys() assert coupled.get_variable_alias_base("First.J4.phi") == "First.J4.flange_a.phi" - @testattr(stddist = True) def test_loading(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -150,24 +144,28 @@ def test_loading(self): models = [model_cc_1, model_cc_2] connections = [] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with 
pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) models = [("First", model_cc_1), model_cc_2] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) models = [("First", model_cc_1), ("First", model_cc_2)] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) models = [("First", model_cc_1), ("Second", model_cc_2)] coupled = CoupledFMUModelME2(models, connections) connections = [("k")] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) connections = [(model_cc_1, "J1.phi", model_cc_2, "J2.phi")] - nose.tools.assert_raises(fmi.FMUException, CoupledFMUModelME2, models, connections) + with pytest.raises(fmi.FMUException): + CoupledFMUModelME2(models, connections) - @testattr(stddist = True) def test_get_variable_valueref(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -177,7 +175,8 @@ def test_get_variable_valueref(self): coupled = CoupledFMUModelME2(models, connections) - nose.tools.assert_raises(fmi.FMUException, coupled.get_variable_valueref, "J1.w") + with pytest.raises(fmi.FMUException): + coupled.get_variable_valueref("J1.w") vr_1 = coupled.get_variable_valueref("First.J1.w") vr_2 = coupled.get_variable_valueref("Second.J1.w") @@ -190,7 +189,6 @@ def test_get_variable_valueref(self): assert var_name_1 == "First.J1.w" assert var_name_2 == "Second.J1.w" - @testattr(stddist = True) def test_ode_sizes(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, 
"CoupledClutches.fmu"), _connect_dll=False) @@ -205,7 +203,6 @@ def test_ode_sizes(self): assert nbr_states == 16 assert nbr_event_ind == 66 - @testattr(stddist = True) def test_variable_variability(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -215,13 +212,13 @@ def test_variable_variability(self): coupled = CoupledFMUModelME2(models, connections) - nose.tools.assert_raises(fmi.FMUException, coupled.get_variable_variability, "J1.w") + with pytest.raises(fmi.FMUException): + coupled.get_variable_variability("J1.w") variability = coupled.get_variable_variability("First.J1.w") assert variability == model_cc_1.get_variable_variability("J1.w") - @testattr(stddist = True) def test_variable_causality(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -231,13 +228,13 @@ def test_variable_causality(self): coupled = CoupledFMUModelME2(models, connections) - nose.tools.assert_raises(fmi.FMUException, coupled.get_variable_causality, "J1.w") + with pytest.raises(fmi.FMUException): + coupled.get_variable_causality("J1.w") causality = coupled.get_variable_causality("First.J1.w") assert causality == model_cc_1.get_variable_causality("J1.w") - @testattr(stddist = True) def test_derivatives_list(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -255,7 +252,6 @@ def test_derivatives_list(self): alias_vars = coupled.get_variable_alias(var).keys() assert state in alias_vars - @testattr(stddist = True) def test_states_list(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 
= FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) @@ -273,7 +269,6 @@ def test_states_list(self): alias_vars = coupled.get_variable_alias(var).keys() assert state in alias_vars - @testattr(stddist = True) def test_model_variables(self): model_cc_1 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) model_cc_2 = FMUModelME2(os.path.join(me2_xml_path, "CoupledClutches.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_estimate.py b/tests/test_fmi_estimate.py index 0f438c09..7477dd0b 100644 --- a/tests/test_fmi_estimate.py +++ b/tests/test_fmi_estimate.py @@ -18,9 +18,8 @@ import os import numpy as np -from pyfmi import testattr from pyfmi.tests.test_util import Dummy_FMUModelME2 -from scipy.io.matlab.mio import loadmat +from scipy.io.matlab import loadmat assimulo_installed = True try: @@ -32,8 +31,6 @@ if assimulo_installed: class Test_FMUModelME2_Estimate: - - @testattr(stddist = True) def test_quadtank_estimate(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_extended.py b/tests/test_fmi_extended.py index b0655012..f0045d89 100644 --- a/tests/test_fmi_extended.py +++ b/tests/test_fmi_extended.py @@ -18,7 +18,6 @@ import os import numpy as np -from pyfmi import testattr from pyfmi.fmi_extended import FMUModelME1Extended file_path = os.path.dirname(os.path.abspath(__file__)) @@ -27,14 +26,12 @@ class Test_FMUModelME1Extended: - @testattr(stddist = True) def test_log_file_name(self): model = FMUModelME1Extended(os.path.join(me1_xml_path, "bouncingBall.fmu"), _connect_dll=False) assert os.path.exists("bouncingBall_log.txt") model = FMUModelME1Extended(os.path.join(me1_xml_path, "bouncingBall.fmu"), log_file_name="Test_log.txt", _connect_dll=False) assert os.path.exists("Test_log.txt") - @testattr(stddist = True) def test_default_experiment(self): model = 
FMUModelME1Extended(os.path.join(me1_xml_path, "CoupledClutches.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_master.py b/tests/test_fmi_master.py index 93b68b05..62206c26 100644 --- a/tests/test_fmi_master.py +++ b/tests/test_fmi_master.py @@ -15,12 +15,13 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -import nose +import pytest import os import numpy as np import warnings +import re -from pyfmi import testattr, Master +from pyfmi import Master from pyfmi.fmi import FMUException, FMUModelCS2, FMUModelME2 from pyfmi.tests.test_util import Dummy_FMUModelCS2 from pyfmi.common.io import ResultHandler, ResultSizeError @@ -35,8 +36,6 @@ warnings.filterwarnings("ignore") class Test_Master: - - @testattr(stddist = True) def test_loading_models(self): model_sub1 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub2 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ -48,7 +47,6 @@ def test_loading_models(self): #Assert that loading is successful sim = Master(models, connections) - @testattr(stddist = True) def test_loading_wrong_model(self): model_sub1 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ -57,9 +55,9 @@ def test_loading_wrong_model(self): connections = [(model_sub1,"y1",model_sub2,"u2"), (model_sub2,"y2",model_sub1,"u1")] - nose.tools.assert_raises(FMUException, Master, models, connections) + with pytest.raises(FMUException): + Master(models, connections) - @testattr(stddist = True) def test_connection_variables(self): model_sub1 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub2 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ 
-70,15 +68,16 @@ def test_connection_variables(self): connections = [(model_sub1,"y1",model_sub2,"x2"), (model_sub2,"y2",model_sub1,"u1")] - nose.tools.assert_raises(FMUException, Master, models, connections) + with pytest.raises(FMUException): + Master(models, connections) #Test wrong input / output order connections = [(model_sub2,"u2", model_sub1,"y1"), (model_sub2,"y2",model_sub1,"u1")] - nose.tools.assert_raises(FMUException, Master, models, connections) + with pytest.raises(FMUException): + Master(models, connections) - @testattr(stddist = True) def test_basic_algebraic_loop(self): model_sub1 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub2 = FMUModelCS2(os.path.join(cs2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) @@ -147,27 +146,23 @@ def _sim_basic_simulation(self, models, connections, opts_update): opts.update(opts_update) res = master.simulate(options=opts) - nose.tools.assert_almost_equal(res[models[0]].final("x1"), 0.0859764038708439, 3) - nose.tools.assert_almost_equal(res[models[1]].final("x2"), 0.008392664839635064, 4) + assert res[models[0]].final("x1") == pytest.approx(0.0859764038708439, abs = 1e-3) + assert res[models[1]].final("x2") == pytest.approx(0.008392664839635064, abs = 1e-4) return res def _basic_simulation(self, opts_update): models, connections = self._load_basic_simulation() self._sim_basic_simulation(models, connections, opts_update) - - @testattr(stddist = True) def test_basic_simulation_txt_file(self): opts = {"result_handling":"file"} self._basic_simulation(opts) - @testattr(stddist = True) def test_basic_simulation_mat_file(self): opts = {"result_handling":"binary"} self._basic_simulation(opts) - @testattr(stddist = True) def test_basic_simulation_memory(self): opts = {"result_handling":"memory"} self._basic_simulation(opts) @@ -179,7 +174,6 @@ def test_basic_simulation_max_result_size(self): - with nose.tools.assert_raises(ResultSizeError): + with pytest.raises(ResultSizeError):
self._basic_simulation(opts) - @testattr(stddist = True) def test_basic_simulation_mat_file_naming(self): opts = {"result_handling":"binary", "result_file_name": "Should fail..."} @@ -195,7 +189,6 @@ def test_basic_simulation_mat_file_naming(self): except UnrecognizedOptionError: pass - @testattr(stddist = True) def test_basic_simulation_mat_file_naming_exists(self): models, connections = self._load_basic_simulation() @@ -206,7 +199,6 @@ def test_basic_simulation_mat_file_naming_exists(self): assert os.path.isfile("Test1.mat"), "Test1.mat does not exists" assert os.path.isfile("Test2.mat"), "Test2.mat does not exists" - @testattr(stddist = True) def test_basic_simulation_txt_file_naming_exists(self): models, connections = self._load_basic_simulation() @@ -217,7 +209,6 @@ def test_basic_simulation_txt_file_naming_exists(self): assert os.path.isfile("Test1.txt"), "Test1.txt does not exists" assert os.path.isfile("Test2.txt"), "Test2.txt does not exists" - @testattr(stddist = True) def test_basic_simulation_csv_file_naming_exists(self): models, connections = self._load_basic_simulation() @@ -228,7 +219,6 @@ def test_basic_simulation_csv_file_naming_exists(self): assert os.path.isfile("Test1.csv"), "Test1.csv does not exists" assert os.path.isfile("Test2.csv"), "Test2.csv does not exists" - @testattr(stddist = True) def test_basic_simulation_none_result(self): models, connections = self._load_basic_simulation() @@ -239,10 +229,9 @@ def test_basic_simulation_none_result(self): opts["step_size"] = 0.0005 res = master.simulate(options=opts) - assert res[models[0]]._result_data == None, "Result is not none" - assert res[models[1]]._result_data == None, "Result is not none" + assert res[models[0]]._result_data is None + assert res[models[1]]._result_data is None - @testattr(stddist = True) def test_custom_result_handler_invalid(self): models, connections = self._load_basic_simulation() @@ -251,27 +240,27 @@ class A: opts = {} opts["result_handling"] = "hejhej" -
nose.tools.assert_raises(Exception, self._sim_basic_simulation, models, connections, opts) + with pytest.raises(Exception): + self._sim_basic_simulation(models, connections, opts) opts["result_handling"] = "custom" opts["result_handler"] = A() err = "'result_handler' option must be a dictionary for 'result_handling' = 'custom'." - with nose.tools.assert_raises_regex(FMUException, err): + with pytest.raises(FMUException, match = re.escape(err)): self._sim_basic_simulation(models, connections, opts) opts["result_handling"] = "custom" opts["result_handler"] = {m: A() for m in models[1:]} err = "'result_handler' option does not contain result handler for model '{}'".format(models[0].get_identifier()) - with nose.tools.assert_raises_regex(FMUException, err): + with pytest.raises(FMUException, match = re.escape(err)): self._sim_basic_simulation(models, connections, opts) opts["result_handling"] = "custom" opts["result_handler"] = {m: A() for m in models} err = "The result handler needs to be an instance of ResultHandler."
- with nose.tools.assert_raises_regex(FMUException, err): - self._sim_basic_simulation(models, connections, opts) + with pytest.raises(FMUException, match = re.escape(err)): + self._sim_basic_simulation(models, connections, opts) - @testattr(stddist = True) def test_custom_result_handler_valid(self): models, connections = self._load_basic_simulation() @@ -294,12 +283,10 @@ def get_result(self): assert res[models[0]]._result_data == 0, "Result is not 0" assert res[models[1]]._result_data == 1, "Result is not 1" - @testattr(stddist = True) def test_basic_simulation_with_block_initialization(self): opts = {"block_initialization": True} self._basic_simulation(opts) - @testattr(stddist = True) def test_integer_connections(self): model_sub1 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "IntegerStep.fmu"), _connect_dll=False) model_sub2 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "GainTestInteger.fmu"), _connect_dll=False) @@ -322,7 +309,7 @@ def do_step2(current_t, step_size, new_step=True): models = [model_sub1, model_sub2] connections = [(model_sub1, 'y', model_sub2, 'u')] - master = Master(models,connections) + master = Master(models, connections) opts = master.simulate_options() opts["block_initialization"] = True @@ -332,7 +319,6 @@ def do_step2(current_t, step_size, new_step=True): assert res[model_sub2]["u"][0] == 1 assert res[model_sub2]["u"][-1] == 3 - @testattr(stddist = True) def test_integer_to_real_connections(self): model_sub1 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "IntegerStep.fmu"), _connect_dll=False) model_sub2 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "GainTestReal.fmu"), _connect_dll=False) @@ -365,7 +351,6 @@ def do_step2(current_t, step_size, new_step=True): assert res[model_sub2]["u"][0] == 1.0 assert res[model_sub2]["u"][-1] == 3.0 - @testattr(stddist = True) def test_unstable_simulation(self): model_sub1 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "LinearCoSimulation_LinearSubSystem1.fmu"), _connect_dll=False)
model_sub2 = Dummy_FMUModelCS2([], os.path.join(cs2_xml_path, "LinearCoSimulation_LinearSubSystem2.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_util.py b/tests/test_fmi_util.py index 5c03a930..ac717872 100644 --- a/tests/test_fmi_util.py +++ b/tests/test_fmi_util.py @@ -22,12 +22,9 @@ import numpy as np from collections import OrderedDict -from pyfmi import testattr import pyfmi.fmi_util as fmi_util class Test_FMIUtil: - - @testattr(stddist = True) def test_cpr_seed(self): structure = OrderedDict([('der(inertia3.phi)', ['inertia3.w']), ('der(inertia3.w)', ['damper.phi_rel', 'inertia3.phi']), @@ -46,7 +43,6 @@ def test_cpr_seed(self): assert np.array(groups[1][4] == [3,4]).all() assert np.array(groups[2][4] == [5,6]).all() - @testattr(stddist = True) def test_cpr_seed_interested_columns(self): structure = OrderedDict([('der(inertia3.phi)', ['inertia3.w']), ('der(inertia3.w)', ['damper.phi_rel', 'inertia3.phi']), diff --git a/tests/test_io.py b/tests/test_io.py index c9e270ef..5853d85c 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -15,21 +15,19 @@ # You should have received a copy of the GNU Lesser General Public License # along with this program. If not, see . 
-import nose -import math +import pytest import os import numpy as np import time +import math from io import StringIO, BytesIO from collections import OrderedDict -from pyfmi import testattr from pyfmi.fmi import ( FMUException, FMUModelME2, FMI2_PARAMETER, FMI2_CONSTANT, - FMI2_LOCAL ) from pyfmi.common.io import ( ResultHandler, @@ -50,7 +48,7 @@ ) import pyfmi.fmi as fmi -from pyfmi.tests.test_util import Dummy_FMUModelME1, Dummy_FMUModelME2, Dummy_FMUModelCS2 +from pyfmi.tests.test_util import Dummy_FMUModelME1, Dummy_FMUModelCS1, Dummy_FMUModelME2, Dummy_FMUModelCS2 file_path = os.path.dirname(os.path.abspath(__file__)) @@ -69,14 +67,13 @@ def _run_negated_alias(model, result_type, result_file_name=""): # test that res['y'] returns a vector of the same length as the time # vector - nose.tools.assert_equal(len(res['y']),len(res['time']), - "Wrong size of result vector.") + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." x = res["x"] y = res["y"] for i in range(len(x)): - nose.tools.assert_equal(x[i], -y[i]) + assert x[i] == -y[i] if assimulo_installed: class TestResultFileText_Simulation: @@ -114,18 +111,15 @@ def f(*args, **kwargs): assert len(x) > 2 for i in range(len(x)): - nose.tools.assert_equal(x[i], -y[i]) + assert x[i] == -y[i] - @testattr(stddist = True) def test_correct_file_after_simulation_failure(self): self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt") - @testattr(stddist = True) def test_correct_stream_after_simulation_failure(self): stream = StringIO("") self._correct_syntax_after_simulation_failure(stream) - @testattr(stddist = True) def test_read_all_variables_using_model_variables(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -138,7 +132,6 @@ def test_read_all_variables_using_model_variables(self): for var in simple_alias.get_model_variables(): res[var] - @testattr(stddist = True) def 
test_read_alias_derivative(self): simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) @@ -150,10 +143,10 @@ def test_read_alias_derivative(self): derx = res["der(x)"] dery = res["der(y)"] + assert len(derx) > 0 for i in range(len(derx)): - nose.tools.assert_equal(derx[i], dery[i]) + assert derx[i] == dery[i] - @testattr(stddist = True) def test_no_variables(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) @@ -164,9 +157,8 @@ def test_no_variables(self): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(1.0, res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - @testattr(stddist = True) def test_enumeration_file(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) @@ -181,8 +173,6 @@ def test_enumeration_file(self): res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultFileText: - - def _get_description(self, result_file_name): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "CoupledClutches.fmu"), _connect_dll=False) model.initialize() @@ -200,16 +190,13 @@ def _get_description(self, result_file_name): assert res.description[res.get_variable_index("J1.phi")] == "Absolute rotation angle of component" - @testattr(stddist = True) def test_get_description_file(self): self._get_description('CoupledClutches_result.txt') - @testattr(stddist = True) def test_get_description_stream(self): stream = StringIO() self._get_description(stream) - @testattr(stddist = True) def test_description_not_stored(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "CoupledClutches.fmu"), _connect_dll=False) model.initialize() @@ -246,11 +233,9 @@ def _get_description_unicode(self, 
result_file_name): assert desc == u"Test symbols '' ‘’" - @testattr(stddist = True) def _get_description_unicode_file(self): self._get_description_unicode('Description_result.txt') - @testattr(stddist = True) def _get_description_unicode_stream(self): stream = StringIO() self._get_description_unicode(stream) @@ -276,14 +261,12 @@ def _work_flow_me1(self, result_file_name): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x, 1.000000, 5) - nose.tools.assert_almost_equal(derh.x, 0.000000, 5) + assert h.x == pytest.approx(1.000000, abs = 1e-5) + assert derh.x == pytest.approx(0.000000, abs = 1e-5) - @testattr(stddist = True) def test_work_flow_me1_file(self): self._work_flow_me1('bouncingBall_result.txt') - @testattr(stddist = True) def test_work_flow_me1_stream(self): stream = StringIO() self._work_flow_me1(stream) @@ -310,19 +293,16 @@ def _work_flow_me2(self, result_file_name): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x, 1.000000, 5) - nose.tools.assert_almost_equal(derh.x, 0.000000, 5) + assert h.x == pytest.approx(1.000000, abs = 1e-5) + assert derh.x == pytest.approx(0.000000, abs = 1e-5) - @testattr(stddist = True) def test_work_flow_me2_file(self): self._work_flow_me2('bouncingBall_result.txt') - @testattr(stddist = True) def test_work_flow_me2_stream(self): stream = StringIO() self._work_flow_me2(stream) - @testattr(stddist = True) def test_work_flow_me2_stream2(self): """ Verify exception when using ResultHandlerFile with a stream that doesnt support 'seek'. """ class A: @@ -330,10 +310,9 @@ def write(self): pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write' and 'seek'." 
- with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2(stream) - @testattr(stddist = True) def test_work_flow_me2_stream3(self): """ Verify exception when using ResultHandlerFile with a stream that doesnt support 'write'. """ class A: @@ -341,10 +320,9 @@ def seek(self): pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write' and 'seek'." - with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2(stream) - @testattr(stddist = True) def test_constructor_invalid_stream1(self): """ Verify exception is raised for ResultDymolaTextual if fname argument is a stream not supporting 'readline'. """ class A: @@ -352,10 +330,9 @@ def seek(self): pass stream = A() msg = "Given stream needs to support 'readline' and 'seek' in order to retrieve the results." - with nose.tools.assert_raises_regex(JIOError, msg): + with pytest.raises(JIOError, match = msg): res = ResultDymolaTextual(stream) - @testattr(stddist = True) def test_constructor_invalid_stream2(self): """ Verify exception is raised for ResultDymolaTextual if fname argument is a stream not supporting 'seek'. """ class A: @@ -363,22 +340,19 @@ def readline(self): pass stream = A() msg = "Given stream needs to support 'readline' and 'seek' in order to retrieve the results." 
- with nose.tools.assert_raises_regex(JIOError, msg): + with pytest.raises(JIOError, match = msg): res = ResultDymolaTextual(stream) if assimulo_installed: class TestResultMemory_Simulation: - @testattr(stddist = True) def test_memory_options_me1(self): simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "memory") - @testattr(stddist = True) def test_memory_options_me2(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "memory") - @testattr(stddist = True) def test_only_parameters(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) @@ -388,11 +362,10 @@ def test_only_parameters(self): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(3.0, res["p2"][0]) + assert 3.0 == pytest.approx(res["p2"][0]) assert not isinstance(res.initial("p2"), np.ndarray) assert not isinstance(res.final("p2"), np.ndarray) - @testattr(stddist = True) def test_no_variables(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) @@ -402,11 +375,9 @@ def test_no_variables(self): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(1.0, res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - @testattr(stddist = True) def test_enumeration_memory(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) data_type = model.get_variable_data_type("mode") @@ -423,7 +394,6 @@ class TestResultMemory: if assimulo_installed: class TestResultFileBinary_Simulation: - def _correct_file_after_simulation_failure(self, result_file_name): simple_alias = Dummy_FMUModelME2([("x", "y")], 
os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -457,14 +427,12 @@ def f(*args, **kwargs): assert len(x) > 2 for i in range(len(x)): - nose.tools.assert_equal(x[i], -y[i]) + assert x[i] == -y[i] - @testattr(stddist = True) def test_work_flow_me2_file(self): self._correct_file_after_simulation_failure("NegatedAlias_result.mat") - @testattr(stddist = True) def test_work_flow_me2_stream(self): stream = BytesIO() self._correct_file_after_simulation_failure(stream) @@ -480,13 +448,11 @@ def _only_parameters(self, result_file_name): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(3.0, res["p2"][0]) + assert 3.0 == pytest.approx(res["p2"][0]) - @testattr(stddist = True) def test_only_parameters_file(self): self._only_parameters("ParameterAlias_result.mat") - @testattr(stddist = True) def test_only_parameters_stream(self): stream = BytesIO() self._only_parameters(stream) @@ -502,19 +468,16 @@ def _no_variables(self, result_file_name): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(1.0, res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - @testattr(stddist = True) def test_no_variables_file(self): self._no_variables("ParameterAlias_result.mat") - @testattr(stddist = True) def test_no_variables_stream(self): stream = BytesIO() self._no_variables(stream) - @testattr(stddist = True) def test_read_alias_derivative(self): simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) @@ -526,12 +489,11 @@ def test_read_alias_derivative(self): derx = res["der(x)"] dery = res["der(y)"] + assert len(derx) > 0 for i in range(len(derx)): - nose.tools.assert_equal(derx[i], dery[i]) + assert derx[i] == dery[i] - @testattr(stddist = True) def test_enumeration_binary(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) data_type = 
model.get_variable_data_type("mode") @@ -544,7 +506,6 @@ def test_enumeration_binary(self): res = model.simulate(options=opts) res["mode"] #Check that the enumeration variable is in the dict, otherwise exception - @testattr(stddist = True) def test_integer_start_time(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) @@ -554,7 +515,6 @@ def test_integer_start_time(self): #Assert that there is no exception when reloading the file res = model.simulate(start_time=0, options=opts) - @testattr(stddist = True) def test_read_all_variables_using_model_variables(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -567,7 +527,6 @@ def test_read_all_variables_using_model_variables(self): for var in simple_alias.get_model_variables(): res[var] - @testattr(stddist = True) def test_variable_alias_custom_handler(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) @@ -579,39 +538,33 @@ def test_variable_alias_custom_handler(self): # test that res['y'] returns a vector of the same length as the time # vector - nose.tools.assert_equal(len(res['y']),len(res['time']), - "Wrong size of result vector.") + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." 
x = res["x"] y = res["y"] for i in range(len(x)): - nose.tools.assert_equal(x[i], -y[i]) + assert x[i] == -y[i] - @testattr(stddist = True) def test_binary_options_me1(self): simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "binary") - @testattr(stddist = True) def test_binary_options_me2(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "binary") - @testattr(stddist = True) def test_binary_options_me1_stream(self): simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) stream = BytesIO() _run_negated_alias(simple_alias, "binary", stream) - @testattr(stddist = True) def test_binary_options_me2_stream(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) stream = BytesIO() _run_negated_alias(simple_alias, "binary", stream) class TestResultFileBinary: - def _get_description_unicode(self, result_file_name): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "Description.fmu"), _connect_dll=False) model.initialize() @@ -633,16 +586,13 @@ def _get_description_unicode(self, result_file_name): assert desc == u"Test symbols '' ‘’" - @testattr(stddist = True) def test_get_description_unicode_file(self): self._get_description_unicode('Description_result.mat') - @testattr(stddist = True) def test_get_description_unicode_stream(self): stream = BytesIO() self._get_description_unicode(stream) - @testattr(stddist = True) def test_get_description(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "CoupledClutches.fmu"), _connect_dll=False) model.initialize() @@ -657,8 +607,7 
@@ def test_get_description(self): res = ResultDymolaBinary('CoupledClutches_result.mat') assert res.description[res.get_variable_index("J1.phi")] == "Absolute rotation angle of component" - - @testattr(stddist = True) + def test_modified_result_file_data_diagnostics(self): """Verify that computed diagnostics can be retrieved from an updated result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -709,8 +658,7 @@ def test_modified_result_file_data_diagnostics(self): result_writer.simulation_end() assert len(res.get_variable_data("@Diagnostics.state_errors.clutch2.w_rel").x) == 4, res.get_variable_data("@Diagnostics.state_errors.clutch2.w_rel").x - - @testattr(stddist = True) + def test_modified_result_file_data_diagnostics_steps(self): """Verify that diagnostics can be retrieved from an updated result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -761,8 +709,7 @@ def test_modified_result_file_data_diagnostics_steps(self): result_writer.simulation_end() assert len(res.get_variable_data("@Diagnostics.nbr_steps").x) == 4, res.get_variable_data("@Diagnostics.nbr_steps").x - - @testattr(stddist = True) + def test_modified_result_file_data_2(self): """Verify that continuous trajectories are updated when retrieved from a result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -784,8 +731,7 @@ def test_modified_result_file_data_2(self): result_writer.simulation_end() assert len(res.get_variable_data("J1.phi").x) == 2, res.get_variable_data("J1.phi").x - - @testattr(stddist = True) + def test_modified_result_file_data_2_different(self): """Verify that (different) continuous trajectories are updated when retrieved from a result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", 
"FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -807,8 +753,7 @@ def test_modified_result_file_data_2_different(self): result_writer.simulation_end() assert len(res.get_variable_data("J2.phi").x) == 2, res.get_variable_data("J2.phi").x - - @testattr(stddist = True) + def test_modified_result_file_data_1(self): """Verify that (different) constants/parameters can be retrieved from an updated result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -832,8 +777,7 @@ def test_modified_result_file_data_1(self): #Assert that no exception is raised res.get_variable_data("J2.J") - - @testattr(stddist = True) + def test_modified_result_file_data_1_delayed(self): """Verify that constants/parameters can be retrieved from an updated result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -853,8 +797,7 @@ def test_modified_result_file_data_1_delayed(self): #Assert that no exception is raised res.get_variable_data("J2.J") - - @testattr(stddist = True) + def test_modified_result_file_time(self): """Verify that 'time' can be retrieved from an updated result file""" model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) @@ -876,7 +819,6 @@ def test_modified_result_file_time(self): res.get_variable_data("time") - @testattr(stddist = True) def test_description_not_stored(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "CoupledClutches.fmu"), _connect_dll=False) model.initialize() @@ -895,7 +837,6 @@ def test_description_not_stored(self): assert res.description[res.get_variable_index("J1.phi")] == "", "Description is not empty, " + res.description[res.get_variable_index("J1.phi")] - @testattr(stddist = True) def test_overwriting_results(self): model = 
Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "CoupledClutches.fmu"), _connect_dll=False) model.initialize() @@ -922,9 +863,9 @@ def test_overwriting_results(self): result_writer.integration_point() result_writer.simulation_end() - nose.tools.assert_raises(JIOError,res.get_variable_data, "J1.phi") + with pytest.raises(JIOError): + res.get_variable_data("J1.phi") - @testattr(stddist = True) def test_read_all_variables(self): res = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat")) @@ -933,7 +874,6 @@ def test_read_all_variables(self): for var in res.name: res.get_variable_data(var) - @testattr(stddist = True) def test_data_matrix_delayed_loading(self): res = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat"), delayed_trajectory_loading=True) @@ -944,7 +884,6 @@ def test_data_matrix_delayed_loading(self): assert nbr_continuous_variables == 68, "Number of variables is incorrect, should be 68" assert nbr_points == 502, "Number of points is incorrect, should be 502" - @testattr(stddist = True) def test_data_matrix_loading(self): res = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat"), delayed_trajectory_loading=False) @@ -955,7 +894,6 @@ def test_data_matrix_loading(self): assert nbr_continuous_variables == 68, "Number of variables is incorrect, should be 68" assert nbr_points == 502, "Number of points is incorrect, should be 502" - @testattr(stddist = True) def test_read_all_variables_from_stream(self): with open(os.path.join(file_path, "files", "Results", "DoublePendulum.mat"), "rb") as f: @@ -966,7 +904,6 @@ def test_read_all_variables_from_stream(self): for var in res.name: res.get_variable_data(var) - @testattr(stddist = True) def test_compare_all_variables_from_stream(self): res_file = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat")) @@ -982,7 +919,6 @@ def 
test_compare_all_variables_from_stream(self): np.testing.assert_array_equal(x_file.x, x_stream.x, err_msg="Mismatch in array values for var=%s"%var) - @testattr(stddist = True) def test_on_demand_loading_32_bits(self): res_demand = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat")) res_all = ResultDymolaBinary(os.path.join(file_path, "files", "Results", "DoublePendulum.mat")) @@ -990,7 +926,6 @@ def test_on_demand_loading_32_bits(self): t_all = res_all.get_variable_data('time').x np.testing.assert_array_equal(t_demand, t_all, "On demand loaded result and all loaded does not contain equal result.") - @testattr(stddist = True) def test_work_flow_me1(self): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) model.initialize() @@ -1009,10 +944,9 @@ def test_work_flow_me1(self): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x, 1.000000, 5) - nose.tools.assert_almost_equal(derh.x, 0.000000, 5) + assert h.x == pytest.approx(1.000000, abs = 1e-5) + assert derh.x == pytest.approx(0.000000, abs = 1e-5) - @testattr(stddist = True) def test_many_variables_long_descriptions(self): """ Tests that large FMUs with lots of variables and huge length of descriptions gives @@ -1024,9 +958,9 @@ def test_many_variables_long_descriptions(self): res = ResultHandlerBinaryFile(model) res.set_options(model.simulate_options()) - nose.tools.assert_raises(FMUException,res.simulation_start) + with pytest.raises(FMUException): + res.simulation_start() - @testattr(stddist = True) def test_work_flow_me2(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) model.setup_experiment() @@ -1046,8 +980,8 @@ def test_work_flow_me2(self): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x[0], 1.000000, 
5) - nose.tools.assert_almost_equal(derh.x[0], 0.000000, 5) + assert h.x[0] == pytest.approx(1.000000, abs = 1e-5) + assert derh.x[0] == pytest.approx(0.000000, abs = 1e-5) def _work_flow_me2_aborted(self, result_file_name): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) @@ -1074,34 +1008,30 @@ def _work_flow_me2_aborted(self, result_file_name): h = res.get_variable_data('h') derh = res.get_variable_data('der(h)') - nose.tools.assert_almost_equal(h.x[0], 1.000000, 5, msg="Incorrect initial value for 'h', should be 1.0") - nose.tools.assert_almost_equal(derh.x[0], 0.000000, 5, msg="Incorrect value for 'derh', should be 0.0") - nose.tools.assert_almost_equal(h.x[1], 1.000000, 5, msg="Incorrect value for 'h', should be 1.0") - nose.tools.assert_almost_equal(derh.x[1], 0.000000, 5, msg="Incorrect value for 'derh', should be 0.0") - nose.tools.assert_almost_equal(h.x[2], 1.000000, 5, msg="Incorrect value for 'h', should be 1.0") - nose.tools.assert_almost_equal(derh.x[2], 0.000000, 5, msg="Incorrect value for 'derh', should be 0.0") + assert h.x[0] == pytest.approx(1.000000, abs = 1e-5), "Incorrect initial value for 'h', should be 1.0" + assert derh.x[0] == pytest.approx(0.000000, abs = 1e-5), "Incorrect value for 'derh', should be 0.0" + assert h.x[1] == pytest.approx(1.000000, abs = 1e-5), "Incorrect value for 'h', should be 1.0" + assert derh.x[1] == pytest.approx(0.000000, abs = 1e-5), "Incorrect value for 'derh', should be 0.0" + assert h.x[2] == pytest.approx(1.000000, abs = 1e-5), "Incorrect value for 'h', should be 1.0" + assert derh.x[2] == pytest.approx(0.000000, abs = 1e-5), "Incorrect value for 'derh', should be 0.0" - @testattr(stddist = True) def test_work_flow_me2_aborted_file(self): self._work_flow_me2_aborted('bouncingBall_result.mat') - @testattr(stddist = True) def test_work_flow_me2_aborted_stream(self): """ Verify expected workflow for ME2 aborted simulation using byte 
stream. """ stream = BytesIO() self._work_flow_me2_aborted(stream) - @testattr(stddist = True) def test_work_flow_me2_aborted_stream2(self): """ Verify exception when using ResultHandlerBinaryFile with a stream that doesnt support anything. """ class A: pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write', 'tell' and 'seek'." - with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2_aborted(stream) - @testattr(stddist = True) def test_work_flow_me2_aborted_stream3(self): """ Verify exception when using ResultHandlerBinaryFile with a stream that doesnt support 'seek'. """ class A: @@ -1111,10 +1041,9 @@ def tell(self): pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write', 'tell' and 'seek'." - with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2_aborted(stream) - @testattr(stddist = True) def test_work_flow_me2_aborted_stream4(self): """ Verify exception when using ResultHandlerBinaryFile with a stream that doesnt support 'tell'. """ class A: @@ -1124,10 +1053,9 @@ def seek(self): pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write', 'tell' and 'seek'." - with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2_aborted(stream) - @testattr(stddist = True) def test_work_flow_me2_aborted_stream5(self): """ Verify exception when using ResultHandlerBinaryFile with a stream that doesnt support 'write'. """ class A: @@ -1137,10 +1065,9 @@ def tell(self): pass stream = A() msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports 'write', 'tell' and 'seek'." 
- with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2_aborted(stream) - @testattr(stddist = True) def test_filter_no_variables(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) model.setup_experiment() @@ -1161,14 +1088,12 @@ def test_filter_no_variables(self): res = ResultDymolaBinary('bouncingBall_result.mat') t = res.get_variable_data('time') - nose.tools.assert_almost_equal(t.x[-1], 1.000000, 5) + assert t.x[-1] == pytest.approx(1.000000, abs = 1e-5) - @testattr(stddist = True) def test_binary_options_cs2(self): simple_alias = Dummy_FMUModelCS2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "binary") - @testattr(stddist = True) def test_binary_options_cs2_stream(self): simple_alias = Dummy_FMUModelCS2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu"), _connect_dll=False) stream = BytesIO() @@ -1181,7 +1106,6 @@ def _get_bouncing_ball_dummy(self, fmu_type = 'me2'): elif fmu_type == 'me1': return Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) - @testattr(stddist = True) def test_exception_simulation_start(self): """ Verify exception is raised if simulation_start is invoked without arguments. """ model = self._get_bouncing_ball_dummy() @@ -1192,9 +1116,9 @@ def test_exception_simulation_start(self): bouncingBall = ResultHandlerBinaryFile(model) bouncingBall.set_options(opts) - msg = r"Unable to start simulation. The following keyword argument\(s\) are empty:" - msg += r" 'diagnostics\_params' and 'diagnostics\_vars'." - with nose.tools.assert_raises_regex(FMUException, msg): + msg = "Unable to start simulation. 
The following keyword argument\(s\) are empty:" + msg += " 'diagnostics\_params' and 'diagnostics\_vars'." + with pytest.raises(FMUException, match = msg): bouncingBall.simulation_start() def _get_diagnostics_cancelled_sim(self, result_file_name): @@ -1265,23 +1189,20 @@ def _get_diagnostics_cancelled_sim(self, result_file_name): ev_ind = res.get_variable_data(DIAGNOSTICS_PREFIX+'event_info.state_event_info.index_1').x # Verify - nose.tools.assert_almost_equal(h.x[0], 1.000000, 5, msg="Incorrect initial value for 'h', should be 1.0") - nose.tools.assert_almost_equal(derh.x[0], 0.000000, 5, msg="Incorrect value for 'derh', should be 0.0") + assert h.x[0] == pytest.approx(1.000000, abs = 1e-5), "Incorrect initial value for 'h', should be 1.0" + assert derh.x[0] == pytest.approx(0.000000, abs = 1e-5), "Incorrect value for 'derh', should be 0.0" np.testing.assert_array_equal(ev_ind, np.array([0., 0., 0., 0., 1., 0.])) - @testattr(stddist = True) def test_diagnostics_data_cancelled_simulation_mat_file(self): """ Verify that we can retrieve data and diagnostics data after cancelled sim using matfile. """ self._get_diagnostics_cancelled_sim("TestCancelledSim.mat") - @testattr(stddist = True) def test_diagnostics_data_cancelled_simulation_file_stream(self): """ Verify that we can retrieve data and diagnostics data after cancelled sim using filestream. """ test_file_stream = open('myfilestream.txt', 'wb') self._get_diagnostics_cancelled_sim(test_file_stream) - @testattr(stddist = True) def test_debug_file_not_generated_when_dynamic_diagnostics_is_true(self): """ Verify that the debug file is not created when option dynamic_diagnostics is true. 
""" model = self._get_bouncing_ball_dummy() @@ -1292,10 +1213,8 @@ def test_debug_file_not_generated_when_dynamic_diagnostics_is_true(self): os.remove(potential_debug_file) model.simulate(options = opts) - nose.tools.assert_false(os.path.isfile(potential_debug_file), - "Test failed, file {} exists after simulation".format(potential_debug_file)) + assert not os.path.isfile(potential_debug_file), "Test failed, file {} exists after simulation".format(potential_debug_file) - @testattr(stddist = True) def test_exception_dynamic_diagnostics_and_non_binary_result_handling(self): """ Verify that an exception is raised if dynamic_diagnostics is True and result_handling is not binary. """ model = self._get_bouncing_ball_dummy() @@ -1305,10 +1224,9 @@ def test_exception_dynamic_diagnostics_and_non_binary_result_handling(self): err_msg = ("The chosen result_handler does not support dynamic_diagnostics." " Try using e.g., ResultHandlerBinaryFile.") - with nose.tools.assert_raises_regex(fmi.InvalidOptionException, err_msg): + with pytest.raises(fmi.InvalidOptionException, match = err_msg): model.simulate(options = opts) - @testattr(stddist = True) def test_exception_dynamic_diagnostics_and_non_binary_result_handling1(self): """ Verify that an exception is raised if dynamic diagnostics is True and result_handling is custom and does not support dynamic_diagnostics. """ @@ -1324,14 +1242,13 @@ def get_result(self): foo_inst = Foo(model) opts["result_handler"] = foo_inst - nose.tools.assert_false(foo_inst.supports.get('dynamic_diagnostics')) + assert not foo_inst.supports.get('dynamic_diagnostics') err_msg = ("The chosen result_handler does not support dynamic_diagnostics." 
" Try using e.g., ResultHandlerBinaryFile.") - with nose.tools.assert_raises_regex(fmi.InvalidOptionException, err_msg): + with pytest.raises(fmi.InvalidOptionException, match = err_msg): model.simulate(options = opts) - @testattr(stddist = True) def test_exception_dynamic_diagnostics_and_non_binary_result_handling2(self): """ Verify that exception is raised if dynamic diagnostics is True and result_handling is custom and valid class. """ model = self._get_bouncing_ball_dummy() @@ -1350,9 +1267,8 @@ def test_exception_dynamic_diagnostics_and_non_binary_result_handling2(self): exception_msg = str(e) raise e # In case error did not stop the test run - nose.tools.assert_true(no_error, "Error occurred: {}".format(exception_msg)) + assert no_error, "Error occurred: {}".format(exception_msg) - @testattr(stddist = True) def test_custom_result_handler_dynamic_diagnostics(self): """ Test dynamic diagnostics with a custom results handler that supports it. """ model = self._get_bouncing_ball_dummy() @@ -1377,9 +1293,8 @@ def get_result(self): opts["result_handler"] = res_handler model.simulate(options = opts) - nose.tools.assert_true(res_handler.diagnostics_point_called, msg = "diagnostics_point function was never called.") + assert res_handler.diagnostics_point_called, "diagnostics_point function was never called." - @testattr(stddist = True) def test_result_handler_supports_dynamic_diagnostics(self): """ Test dynamic diagnostics with a custom results handler that supports it, but lacks actual implementation. 
""" model = self._get_bouncing_ball_dummy() @@ -1400,7 +1315,8 @@ def get_result(self): res_handler = ResultDynDiag() opts["result_handler"] = res_handler - nose.tools.assert_raises(NotImplementedError, model.simulate, options = opts) + with pytest.raises(NotImplementedError): + model.simulate(options = opts) def _test_no_debug_file(self, fmu_type): model = self._get_bouncing_ball_dummy(fmu_type=fmu_type) @@ -1413,15 +1329,12 @@ def _test_no_debug_file(self, fmu_type): model.simulate(options = opts) - nose.tools.assert_false(os.path.isfile(expected_debug_file), - msg = f"file {expected_debug_file} found.") + assert not os.path.isfile(expected_debug_file), f"file {expected_debug_file} found." - @testattr(stddist = True) def test_debug_file_not_generated_me1(self): """ Verify that the debug file is not generated by enabling logging (ME1). """ self._test_no_debug_file(fmu_type = 'me1') - @testattr(stddist = True) def test_debug_file_not_generated_me2(self): """ Verify that the debug file is not generated by enabling logging (ME2). """ self._test_no_debug_file(fmu_type = 'me2') @@ -1445,19 +1358,16 @@ def _test_debug_file_opening(self, fmu_type): # Verify with open(expected_debug_file, 'r') as f: line = f.readline() - nose.tools.assert_false(test_str in line, "Test failed, found '{}' in '{}'".format(test_str, line)) + assert not test_str in line, "Test failed, found '{}' in '{}'".format(test_str, line) - @testattr(stddist = True) def test_debug_file_opened_in_write_mode_me1(self): """ Verify that the debug file is opened in write mode if it already did exist (ME1). """ self._test_debug_file_opening(fmu_type = 'me1') - @testattr(stddist = True) def test_debug_file_opened_in_write_mode_me2(self): """ Verify that the debug file is opened in write mode if it already did exist (ME2). """ self._test_debug_file_opening(fmu_type = 'me1') - @testattr(stddist = True) def test_diagnostics_numerical_values(self): """ Verify that we get the expected values for some diagnostics. 
""" model = self._get_bouncing_ball_dummy() @@ -1472,40 +1382,34 @@ def test_diagnostics_numerical_values(self): expected_solver_order[0] = 0.0 np.testing.assert_array_equal(res[f'{DIAGNOSTICS_PREFIX}solver.solver_order'], expected_solver_order) - @testattr(stddist = True) def test_get_last_result_file0(self): """ Verify get_last_result_file seems to point at the correct file. """ test_model = self._get_bouncing_ball_dummy() file_name = "testname.mat" test_model._result_file = file_name - nose.tools.assert_equal(test_model.get_last_result_file().split(os.sep)[-1], file_name, - "Unable to find {} in string {}".format(file_name, test_model.get_last_result_file())) + assert test_model.get_last_result_file().split(os.sep)[-1] == file_name, "Unable to find {} in string {}".format(file_name, test_model.get_last_result_file()) - @testattr(stddist = True) def test_get_last_result_file1(self): """ Verify get_last_result_file returns an absolute path. """ test_model = self._get_bouncing_ball_dummy() file_name = "testname.mat" test_model._result_file = file_name - nose.tools.assert_true(os.path.isabs(test_model.get_last_result_file()), "Expected abspath but got {}".format(test_model.get_last_result_file())) + assert os.path.isabs(test_model.get_last_result_file()), "Expected abspath but got {}".format(test_model.get_last_result_file()) - @testattr(stddist = True) def test_get_last_result_file2(self): """ Verify get_last_result_file doesnt cause exception if the result file is not yet set. 
""" test_model = self._get_bouncing_ball_dummy() test_model._result_file = None - nose.tools.assert_true(test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file())) + assert test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file()) - @testattr(stddist = True) def test_get_last_result_file3(self): """ Verify get_last_result_file doesnt cause exception if the result file is not set correctly. """ test_model = self._get_bouncing_ball_dummy() test_model._result_file = 123 # arbitrary number, just verify get_last_result_file works - nose.tools.assert_true(test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file())) + assert test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file()) if assimulo_installed: class TestResultCSVTextual_Simulation: - @testattr(stddist = True) def test_only_parameters(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) @@ -1516,9 +1420,8 @@ def test_only_parameters(self): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(3.0, res["p2"][0]) + assert 3.0 == pytest.approx(res["p2"][0]) - @testattr(stddist = True) def test_no_variables(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) @@ -1530,11 +1433,9 @@ def test_no_variables(self): res = model.simulate(options=opts) - nose.tools.assert_almost_equal(1.0, res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - @testattr(stddist = True) def test_variable_alias_custom_handler(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) opts = simple_alias.simulate_options() @@ -1545,38 +1446,32 @@ def 
test_variable_alias_custom_handler(self): # test that res['y'] returns a vector of the same length as the time # vector - nose.tools.assert_equal(len(res['y']),len(res['time']), - "Wrong size of result vector.") + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." x = res["x"] y = res["y"] for i in range(len(x)): - nose.tools.assert_equal(x[i], -y[i]) + assert x[i] == -y[i] - @testattr(stddist = True) def test_csv_options_me1(self): simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "csv") - @testattr(stddist = True) def test_csv_options_me2(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) _run_negated_alias(simple_alias, "csv") - @testattr(stddist = True) def test_csv_options_me1_stream(self): simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) stream = StringIO() _run_negated_alias(simple_alias, "csv", stream) - @testattr(stddist = True) def test_csv_options_me2(self): simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) stream = StringIO() _run_negated_alias(simple_alias, "csv", stream) - @testattr(stddist = True) def test_enumeration_csv(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) @@ -1593,7 +1488,6 @@ def test_enumeration_csv(self): class TestResultCSVTextual: - @testattr(stddist = True) def test_constructor_invalid_stream1(self): """ Verify exception is raised for ResultCSVTextual if filename argument is a stream not supporting 'readline'. 
""" class A: @@ -1601,10 +1495,9 @@ def seek(self): pass stream = A() msg = "Given stream needs to support 'readline' and 'seek' in order to retrieve the results." - with nose.tools.assert_raises_regex(JIOError, msg): + with pytest.raises(JIOError, match = msg): res = ResultCSVTextual(stream) - @testattr(stddist = True) def test_constructor_invalid_stream2(self): """ Verify exception is raised for ResultCSVTextual if filename argument is a stream not supporting 'seek'. """ class A: @@ -1612,10 +1505,9 @@ def readline(self): pass stream = A() msg = "Given stream needs to support 'readline' and 'seek' in order to retrieve the results." - with nose.tools.assert_raises_regex(JIOError, msg): + with pytest.raises(JIOError, match = msg): res = ResultCSVTextual(stream) - @testattr(stddist = True) def test_delimiter(self): res = ResultCSVTextual(os.path.join(file_path, 'files', 'Results', 'TestCSV.csv'), delimiter=",") @@ -1624,7 +1516,6 @@ def test_delimiter(self): assert x.x[-1] == 1 - @testattr(stddist = True) def _work_flow_me1(self, result_file_name): model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "bouncingBall.fmu"), _connect_dll=False) model.initialize() @@ -1645,14 +1536,12 @@ def _work_flow_me1(self, result_file_name): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x, 1.000000, 5) - nose.tools.assert_almost_equal(derh.x, 0.000000, 5) + assert h.x == pytest.approx(1.000000, abs = 1e-5) + assert derh.x == pytest.approx(0.000000, abs = 1e-5) - @testattr(stddist = True) def test_work_flow_me1_file(self): self._work_flow_me1('bouncingBall_result.csv') - @testattr(stddist = True) def test_work_flow_me1_stream(self): stream = StringIO() self._work_flow_me1(stream) @@ -1677,40 +1566,33 @@ def _work_flow_me2(self, result_file_name): derh = res.get_variable_data('der(h)') g = res.get_variable_data('g') - nose.tools.assert_almost_equal(h.x, 1.000000, 5) - 
nose.tools.assert_almost_equal(derh.x, 0.000000, 5) + assert h.x == pytest.approx(1.000000, abs = 1e-5) + assert derh.x == pytest.approx(0.000000, abs = 1e-5) - @testattr(stddist = True) def test_work_flow_me2_file(self): self._work_flow_me2('bouncingBall_result.csv') - @testattr(stddist = True) def test_work_flow_me2_stream(self): stream = StringIO() self._work_flow_me2(stream) - @testattr(stddist = True) def test_work_flow_me2_stream2(self): """ Verify exception when using ResultHandlerCSV with a stream that doesnt support 'write'. """ class A: pass stream = A() # send in something that is not a string msg = "Failed to write the result file. Option 'result_file_name' needs to be a filename or a class that supports writing to through the 'write' method." - with nose.tools.assert_raises_regex(FMUException, msg): + with pytest.raises(FMUException, match = msg): self._work_flow_me2(stream) - """ - @testattr(stddist = True) def test_csv_options_cs1(self): simple_alias = Dummy_FMUModelCS1([40], os.path.join(file_path, "files", "FMUs", "XML", "CS1.0", "NegatedAlias.fmu"), _connect_dll=False) - self._run_negated_alias(self, simple_alias) + _run_negated_alias(simple_alias, "csv") - @testattr(stddist = True) def test_csv_options_cs2(self): simple_alias = Dummy_FMUModelCS2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu"), _connect_dll=False) self._run_negated_alias(self, simple_alias) - """ class TestResultDymolaBinary: @@ -2086,8 +1968,8 @@ def _test_result_exception(self, result_type, result_file_name="", fmi_type="me" opts["result_max_size"] = 10 - with nose.tools.assert_raises(ResultSizeError): - res = model.simulate(options=opts) + with pytest.raises(ResultSizeError): + model.simulate(options=opts) def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False): """ @@ -2118,8 +2000,8 @@ def _test_result_size_verification(self, result_type, result_file_name="", dynam else: 
result_handler.simulation_start() - with nose.tools.assert_raises(ResultSizeError): - for i in range(ncp): + with pytest.raises(ResultSizeError): + for _ in range(ncp): result_handler.integration_point() if opts["dynamic_diagnostics"]: @@ -2142,8 +2024,8 @@ def _test_result_size_early_abort(self, result_type, result_file_name=""): opts["result_max_size"] = max_size opts["ncp"] = 10000000 - with nose.tools.assert_raises(ResultSizeError): - res = model.simulate(options=opts) + with pytest.raises(ResultSizeError): + model.simulate(options=opts) result_file = model.get_last_result_file() if result_file: @@ -2156,114 +2038,89 @@ def _test_result_size_early_abort(self, result_type, result_file_name=""): """ Binary """ - @testattr(stddist = True) def test_binary_file_size_verification_diagnostics(self): """ Make sure that the diagnostics variables are also taken into account. """ self._test_result_size_verification("binary", dynamic_diagnostics=True) - - @testattr(stddist = True) + def test_binary_file_size_verification(self): self._test_result_size_verification("binary") - - @testattr(stddist = True) + def test_binary_file_size_early_abort(self): self._test_result_size_early_abort("binary") - @testattr(stddist = True) def test_small_size_binary_file(self): self._test_result_exception("binary") - - @testattr(stddist = True) + def test_small_size_binary_file_cs(self): self._test_result_exception("binary", fmi_type="cs") - - @testattr(stddist = True) + def test_small_size_binary_file_stream(self): self._test_result_exception("binary", BytesIO()) - @testattr(stddist = True) def test_large_size_binary_file(self): self._test_result("binary") - @testattr(stddist = True) def test_large_size_binary_file_stream(self): self._test_result("binary", BytesIO()) """ Text """ - @testattr(stddist = True) def test_text_file_size_verification(self): self._test_result_size_verification("file") - - @testattr(stddist = True) + def test_text_file_size_early_abort(self): 
self._test_result_size_early_abort("file") - @testattr(stddist = True) def test_small_size_text_file(self): self._test_result_exception("file") - - @testattr(stddist = True) + def test_small_size_text_file_stream(self): self._test_result_exception("file", StringIO()) - @testattr(stddist = True) def test_large_size_text_file(self): self._test_result("file") - @testattr(stddist = True) def test_large_size_text_file_stream(self): self._test_result("file", StringIO()) """ CSV """ - @testattr(stddist = True) def test_csv_file_size_verification(self): self._test_result_size_verification("csv") - - @testattr(stddist = True) + def test_csv_file_size_early_abort(self): self._test_result_size_early_abort("csv") - @testattr(stddist = True) def test_small_size_csv_file(self): self._test_result_exception("csv") - - @testattr(stddist = True) + def test_small_size_csv_file_stream(self): self._test_result_exception("csv", StringIO()) - @testattr(stddist = True) def test_large_size_csv_file(self): self._test_result("csv", max_size=10000000) - @testattr(stddist = True) def test_large_size_csv_file_stream(self): self._test_result("csv", StringIO(), max_size=10000000) """ Memory """ - @testattr(stddist = True) def test_small_size_memory(self): self._test_result_exception("memory") - - @testattr(stddist = True) + def test_memory_size_early_abort(self): self._test_result_size_early_abort("memory") - - @testattr(stddist = True) + def test_small_size_memory_stream(self): self._test_result_exception("memory", StringIO()) - @testattr(stddist = True) def test_large_size_memory(self): self._test_result("memory") - @testattr(stddist = True) def test_large_size_memory_stream(self): - self._test_result("memory", StringIO()) \ No newline at end of file + self._test_result("memory", StringIO()) diff --git a/tests/test_log.py b/tests/test_log.py index 0da0bdef..78bd0a59 100644 --- a/tests/test_log.py +++ b/tests/test_log.py @@ -17,7 +17,6 @@ import os -from pyfmi import testattr from 
pyfmi.common.log import extract_xml_log, parse_xml_log from pyfmi.common.diagnostics import DIAGNOSTICS_PREFIX from pyfmi.tests.test_util import Dummy_FMUModelME2 @@ -28,8 +27,6 @@ logs = os.path.join(file_path, "files", "Logs") class Test_Log: - - @testattr(stddist = True) def test_decode_bytes(self): """ Verifies that malformed strings are still accepted and don't cause exceptions @@ -41,7 +38,6 @@ def test_decode_bytes(self): assert s_string == '[WARNING][FMU status:Warning] "�\x15"', s_string - @testattr(stddist = True) def test_extract_log(self): extract_xml_log("Tmp1.xml", os.path.join(logs, "CoupledClutches_log.txt"), modulename = 'Model') @@ -51,14 +47,12 @@ def test_extract_log(self): assert "" == str(log.nodes[1]), "Got: " + str(log.nodes[1]) - @testattr(stddist = True) def test_extract_log_exception(self): try: extract_xml_log("Tmp2", os.path.join(logs, "CoupledClutches_log_.txt"), modulename = 'Model') except FileNotFoundError: pass - @testattr(stddist = True) def test_extract_log_cs(self): extract_xml_log("Tmp3.xml", os.path.join(logs, "CoupledClutches_CS_log.txt"), modulename = 'Slave') @@ -68,7 +62,6 @@ def test_extract_log_cs(self): assert "" == str(log.nodes[1]), "Got: " + str(log.nodes[1]) - @testattr(stddist = True) def test_extract_log_wrong_modulename(self): extract_xml_log("Tmp4.xml", os.path.join(logs, "CoupledClutches_CS_log.txt"), modulename = 'Test') @@ -100,7 +93,6 @@ def _test_logging_different_solver(self, solver_name): np.testing.assert_equal(len(res['time']), len(res['h']), "Expected time and h to be of equal length but they weren't!") return res - @testattr(stddist = True) def test_logging_option_CVode(self): res = self._test_logging_different_solver("CVode") t = res['time'] @@ -110,30 +102,25 @@ def test_logging_option_CVode(self): assert (f'{DIAGNOSTICS_PREFIX}state_errors.h' in res.keys()), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should be part of result variables!" 
- @testattr(stddist = True) def test_logging_option_Radau5ODE(self): res = self._test_logging_different_solver("Radau5ODE") event_type = list(res[f'{DIAGNOSTICS_PREFIX}event_data.event_info.event_type']) assert event_type.count(-1) == len(event_type), "Expected no events to have happened!" assert (f'{DIAGNOSTICS_PREFIX}state_errors.h' in res.keys()), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should be part of result variables!" - @testattr(stddist = True) def test_logging_option_ImplicitEuler(self): res = self._test_logging_different_solver("ImplicitEuler") - assert not (f'{DIAGNOSTICS_PREFIX}state_errors.h' in res.keys()), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should not be part of result variables!" + assert f'{DIAGNOSTICS_PREFIX}state_errors.h' not in res.keys(), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should not be part of result variables!" - @testattr(stddist = True) def test_logging_option_ExplicitEuler(self): res = self._test_logging_different_solver("ExplicitEuler") - assert not (f'{DIAGNOSTICS_PREFIX}state_errors.h' in res.keys()), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should not be part of result variables!" + assert f'{DIAGNOSTICS_PREFIX}state_errors.h' not in res.keys(), f"'{DIAGNOSTICS_PREFIX}state_errors.h' should not be part of result variables!" 
- @testattr(stddist = True) def test_logging_option_LSODAR(self): res = self._test_logging_different_solver("LSODAR") event_type = list(res[f'{DIAGNOSTICS_PREFIX}event_data.event_info.event_type']) assert event_type.count(-1) == len(event_type), "Expected no events to have happened, but event_type contains: {}!".format(event_type) - @testattr(stddist = True) def test_calculated_diagnostic(self): res = self._test_logging_different_solver("CVode") np.testing.assert_equal(len(res['time']), len(res[f'{DIAGNOSTICS_PREFIX}nbr_steps']), @@ -148,14 +135,12 @@ def test_calculated_diagnostic(self): "Expected time and Diagnostics.nbr_state_limits_step.h to be of equal length but they weren't!") - @testattr(stddist = True) def test_extract_boolean_value(self): log = parse_xml_log(os.path.join(logs, "boolean_log.xml")) eis = log.find("EventInfo") for ei in eis: assert isinstance(ei.time_event_info, bool), "Expected ei.time_event_info to be bool" - @testattr(stddist = True) def test_hasattr_works(self): """ Tests that 'hasattr' works on the log nodes. diff --git a/tests/test_stream.py b/tests/test_stream.py index 758c7af1..fd8be522 100644 --- a/tests/test_stream.py +++ b/tests/test_stream.py @@ -15,14 +15,13 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import nose +import pytest import os from io import StringIO import tempfile from shutil import rmtree from filecmp import cmp as compare_files -from pyfmi import testattr from pyfmi.fmi import FMUException, load_fmu, FMUModelCS2, FMUModelME2 from pyfmi.tests.test_util import get_examples_folder @@ -74,31 +73,29 @@ def simulate_and_verify_stream_contents(compiled_fmu, fmu_loader, stream, open_t ] for i, line in enumerate(expected): err_msg = "Unable to find substring {} in list {}".format(line, "".join(contents)) - nose.tools.assert_in(line, contents[i], err_msg) + assert line in contents[i], err_msg class Test_FMUModelME2: """ Test stream functionality for FMI class FMUModelME2. """ + @pytest.fixture(autouse=True) @classmethod def setup_class(cls): cls.example_fmu = os.path.join(get_examples_folder(), 'files', 'FMUs', 'ME2.0', 'bouncingBall.fmu') cls.test_class = FMUModelME2 # Verify the installation is not corrupt while setting up the class. - nose.tools.assert_true(os.path.isfile(cls.example_fmu)) + assert os.path.isfile(cls.example_fmu) - @testattr(stddist = True) def test_testio(self): """ FMUModelME2 and custom IO class. """ stream = TestIO("") simulate_and_verify_stream_contents(self.example_fmu, self.test_class, stream) - @testattr(stddist = True) def test_stringio(self): """ FMUModelME2 and StringIO. """ stream = StringIO() simulate_and_verify_stream_contents(self.example_fmu, self.test_class, stream) - @testattr(stddist = True) def test_textiowrapper(self): """ FMUModelME2 and TextIOWrapper. """ p = tempfile.mkdtemp() @@ -111,27 +108,25 @@ def test_textiowrapper(self): class Test_FMUModelCS2: """ Test stream functionality for FMI class FMUModelCS2. """ + @pytest.fixture(autouse=True) @classmethod def setup_class(cls): cls.example_fmu = os.path.join(get_examples_folder(), 'files', 'FMUs', 'CS2.0', 'bouncingBall.fmu') cls.test_class = FMUModelCS2 # Verify the installation is not corrupt while setting up the class. 
- nose.tools.assert_true(os.path.isfile(cls.example_fmu)) + assert os.path.isfile(cls.example_fmu) - @testattr(stddist = True) def test_testio(self): """ FMUModelCS2 and custom IO class. """ stream = TestIO("") simulate_and_verify_stream_contents(self.example_fmu, self.test_class, stream) - @testattr(stddist = True) def test_stringio(self): """ FMUModelCS2 and StringIO. """ stream = StringIO() simulate_and_verify_stream_contents(self.example_fmu, self.test_class, stream) - @testattr(stddist = True) def test_textiowrapper(self): """ FMUModelCS2 and TextIOWrapper. """ p = tempfile.mkdtemp() @@ -144,27 +139,25 @@ def test_textiowrapper(self): class Test_LoadFMU: """ Test stream functionality with load_fmu. """ + @pytest.fixture(autouse=True) @classmethod def setup_class(cls): cls.example_fmu = os.path.join(get_examples_folder(), 'files', 'FMUs', 'ME2.0', 'bouncingBall.fmu') cls.test_class = load_fmu # Verify the installation is not corrupt while setting up the class. - nose.tools.assert_true(os.path.isfile(cls.example_fmu)) + assert os.path.isfile(cls.example_fmu) - @testattr(stddist = True) def test_testio(self): """ load_fmu and custom IO class. """ stream = TestIO("") simulate_and_verify_stream_contents(Test_LoadFMU.example_fmu, Test_LoadFMU.test_class, stream) - @testattr(stddist = True) def test_stringio(self): """ load_fmu and StringIO. """ stream = StringIO() simulate_and_verify_stream_contents(Test_LoadFMU.example_fmu, Test_LoadFMU.test_class, stream) - @testattr(stddist = True) def test_textiowrapper(self): """ load_fmu and TextIOWrapper. """ p = tempfile.mkdtemp() @@ -177,14 +170,14 @@ def test_textiowrapper(self): class TestXML: """ Test other log related functions together with streams. """ + @pytest.fixture(autouse=True) @classmethod def setup_class(cls): cls.example_fmu = os.path.join(get_examples_folder(), 'files', 'FMUs', 'ME2.0', 'bouncingBall.fmu') # Verify the installation is not corrupt while setting up the class. 
- nose.tools.assert_true(os.path.isfile(cls.example_fmu)) + assert os.path.isfile(cls.example_fmu) - @testattr(stddist = True) def test_extract_xml_log(self): """ Compare contents of XML log when using stream and normal logfile. """ stream = TestIO("") @@ -206,9 +199,8 @@ def test_extract_xml_log(self): xml_log = fmu.extract_xml_log() err_msg = "Unequal xml files, please compare the contents of:\n{}\nand\n{}".format(xml_log_s, xml_log) - nose.tools.assert_true(compare_files(xml_log_s, xml_log), err_msg) + assert compare_files(xml_log_s, xml_log), err_msg - @testattr(stddist = True) def test_get_log(self): """ Test get_log throws exception if stream doesnt support getvalue. """ stream = StringIO("") @@ -221,10 +213,9 @@ def test_get_log(self): 'FMIL: module = FMI2XML, log level = 3: fmi2_xml_get_default_experiment_tolerance' ] for i, line in enumerate(expected_substr): - nose.tools.assert_in(line, log[i]) + assert line in log[i] - @testattr(stddist = True) def test_get_log_exception1(self): """ Test get_log throws exception if stream doesnt allow reading (it is set for writing). """ try: @@ -234,7 +225,7 @@ def test_get_log_exception1(self): fmu_s = load_fmu(self.example_fmu, log_file_name = stream, log_level = 3) res_s = fmu_s.simulate() err_msg = "Unable to read from given stream, make sure the stream is readable." - with nose.tools.assert_raises_regex(FMUException, err_msg): + with pytest.raises(FMUException, match = err_msg): log = fmu_s.get_log() finally: if not stream.closed: @@ -242,16 +233,14 @@ def test_get_log_exception1(self): rmtree(p) - @testattr(stddist = True) def test_get_nbr_of_lines_in_log(self): """ Test get_number_of_lines_log when using a stream. 
""" stream = StringIO("") fmu = load_fmu(self.example_fmu, log_file_name = stream, log_level = 3) - nose.tools.assert_equal(fmu.get_number_of_lines_log(), 0) + assert fmu.get_number_of_lines_log() == 0 res = fmu.simulate() - nose.tools.assert_equal(fmu.get_number_of_lines_log(), 0) + assert fmu.get_number_of_lines_log() == 0 - @testattr(stddist = True) def test_extract_xml_log_into_stream(self): """ Compare contents of XML log when extract XML into a stream. """ stream = TestIO("") @@ -277,4 +266,4 @@ def test_extract_xml_log_into_stream(self): xml_log = fmu.extract_xml_log() err_msg = "Unequal xml files, please compare the contents of:\n{}\nand\n{}".format(xml_file1, xml_log) - nose.tools.assert_true(compare_files(xml_file1, xml_log), err_msg) + assert compare_files(xml_file1, xml_log), err_msg From b4b12c9c237c86ac7bfc15ab7b83e474879f9c22 Mon Sep 17 00:00:00 2001 From: Peter Meisrimel Date: Wed, 2 Oct 2024 15:44:47 +0200 Subject: [PATCH 2/6] cleanup --- tests/test_fmi_master.py | 1 - tests/test_log.py | 2 -- 2 files changed, 3 deletions(-) diff --git a/tests/test_fmi_master.py b/tests/test_fmi_master.py index 62206c26..ea18f579 100644 --- a/tests/test_fmi_master.py +++ b/tests/test_fmi_master.py @@ -469,7 +469,6 @@ def test_error_check_invalid_value(self): assert error_raised # TODO: Test case that supports storing FMU states required - @nose.tools.nottest def test_error_controlled_with_downsampling(self): models, connections = self._load_basic_simulation() uptate_options = {'result_downsampling_factor': 2, diff --git a/tests/test_log.py b/tests/test_log.py index 78bd0a59..056e557a 100644 --- a/tests/test_log.py +++ b/tests/test_log.py @@ -159,7 +159,6 @@ def test_hasattr_works(self): except AttributeError: pass - @testattr(stddist = True) def test_truncated_log_valid_xml(self): """ Test that a truncated log still contains valid XML.""" # XXX: There currently is no FMU is linux binaries running on Ubuntu 20+ (libgfortran issues) @@ -208,7 +207,6 @@ def 
test_truncated_log_valid_xml(self): assert len(final_msg) == 1, "MaximumLogSizeExceeded not found or found multiple times?" assert final_msg[0].nodes[0].text == "Maximum log size was exceeded, log is truncated to fully include logging from last CAPI call not exceeding limit." - @testattr(stddist = True) def test_resume_logging_on_increased_max_log_size(self): """Test that logging will resume when increasing max log size & previously exceeding the maximal size.""" file_path = os.path.dirname(os.path.abspath(__file__)) From 5706885de41f60e33bd82e17d34a85f1290ab75f Mon Sep 17 00:00:00 2001 From: Peter Meisrimel Date: Wed, 2 Oct 2024 16:21:29 +0200 Subject: [PATCH 3/6] Cleanup more rebase fixes more rebase cleanup more rebase cleanup Removing tests as module; moved test files to tests/files moved test_util to pyfmi/src Adding pytest config fixed example path replaced tests conditional to Assimulo installation by markers simplified workflow file; added changelog --- .github/workflows/build.yml | 5 +- CHANGELOG | 4 + tests/pytest.ini => pytest.ini | 4 + setup.py | 17 +- src/pyfmi/{tests => }/test_util.pxd | 0 src/pyfmi/{tests => }/test_util.pyx | 2 +- src/pyfmi/tests/__init__.py | 16 - tests/__init__.py | 16 - .../files/FMUs/XML/CS1.0/CoupledClutches.fmu | Bin .../files/FMUs/XML/CS1.0/NegatedAlias.fmu | Bin .../files/FMUs/XML/CS1.0/bouncingBall.fmu | Bin .../files/FMUs/XML/CS2.0/CoupledClutches.fmu | Bin .../files/FMUs/XML/CS2.0/GainTestInteger.fmu | Bin .../files/FMUs/XML/CS2.0/GainTestReal.fmu | Bin .../files/FMUs/XML/CS2.0/IntegerStep.fmu | Bin .../LinearCoSimulation_LinearSubSystem1.fmu | Bin .../LinearCoSimulation_LinearSubSystem2.fmu | Bin .../XML/CS2.0/LinearStability.SubSystem1.fmu | Bin .../XML/CS2.0/LinearStability.SubSystem2.fmu | Bin ...LinearStability_LinearSubSystemNoFeed1.fmu | Bin ...LinearStability_LinearSubSystemNoFeed2.fmu | Bin .../files/FMUs/XML/CS2.0/NegatedAlias.fmu | Bin .../files/FMUs/XML/CS2.0/bouncingBall.fmu | Bin 
.../files/FMUs/XML/ME1.0/Alias1.fmu | Bin .../files/FMUs/XML/ME1.0/CoupledClutches.fmu | Bin .../files/FMUs/XML/ME1.0/Description.fmu | Bin .../files/FMUs/XML/ME1.0/NegatedAlias.fmu | Bin .../files/FMUs/XML/ME1.0/NoState.Example1.fmu | Bin .../files/FMUs/XML/ME1.0/NominalTest4.fmu | Bin .../files/FMUs/XML/ME1.0/RLC_Circuit.fmu | Bin .../files/FMUs/XML/ME1.0/bouncingBall.fmu | Bin .../files/FMUs/XML/ME1.0/dq.fmu | Bin .../files/FMUs/XML/ME2.0/Alias.fmu | Bin .../files/FMUs/XML/ME2.0/BasicSens1.fmu | Bin .../files/FMUs/XML/ME2.0/BasicSens2.fmu | Bin .../files/FMUs/XML/ME2.0/Bouncing_Ball.fmu | Bin .../files/FMUs/XML/ME2.0/CoupledClutches.fmu | Bin .../XML/ME2.0/CoupledClutchesModified.fmu | Bin .../files/FMUs/XML/ME2.0/Description.fmu | Bin .../XML/ME2.0/Enumerations.Enumeration3.fmu | Bin .../files/FMUs/XML/ME2.0/Friction2.fmu | Bin .../files/FMUs/XML/ME2.0/Large.fmu | Bin .../XML/ME2.0/LinearStability.FullSystem.fmu | Bin .../XML/ME2.0/LinearStability.SubSystem1.fmu | Bin .../XML/ME2.0/LinearStability.SubSystem2.fmu | Bin .../files/FMUs/XML/ME2.0/LinearStateSpace.fmu | Bin .../files/FMUs/XML/ME2.0/MalFormed.fmu | Bin .../files/FMUs/XML/ME2.0/NegatedAlias.fmu | Bin .../files/FMUs/XML/ME2.0/NoState.Example1.fmu | Bin .../XML/ME2.0/NominalTests.NominalTest4.fmu | Bin .../files/FMUs/XML/ME2.0/OutputTest2.fmu | Bin .../files/FMUs/XML/ME2.0/ParameterAlias.fmu | Bin .../XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu | Bin .../files/FMUs/XML/ME2.0/bouncingBall.fmu | Bin .../FMUs/XML/ME2.0/test_type_definitions.fmu | Bin .../files/Logs/CoupledClutches_CS_log.txt | 0 .../files/Logs/CoupledClutches_log.txt | 0 .../files/Logs/CoupledClutches_log.xml | 0 .../files/Logs/boolean_log.xml | 0 .../files/Results/DoublePendulum.mat | Bin .../tests => tests}/files/Results/TestCSV.csv | 0 .../files/Results/qt_par_est_data.mat | Bin tests/test_fmi.py | 859 +++++++++-------- tests/test_fmi_coupled.py | 103 +-- tests/test_fmi_estimate.py | 160 ++-- tests/test_fmi_extended.py | 1 - 
tests/test_fmi_master.py | 25 +- tests/test_io.py | 869 +++++++++--------- tests/test_log.py | 4 +- tests/test_stream.py | 2 +- 70 files changed, 1025 insertions(+), 1062 deletions(-) rename tests/pytest.ini => pytest.ini (67%) rename src/pyfmi/{tests => }/test_util.pxd (100%) rename src/pyfmi/{tests => }/test_util.pyx (99%) delete mode 100644 src/pyfmi/tests/__init__.py delete mode 100644 tests/__init__.py rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS1.0/CoupledClutches.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS1.0/NegatedAlias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS1.0/bouncingBall.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/CoupledClutches.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/GainTestInteger.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/GainTestReal.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/IntegerStep.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/NegatedAlias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/CS2.0/bouncingBall.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/Alias1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/CoupledClutches.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/Description.fmu (100%) rename {src/pyfmi/tests => 
tests}/files/FMUs/XML/ME1.0/NegatedAlias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/NoState.Example1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/NominalTest4.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/RLC_Circuit.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/bouncingBall.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME1.0/dq.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Alias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/BasicSens1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/BasicSens2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/CoupledClutches.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Description.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Friction2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/Large.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/LinearStateSpace.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/MalFormed.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/NegatedAlias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/NoState.Example1.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/OutputTest2.fmu 
(100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/ParameterAlias.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/bouncingBall.fmu (100%) rename {src/pyfmi/tests => tests}/files/FMUs/XML/ME2.0/test_type_definitions.fmu (100%) rename {src/pyfmi/tests => tests}/files/Logs/CoupledClutches_CS_log.txt (100%) rename {src/pyfmi/tests => tests}/files/Logs/CoupledClutches_log.txt (100%) rename {src/pyfmi/tests => tests}/files/Logs/CoupledClutches_log.xml (100%) rename {src/pyfmi/tests => tests}/files/Logs/boolean_log.xml (100%) rename {src/pyfmi/tests => tests}/files/Results/DoublePendulum.mat (100%) rename {src/pyfmi/tests => tests}/files/Results/TestCSV.csv (100%) rename {src/pyfmi/tests => tests}/files/Results/qt_par_est_data.mat (100%) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 8b973249..0c5cea78 100755 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -57,7 +57,4 @@ jobs: - name: Build run: python3 setup.py install --user --fmil-home=/usr - name: Test - run: | - rm src/pyfmi/__init__.py - cp -rv src/pyfmi/tests/files tests - pytest --verbose tests/ + run: pytest diff --git a/CHANGELOG b/CHANGELOG index 66e985f3..e7729a03 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -8,6 +8,10 @@ * Added option to limit the size of the result ("result_max_size"), default set to 2GB. * Added method ResultDymolaBinary.get_variables_data. Included some minor refactorization. The new method allows for retrieving partial trajectories, and multiple trajectories at once. + * Changed testing framework from `nose` to `pytest`. + * Removed tests from the PyFMI installation. + * Moved test files from src/pyfmi/tests/... to tests/files/... + * Moved test_util.* from src/pyfmi/tests to src/pyfmi --- PyFMI-2.14.0 --- * Updated the error message displayed when loading FMUs with needsExecutionTool set to True. 
diff --git a/tests/pytest.ini b/pytest.ini similarity index 67% rename from tests/pytest.ini rename to pytest.ini index 9cce0b8d..b58567ef 100644 --- a/tests/pytest.ini +++ b/pytest.ini @@ -1,3 +1,7 @@ [pytest] +testpaths = + tests filterwarnings = ignore:.*does not support directional derivatives.*:UserWarning +markers = + assimulo \ No newline at end of file diff --git a/setup.py b/setup.py index 1e7c91b1..0361e316 100644 --- a/setup.py +++ b/setup.py @@ -248,7 +248,7 @@ def check_extensions(): compiler_directives={'language_level' : "3str"}) # Test utilities - ext_list += cythonize([os.path.join("src", "pyfmi", "tests", "test_util.pyx")], + ext_list += cythonize([os.path.join("src", "pyfmi", "test_util.pyx")], include_path = incl_path, compiler_directives={'language_level' : "3str"}) @@ -324,15 +324,14 @@ def check_extensions(): classifiers=CLASSIFIERS, ext_modules = ext_list, package_dir = {'pyfmi': os.path.join('src', 'pyfmi'), - 'pyfmi.common': os.path.join('src', 'common'), - 'pyfmi.tests': 'tests'}, + 'pyfmi.common': os.path.join('src', 'common') + }, packages=[ 'pyfmi', 'pyfmi.simulation', 'pyfmi.examples', 'pyfmi.common', 'pyfmi.common.plotting', - 'pyfmi.tests', 'pyfmi.common.log' ], package_data = {'pyfmi': [ @@ -340,17 +339,11 @@ def check_extensions(): 'examples/files/FMUs/CS1.0/*', 'examples/files/FMUs/ME2.0/*', 'examples/files/FMUs/CS2.0/*', - 'tests/files/FMUs/XML/ME1.0/*', - 'tests/files/FMUs/XML/CS1.0/*', - 'tests/files/FMUs/XML/ME2.0/*', - 'tests/files/FMUs/XML/CS2.0/*', - 'tests/files/Results/*', - 'tests/files/Logs/*', 'version.txt', 'LICENSE', 'CHANGELOG', - 'util/*'] + extra_package_data, - 'pyfmi.tests': ['pytest.ini']}, + 'util/*'] + extra_package_data + }, script_args=copy_args ) diff --git a/src/pyfmi/tests/test_util.pxd b/src/pyfmi/test_util.pxd similarity index 100% rename from src/pyfmi/tests/test_util.pxd rename to src/pyfmi/test_util.pxd diff --git a/src/pyfmi/tests/test_util.pyx b/src/pyfmi/test_util.pyx similarity index 99% rename 
from src/pyfmi/tests/test_util.pyx rename to src/pyfmi/test_util.pyx index e825bba8..10988975 100644 --- a/src/pyfmi/tests/test_util.pyx +++ b/src/pyfmi/test_util.pyx @@ -24,7 +24,7 @@ cimport pyfmi.fmil_import as FMIL from pyfmi.fmi import FMUException, FMUModelME1, FMUModelCS1, FMUModelCS2, FMUModelME2 def get_examples_folder(): - return os.path.join(os.path.dirname(__file__), '..', 'examples') + return os.path.join(os.path.dirname(__file__), 'examples') cdef class _ForTestingFMUModelME1(FMUModelME1): cdef int _get_nominal_continuous_states_fmil(self, FMIL.fmi1_real_t* xnominal, size_t nx): diff --git a/src/pyfmi/tests/__init__.py b/src/pyfmi/tests/__init__.py deleted file mode 100644 index 8962836d..00000000 --- a/src/pyfmi/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright (C) 2024 Modelon AB -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, version 3 of the License. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index e7c78986..00000000 --- a/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -# Copyright (C) 2018-2024 Modelon AB -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License as published by -# the Free Software Foundation, version 3 of the License. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public License -# along with this program. If not, see . diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu b/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu rename to tests/files/FMUs/XML/CS1.0/CoupledClutches.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu b/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu rename to tests/files/FMUs/XML/CS1.0/NegatedAlias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu b/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS1.0/bouncingBall.fmu rename to tests/files/FMUs/XML/CS1.0/bouncingBall.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu b/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu rename to tests/files/FMUs/XML/CS2.0/CoupledClutches.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu b/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu rename to tests/files/FMUs/XML/CS2.0/GainTestInteger.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu b/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/GainTestReal.fmu rename to tests/files/FMUs/XML/CS2.0/GainTestReal.fmu diff --git 
a/src/pyfmi/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu b/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/IntegerStep.fmu rename to tests/files/FMUs/XML/CS2.0/IntegerStep.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu b/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu rename to tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu b/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu rename to tests/files/FMUs/XML/CS2.0/LinearCoSimulation_LinearSubSystem2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu rename to tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu rename to tests/files/FMUs/XML/CS2.0/LinearStability.SubSystem2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu rename to tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed1.fmu diff --git 
a/src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu b/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu rename to tests/files/FMUs/XML/CS2.0/LinearStability_LinearSubSystemNoFeed2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu b/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu rename to tests/files/FMUs/XML/CS2.0/NegatedAlias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu b/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/CS2.0/bouncingBall.fmu rename to tests/files/FMUs/XML/CS2.0/bouncingBall.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/Alias1.fmu b/tests/files/FMUs/XML/ME1.0/Alias1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/Alias1.fmu rename to tests/files/FMUs/XML/ME1.0/Alias1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu b/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu rename to tests/files/FMUs/XML/ME1.0/CoupledClutches.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/Description.fmu b/tests/files/FMUs/XML/ME1.0/Description.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/Description.fmu rename to tests/files/FMUs/XML/ME1.0/Description.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu b/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu rename to tests/files/FMUs/XML/ME1.0/NegatedAlias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu b/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu similarity index 
100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu rename to tests/files/FMUs/XML/ME1.0/NoState.Example1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu b/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/NominalTest4.fmu rename to tests/files/FMUs/XML/ME1.0/NominalTest4.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu b/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu rename to tests/files/FMUs/XML/ME1.0/RLC_Circuit.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu b/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/bouncingBall.fmu rename to tests/files/FMUs/XML/ME1.0/bouncingBall.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME1.0/dq.fmu b/tests/files/FMUs/XML/ME1.0/dq.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME1.0/dq.fmu rename to tests/files/FMUs/XML/ME1.0/dq.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Alias.fmu b/tests/files/FMUs/XML/ME2.0/Alias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Alias.fmu rename to tests/files/FMUs/XML/ME2.0/Alias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu b/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens1.fmu rename to tests/files/FMUs/XML/ME2.0/BasicSens1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu b/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/BasicSens2.fmu rename to tests/files/FMUs/XML/ME2.0/BasicSens2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu b/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu similarity index 100% rename from 
src/pyfmi/tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu rename to tests/files/FMUs/XML/ME2.0/Bouncing_Ball.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu b/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu rename to tests/files/FMUs/XML/ME2.0/CoupledClutches.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu b/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu rename to tests/files/FMUs/XML/ME2.0/CoupledClutchesModified.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Description.fmu b/tests/files/FMUs/XML/ME2.0/Description.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Description.fmu rename to tests/files/FMUs/XML/ME2.0/Description.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu b/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu rename to tests/files/FMUs/XML/ME2.0/Enumerations.Enumeration3.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Friction2.fmu b/tests/files/FMUs/XML/ME2.0/Friction2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Friction2.fmu rename to tests/files/FMUs/XML/ME2.0/Friction2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/Large.fmu b/tests/files/FMUs/XML/ME2.0/Large.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/Large.fmu rename to tests/files/FMUs/XML/ME2.0/Large.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu rename to 
tests/files/FMUs/XML/ME2.0/LinearStability.FullSystem.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu rename to tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu b/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu rename to tests/files/FMUs/XML/ME2.0/LinearStability.SubSystem2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu b/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu rename to tests/files/FMUs/XML/ME2.0/LinearStateSpace.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/MalFormed.fmu b/tests/files/FMUs/XML/ME2.0/MalFormed.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/MalFormed.fmu rename to tests/files/FMUs/XML/ME2.0/MalFormed.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu b/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu rename to tests/files/FMUs/XML/ME2.0/NegatedAlias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu b/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu rename to tests/files/FMUs/XML/ME2.0/NoState.Example1.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu b/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu rename to 
tests/files/FMUs/XML/ME2.0/NominalTests.NominalTest4.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu b/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/OutputTest2.fmu rename to tests/files/FMUs/XML/ME2.0/OutputTest2.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu b/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu rename to tests/files/FMUs/XML/ME2.0/ParameterAlias.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu b/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu rename to tests/files/FMUs/XML/ME2.0/QuadTankPack_Sim_QuadTank.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu b/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/bouncingBall.fmu rename to tests/files/FMUs/XML/ME2.0/bouncingBall.fmu diff --git a/src/pyfmi/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu b/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu similarity index 100% rename from src/pyfmi/tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu rename to tests/files/FMUs/XML/ME2.0/test_type_definitions.fmu diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_CS_log.txt b/tests/files/Logs/CoupledClutches_CS_log.txt similarity index 100% rename from src/pyfmi/tests/files/Logs/CoupledClutches_CS_log.txt rename to tests/files/Logs/CoupledClutches_CS_log.txt diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_log.txt b/tests/files/Logs/CoupledClutches_log.txt similarity index 100% rename from src/pyfmi/tests/files/Logs/CoupledClutches_log.txt rename to tests/files/Logs/CoupledClutches_log.txt diff --git a/src/pyfmi/tests/files/Logs/CoupledClutches_log.xml 
b/tests/files/Logs/CoupledClutches_log.xml similarity index 100% rename from src/pyfmi/tests/files/Logs/CoupledClutches_log.xml rename to tests/files/Logs/CoupledClutches_log.xml diff --git a/src/pyfmi/tests/files/Logs/boolean_log.xml b/tests/files/Logs/boolean_log.xml similarity index 100% rename from src/pyfmi/tests/files/Logs/boolean_log.xml rename to tests/files/Logs/boolean_log.xml diff --git a/src/pyfmi/tests/files/Results/DoublePendulum.mat b/tests/files/Results/DoublePendulum.mat similarity index 100% rename from src/pyfmi/tests/files/Results/DoublePendulum.mat rename to tests/files/Results/DoublePendulum.mat diff --git a/src/pyfmi/tests/files/Results/TestCSV.csv b/tests/files/Results/TestCSV.csv similarity index 100% rename from src/pyfmi/tests/files/Results/TestCSV.csv rename to tests/files/Results/TestCSV.csv diff --git a/src/pyfmi/tests/files/Results/qt_par_est_data.mat b/tests/files/Results/qt_par_est_data.mat similarity index 100% rename from src/pyfmi/tests/files/Results/qt_par_est_data.mat rename to tests/files/Results/qt_par_est_data.mat diff --git a/tests/test_fmi.py b/tests/test_fmi.py index 91054d14..6760808a 100644 --- a/tests/test_fmi.py +++ b/tests/test_fmi.py @@ -28,7 +28,7 @@ import pyfmi.fmi as fmi from pyfmi.fmi_algorithm_drivers import AssimuloFMIAlg, AssimuloFMIAlgOptions, \ PYFMI_JACOBIAN_LIMIT, PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT -from pyfmi.tests.test_util import Dummy_FMUModelCS1, Dummy_FMUModelME1, Dummy_FMUModelME2, Dummy_FMUModelCS2, get_examples_folder +from pyfmi.test_util import Dummy_FMUModelCS1, Dummy_FMUModelME1, Dummy_FMUModelME2, Dummy_FMUModelCS2, get_examples_folder from pyfmi.common.io import ResultHandler from pyfmi.common.algorithm_drivers import UnrecognizedOptionError from pyfmi.common.core import create_temp_dir @@ -44,11 +44,10 @@ def solve(self): pass -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass file_path = os.path.dirname(os.path.abspath(__file__)) @@ 
-72,107 +71,107 @@ def _helper_unzipped_fmu_exception_invalid_dir(fmu_loader): with pytest.raises(FMUException, match = err_msg): fmu = fmu_loader(temp_dir, allow_unzipped_fmu = True) -if assimulo_installed: - class Test_FMUModelME1_Simulation: - def test_simulate_with_debug_option_no_state(self): - """ Verify that an instance of CVodeDebugInformation is created """ - model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class Test_FMUModelME1_Simulation: + def test_simulate_with_debug_option_no_state(self): + """ Verify that an instance of CVodeDebugInformation is created """ + model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NoState.Example1.fmu"), _connect_dll=False) - opts=model.simulate_options() - opts["logging"] = True - opts["result_handling"] = "csv" # set to anything except 'binary' + opts=model.simulate_options() + opts["logging"] = True + opts["result_handling"] = "csv" # set to anything except 'binary' - #Verify that a simulation is successful - res=model.simulate(options=opts) + #Verify that a simulation is successful + res=model.simulate(options=opts) - from pyfmi.debug import CVodeDebugInformation - debug = CVodeDebugInformation("NoState_Example1_debug.txt") + from pyfmi.debug import CVodeDebugInformation + debug = CVodeDebugInformation("NoState_Example1_debug.txt") - def test_no_result(self): - model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_no_result(self): + model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = None - res = model.simulate(options=opts) + opts = model.simulate_options() + opts["result_handling"] = None + res = model.simulate(options=opts) - with pytest.raises(Exception):
- res._get_result_data() + with pytest.raises(Exception): + res._get_result_data() - model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["return_result"] = False - res = model.simulate(options=opts) + opts = model.simulate_options() + opts["return_result"] = False + res = model.simulate(options=opts) + + with pytest.raises(Exception): + res._get_result_data() - with pytest.raises(Exception): - res._get_result_data() + def test_custom_result_handler(self): + model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - def test_custom_result_handler(self): - model = Dummy_FMUModelME1([], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + class A: + pass + class B(ResultHandler): + def get_result(self): + return None - class A: - pass - class B(ResultHandler): - def get_result(self): - return None + opts = model.simulate_options() + opts["result_handling"] = "hejhej" + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handling"] = "custom" + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handler"] = A() + with pytest.raises(Exception): + model.simulate(options=opts) + opts["result_handler"] = B() + res = model.simulate(options=opts) - opts = model.simulate_options() - opts["result_handling"] = "hejhej" - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handling"] = "custom" - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handler"] = A() - with pytest.raises(Exception): - model.simulate(options=opts) - opts["result_handler"] = B() - res = model.simulate(options=opts) + def 
setup_atol_auto_update_test_base(self): + model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False) + model.override_nominal_continuous_states = False + opts = model.simulate_options() + opts["return_result"] = False + opts["solver"] = "CVode" + return model, opts - def setup_atol_auto_update_test_base(self): - model = Dummy_FMUModelME1([], FMU_PATHS.ME1.nominal_test4, _connect_dll=False) - model.override_nominal_continuous_states = False - opts = model.simulate_options() - opts["return_result"] = False - opts["solver"] = "CVode" - return model, opts - - def test_atol_auto_update1(self): - """ - Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update2(self): - """ - Tests that atol doesn't get auto-updated when heuristic fails. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - - def test_atol_auto_update3(self): - """ - Tests that atol doesn't get auto-updated when nominals are never retrieved. 
- """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = [0.02, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - - # NOTE: - # There are more tests for ME2 for auto update of atol, but it should be enough to test - # one FMI version for that, because they mainly test algorithm drivers functionality. + def test_atol_auto_update1(self): + """ + Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + + def test_atol_auto_update2(self): + """ + Tests that atol doesn't get auto-updated when heuristic fails. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + + def test_atol_auto_update3(self): + """ + Tests that atol doesn't get auto-updated when nominals are never retrieved. 
+ """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = [0.02, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + + # NOTE: + # There are more tests for ME2 for auto update of atol, but it should be enough to test + # one FMI version for that, because they mainly test algorithm drivers functionality. class Test_FMUModelME1: @@ -703,419 +702,419 @@ def test_error_check_invalid_value(self): assert expected_substr in str(e), f"Error was {str(e)}, expected substring {expected_substr}" assert error_raised -if assimulo_installed: - class Test_FMUModelME2_Simulation: - def test_basicsens1(self): - #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class Test_FMUModelME2_Simulation: + def test_basicsens1(self): + #Noncompliant FMI test as 'd' is parameter is not supposed to be able to be set during simulation + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) - model.get_derivatives = f + model.get_derivatives = f - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - res = model.simulate(options=opts) - assert 
res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - assert res.solver.statistics["nsensfcnfcns"] > 0 + assert res.solver.statistics["nsensfcnfcns"] > 0 - def test_basicsens1dir(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) + def test_basicsens1dir(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens1.fmu"), _connect_dll=False) - caps = model.get_capability_flags() - caps["providesDirectionalDerivatives"] = True - model.get_capability_flags = lambda : caps + caps = model.get_capability_flags() + caps["providesDirectionalDerivatives"] = True + model.get_capability_flags = lambda : caps - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) + + def d(*args, **kwargs): + if args[0][0] == 40: + return np.array([-1.0]) + else: + return model.continuous_states + + model.get_directional_derivative = d + model.get_derivatives = f + model._provides_directional_derivatives = lambda : True - def d(*args, **kwargs): - if args[0][0] == 40: - return np.array([-1.0]) - else: - return model.continuous_states + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - model.get_directional_derivative = d - model.get_derivatives = f - model._provides_directional_derivatives = lambda : True + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + assert 
res.solver.statistics["nsensfcnfcns"] > 0 + assert res.solver.statistics["nfcnjacs"] == 0 - res = model.simulate(options=opts) - assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + def test_basicsens2(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False) - assert res.solver.statistics["nsensfcnfcns"] > 0 - assert res.solver.statistics["nfcnjacs"] == 0 + caps = model.get_capability_flags() + caps["providesDirectionalDerivatives"] = True + model.get_capability_flags = lambda : caps - def test_basicsens2(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "BasicSens2.fmu"), _connect_dll=False) + def f(*args, **kwargs): + d = model.values[model.variables["d"].value_reference] + x = model.continuous_states[0] + model.values[model.variables["der(x)"].value_reference] = d*x + return np.array([d*x]) + + def d(*args, **kwargs): + if args[0][0] == 40: + return np.array([-1.0]) + else: + return model.continuous_states + + model.get_directional_derivative = d + model.get_derivatives = f + model._provides_directional_derivatives = lambda : True - caps = model.get_capability_flags() - caps["providesDirectionalDerivatives"] = True - model.get_capability_flags = lambda : caps + opts = model.simulate_options() + opts["sensitivities"] = ["d"] - def f(*args, **kwargs): - d = model.values[model.variables["d"].value_reference] - x = model.continuous_states[0] - model.values[model.variables["der(x)"].value_reference] = d*x - return np.array([d*x]) + res = model.simulate(options=opts) + assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) - def d(*args, **kwargs): - if args[0][0] == 40: - return np.array([-1.0]) - else: - return model.continuous_states + assert res.solver.statistics["nsensfcnfcns"] == 0 - model.get_directional_derivative = d - model.get_derivatives = f - model._provides_directional_derivatives = lambda : True + def 
test_relative_tolerance(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["sensitivities"] = ["d"] + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = 1e-8 - res = model.simulate(options=opts) - assert res.final('dx/dd') == pytest.approx(0.36789, abs = 1e-3) + res = model.simulate(options=opts) - assert res.solver.statistics["nsensfcnfcns"] == 0 + assert res.options["CVode_options"]["atol"] == 1e-10 - def test_relative_tolerance(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + def test_simulate_with_debug_option_no_state(self): + """ Verify that an instance of CVodeDebugInformation is created """ + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = 1e-8 + opts=model.simulate_options() + opts["logging"] = True + opts["result_handling"] = "csv" # set to anything except 'binary' - res = model.simulate(options=opts) + #Verify that a simulation is successful + res=model.simulate(options=opts) + + from pyfmi.debug import CVodeDebugInformation + debug = CVodeDebugInformation("NoState_Example1_debug.txt") + + def test_maxord_is_set(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["solver"] = "CVode" + opts["CVode_options"]["maxord"] = 1 - assert res.options["CVode_options"]["atol"] == 1e-10 + res = model.simulate(final_time=1.5,options=opts) - def test_simulate_with_debug_option_no_state(self): - """ Verify that an instance of CVodeDebugInformation is created """ - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", 
"NoState.Example1.fmu"), _connect_dll=False) + assert res.solver.maxord == 1 - opts=model.simulate_options() - opts["logging"] = True - opts["result_handling"] = "csv" # set to anything except 'binary' + def test_with_jacobian_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["solver"] = "CVode" + opts["result_handling"] = None - #Verify that a simulation is successful - res=model.simulate(options=opts) + def run_case(expected, default="Default"): + model.reset() + res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) + assert res.options["with_jacobian"] == default, res.options["with_jacobian"] + assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian - from pyfmi.debug import CVodeDebugInformation - debug = CVodeDebugInformation("NoState_Example1_debug.txt") + run_case(False) - def test_maxord_is_set(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["solver"] = "CVode" - opts["CVode_options"]["maxord"] = 1 + model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0) + run_case(True) - res = model.simulate(final_time=1.5,options=opts) + opts["solver"] = "Radau5ODE" + run_case(False) - assert res.solver.maxord == 1 + opts["solver"] = "CVode" + opts["with_jacobian"] = False + run_case(False, False) - def test_with_jacobian_option(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0) + opts["with_jacobian"] = True + run_case(True, True) + + def test_sparse_option(self): + + def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False): + class Sparse_FMUModelME2(Dummy_FMUModelME2): + def 
get_derivatives_dependencies(self): + return (nnz, {}) + + model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) opts = model.simulate_options() opts["solver"] = "CVode" opts["result_handling"] = None + if set_sparse: + opts["CVode_options"]["linear_solver"] = "SPARSE" - def run_case(expected, default="Default"): - model.reset() - res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) - assert res.options["with_jacobian"] == default, res.options["with_jacobian"] - assert res.solver.problem._with_jacobian == expected, res.solver.problem._with_jacobian + model.get_ode_sizes = lambda: (fnbr, 0) - run_case(False) + res = model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) + assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian + assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver - model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT+1, 0) - run_case(True) + run_case(False, "DENSE") + run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2}) + run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}) + run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True) - opts["solver"] = "Radau5ODE" - run_case(False) + def test_ncp_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + assert opts["ncp"] == 500, opts["ncp"] - opts["solver"] = "CVode" - opts["with_jacobian"] = False - run_case(False, False) + def test_solver_options(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() - 
model.get_ode_sizes = lambda: (PYFMI_JACOBIAN_LIMIT-1, 0) - opts["with_jacobian"] = True - run_case(True, True) + try: + opts["CVode_options"] = "ShouldFail" + raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't") + except UnrecognizedOptionError: + pass - def test_sparse_option(self): + opts["CVode_options"] = {"maxh":1.0} + assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] - def run_case(expected_jacobian, expected_sparse, fnbr=0, nnz={}, set_sparse=False): - class Sparse_FMUModelME2(Dummy_FMUModelME2): - def get_derivatives_dependencies(self): - return (nnz, {}) + def test_solver_options_using_defaults(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() - model = Sparse_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["solver"] = "CVode" - opts["result_handling"] = None - if set_sparse: - opts["CVode_options"]["linear_solver"] = "SPARSE" + opts["CVode_options"] = {"maxh":1.0} + assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] - model.get_ode_sizes = lambda: (fnbr, 0) + opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set + assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"] + assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"] - res = 
model.simulate(final_time=1.5,options=opts, algorithm=NoSolveAlg) - assert res.solver.problem._with_jacobian == expected_jacobian, res.solver.problem._with_jacobian - assert res.solver.linear_solver == expected_sparse, res.solver.linear_solver + def test_deepcopy_option(self): + opts = AssimuloFMIAlgOptions() + opts["CVode_options"]["maxh"] = 2.0 - run_case(False, "DENSE") - run_case(True, "DENSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT**2}) - run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}) - run_case(True, "SPARSE", PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT+1, {"Dep": [1]*PYFMI_JACOBIAN_SPARSE_SIZE_LIMIT}, True) + import copy - def test_ncp_option(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - assert opts["ncp"] == 500, opts["ncp"] + opts_copy = copy.deepcopy(opts) - def test_solver_options(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() + assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..." 
- try: - opts["CVode_options"] = "ShouldFail" - raise Exception("Setting an incorrect option should lead to exception being thrown, it wasn't") - except UnrecognizedOptionError: - pass + def test_maxh_option(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) + opts = model.simulate_options() + opts["result_handling"] = None - opts["CVode_options"] = {"maxh":1.0} - assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] + def run_case(tstart, tstop, solver, ncp="Default"): + model.reset() - def test_solver_options_using_defaults(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() + opts["solver"] = solver - opts["CVode_options"] = {"maxh":1.0} - assert opts["CVode_options"]["atol"] == "Default", "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == 1.0, "Value should have been changed to 1.0: " + opts["CVode_options"]["maxh"] + if ncp != "Default": + opts["ncp"] = ncp - opts["CVode_options"] = {"atol":1e-6} #Defaults should be used together with only the option atol set - assert opts["CVode_options"]["atol"] == 1e-6, "Default should have been changed: " + opts["CVode_options"]["atol"] - assert opts["CVode_options"]["maxh"] == "Default", "Value should have been default is: " + opts["CVode_options"]["maxh"] + if opts["ncp"] == 0: + expected = 0.0 + else: + expected = (float(tstop)-float(tstart))/float(opts["ncp"]) - def test_deepcopy_option(self): - opts = AssimuloFMIAlgOptions() - opts["CVode_options"]["maxh"] = 2.0 + res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg) + assert 
res.solver.maxh == expected, res.solver.maxh + assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"] - import copy + run_case(0,1,"CVode") + run_case(0,1,"CVode", 0) + run_case(0,1,"Radau5ODE") + run_case(0,1,"Dopri5") + run_case(0,1,"RodasODE") + run_case(0,1,"LSODAR") + run_case(0,1,"LSODAR") + + def test_rtol_auto_update(self): + """ Test that default rtol picks up the unbounded attribute. """ + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False) + + res = model.simulate() + + # verify appropriate rtol(s) + for i, state in enumerate(model.get_states_list().keys()): + if res.solver.supports.get('rtol_as_vector', False): + # automatic construction of rtol vector + if model.get_variable_unbounded(state): + assert res.solver.rtol[i] == 0 + else: + assert res.solver.rtol[i] > 0 + else: # no support: scalar rtol + assert isinstance(res.solver.rtol, float) - opts_copy = copy.deepcopy(opts) + def test_rtol_vector_manual_valid(self): + """ Tests manual valid rtol vector works; if supported. """ - assert opts["CVode_options"]["maxh"] == opts_copy["CVode_options"]["maxh"], "Deepcopy not working..." + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0.] + + try: + res = model.simulate(options=opts) + # solver support + assert res.solver.rtol[0] == 1e-5 + assert res.solver.rtol[1] == 0. + except InvalidOptionException as e: # if no solver support + assert str(e).startswith("Failed to set the solver option 'rtol'") + + def test_rtol_vector_manual_size_mismatch(self): + """ Tests invalid rtol vector: size mismatch. 
""" + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5] + + err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states." + with pytest.raises(InvalidOptionException, match = err_msg): + model.simulate(options=opts) - def test_maxh_option(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NoState.Example1.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = None + def test_rtol_vector_manual_invalid(self): + """ Tests invalid rtol vector: different nonzero values. """ + + model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) - def run_case(tstart, tstop, solver, ncp="Default"): - model.reset() + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0] + + err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros." + with pytest.raises(InvalidOptionException, match = err_msg): + model.simulate(options=opts) - opts["solver"] = solver + def test_rtol_vector_manual_scalar_conversion(self): + """ Test automatic scalar conversion of trivial rtol vector. """ + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + + opts = model.simulate_options() + opts["CVode_options"]["rtol"] = [1e-5, 1e-5] + + #Verify no exception is raised as the rtol vector should be treated as a scalar + res = model.simulate(options=opts) + assert res.solver.rtol == 1e-5 + + def test_rtol_vector_unsupported(self): + """ Test that rtol as a vector triggers exceptions for unsupported solvers. 
""" + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + opts = model.simulate_options() + opts["result_handling"] = None - if ncp != "Default": - opts["ncp"] = ncp + def run_case(solver): + model.reset() - if opts["ncp"] == 0: - expected = 0.0 - else: - expected = (float(tstop)-float(tstart))/float(opts["ncp"]) - - res = model.simulate(start_time=tstart, final_time=tstop,options=opts, algorithm=NoSolveAlg) - assert res.solver.maxh == expected, res.solver.maxh - assert res.options[solver+"_options"]["maxh"] == "Default", res.options[solver+"_options"]["maxh"] - - run_case(0,1,"CVode") - run_case(0,1,"CVode", 0) - run_case(0,1,"Radau5ODE") - run_case(0,1,"Dopri5") - run_case(0,1,"RodasODE") - run_case(0,1,"LSODAR") - run_case(0,1,"LSODAR") - - def test_rtol_auto_update(self): - """ Test that default rtol picks up the unbounded attribute. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.coupled_clutches_modified, _connect_dll=False) - - res = model.simulate() - - # verify appropriate rtol(s) - for i, state in enumerate(model.get_states_list().keys()): - if res.solver.supports.get('rtol_as_vector', False): - # automatic construction of rtol vector - if model.get_variable_unbounded(state): - assert res.solver.rtol[i] == 0 - else: - assert res.solver.rtol[i] > 0 - else: # no support: scalar rtol - assert isinstance(res.solver.rtol, float) - - def test_rtol_vector_manual_valid(self): - """ Tests manual valid rtol vector works; if supported. """ - - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0.] + opts["solver"] = solver + opts[solver+"_options"]["rtol"] = [1e-5, 0.0] try: res = model.simulate(options=opts) - # solver support + # solver support; check tolerances assert res.solver.rtol[0] == 1e-5 - assert res.solver.rtol[1] == 0. 
- except InvalidOptionException as e: # if no solver support + assert res.solver.rtol[1] == 0.0 + except InvalidOptionException as e: assert str(e).startswith("Failed to set the solver option 'rtol'") - - def test_rtol_vector_manual_size_mismatch(self): - """ Tests invalid rtol vector: size mismatch. """ - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5] - - err_msg = "If the relative tolerance is provided as a vector, it need to be equal to the number of states." - with pytest.raises(InvalidOptionException, match = err_msg): - model.simulate(options=opts) + return # OK - def test_rtol_vector_manual_invalid(self): - """ Tests invalid rtol vector: different nonzero values. """ - - model = FMUModelME2(FMU_PATHS.ME2.coupled_clutches, _connect_dll=False) + run_case("CVode") + run_case("Radau5ODE") + run_case("Dopri5") + run_case("RodasODE") + run_case("LSODAR") + + def setup_atol_auto_update_test_base(self): + model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) + model.override_nominal_continuous_states = False + opts = model.simulate_options() + opts["return_result"] = False + opts["solver"] = "CVode" + return model, opts - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 0, 1e-5, 1e-5, 0, 1e-5,1e-6, 0] - - err_msg = "If the relative tolerance is provided as a vector, the values need to be equal except for zeros." - with pytest.raises(InvalidOptionException, match = err_msg): - model.simulate(options=opts) + def test_atol_auto_update1(self): + """ + Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". + """ + model, opts = self.setup_atol_auto_update_test_base() - def test_rtol_vector_manual_scalar_conversion(self): - """ Test automatic scalar conversion of trivial rtol vector. 
""" - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - - opts = model.simulate_options() - opts["CVode_options"]["rtol"] = [1e-5, 1e-5] - - #Verify no exception is raised as the rtol vector should be treated as a scalar - res = model.simulate(options=opts) - assert res.solver.rtol == 1e-5 + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + + def test_atol_auto_update2(self): + """ + Tests that atol doesn't get auto-updated when heuristic fails. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) + + def test_atol_auto_update3(self): + """ + Tests that atol doesn't get auto-updated when nominals are never retrieved. + """ + model, opts = self.setup_atol_auto_update_test_base() + + opts["CVode_options"]["atol"] = [0.02, 0.01] + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + + def test_atol_auto_update4(self): + """ + Tests that atol is not auto-updated when it's set the "correct" way (post initialization). + """ + model, opts = self.setup_atol_auto_update_test_base() - def test_rtol_vector_unsupported(self): - """ Test that rtol as a vector triggers exceptions for unsupported solvers. 
""" - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = None + model.setup_experiment() + model.initialize() + opts["initialize"] = False + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - def run_case(solver): - model.reset() - - opts["solver"] = solver - opts[solver+"_options"]["rtol"] = [1e-5, 0.0] - - try: - res = model.simulate(options=opts) - # solver support; check tolerances - assert res.solver.rtol[0] == 1e-5 - assert res.solver.rtol[1] == 0.0 - except InvalidOptionException as e: - assert str(e).startswith("Failed to set the solver option 'rtol'") - return # OK - - run_case("CVode") - run_case("Radau5ODE") - run_case("Dopri5") - run_case("RodasODE") - run_case("LSODAR") + def test_atol_auto_update5(self): + """ + Tests that atol is automatically set and depends on rtol. + """ + model, opts = self.setup_atol_auto_update_test_base() - def setup_atol_auto_update_test_base(self): - model = Dummy_FMUModelME2([], FMU_PATHS.ME2.nominal_test4, _connect_dll=False) - model.override_nominal_continuous_states = False - opts = model.simulate_options() - opts["return_result"] = False - opts["solver"] = "CVode" - return model, opts - - def test_atol_auto_update1(self): - """ - Tests that atol automatically gets updated when "atol = factor * pre_init_nominals". 
- """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update2(self): - """ - Tests that atol doesn't get auto-updated when heuristic fails. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = (0.01 * model.nominal_continuous_states) + [0.01, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.02]) - - def test_atol_auto_update3(self): - """ - Tests that atol doesn't get auto-updated when nominals are never retrieved. - """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["atol"] = [0.02, 0.01] - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - - def test_atol_auto_update4(self): - """ - Tests that atol is not auto-updated when it's set the "correct" way (post initialization). - """ - model, opts = self.setup_atol_auto_update_test_base() - - model.setup_experiment() - model.initialize() - opts["initialize"] = False - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) - - def test_atol_auto_update5(self): - """ - Tests that atol is automatically set and depends on rtol. 
- """ - model, opts = self.setup_atol_auto_update_test_base() - - opts["CVode_options"]["rtol"] = 1e-6 - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8]) + opts["CVode_options"]["rtol"] = 1e-6 + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [3e-8, 3e-8]) - def test_atol_auto_update6(self): - """ - Tests that rtol doesn't affect explicitly set atol. - """ - model, opts = self.setup_atol_auto_update_test_base() + def test_atol_auto_update6(self): + """ + Tests that rtol doesn't affect explicitly set atol. + """ + model, opts = self.setup_atol_auto_update_test_base() - opts["CVode_options"]["rtol"] = 1e-9 - opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) - model.simulate(options=opts, algorithm=NoSolveAlg) - np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) + opts["CVode_options"]["rtol"] = 1e-9 + opts["CVode_options"]["atol"] = 0.01 * model.nominal_continuous_states + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.02, 0.01]) + model.simulate(options=opts, algorithm=NoSolveAlg) + np.testing.assert_allclose(opts["CVode_options"]["atol"], [0.03, 0.03]) class Test_FMUModelME2: @@ -1153,17 +1152,17 @@ def test_unzipped_fmu_exceptions(self): """ Verify exception is raised if 'fmu' is a file and allow_unzipped_fmu is set to True, with FMUModelME2. """ err_msg = "Argument named 'fmu' must be a directory if argument 'allow_unzipped_fmu' is set to True." 
with pytest.raises(FMUException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=False, allow_unzipped_fmu=True) def test_invalid_binary(self): err_msg = "The FMU could not be loaded." with pytest.raises(InvalidBinaryException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStability.SubSystem2.fmu"), _connect_dll=True) def test_invalid_version(self): err_msg = "The FMU version is not supported by this class" with pytest.raises(InvalidVersionException, match = err_msg): - model = FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True) + FMUModelME2(os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "RLC_Circuit.fmu"), _connect_dll=True) def test_estimate_directional_derivatives_linearstate(self): model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "LinearStateSpace.fmu"), _connect_dll=False) diff --git a/tests/test_fmi_coupled.py b/tests/test_fmi_coupled.py index 08899f8a..d1fe625e 100644 --- a/tests/test_fmi_coupled.py +++ b/tests/test_fmi_coupled.py @@ -15,81 +15,78 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
-import pytest import os +import pytest from pyfmi.fmi import FMUModelME2 from pyfmi.fmi_coupled import CoupledFMUModelME2 import pyfmi.fmi as fmi -from pyfmi.tests.test_util import Dummy_FMUModelME2 +from pyfmi.test_util import Dummy_FMUModelME2 -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass file_path = os.path.dirname(os.path.abspath(__file__)) - me2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0") -if assimulo_installed: - class Test_CoupledFMUModelME2_Simulation: - def test_linear_example(self): - model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) - model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class Test_CoupledFMUModelME2_Simulation: + def test_linear_example(self): + model_sub_1 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) + model_sub_2 = Dummy_FMUModelME2([], os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - def sub1(*args, **kwargs): - u1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False) - a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False) - b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False) - c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False) - d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False) - x1 = model_sub_1.continuous_states[0] - model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1) - model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1]) - return a1*x1+b1*u1 - - def sub2(*args, **kwargs): - u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False) - a2 = 
model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False) - b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False) - c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False) - d2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False) - x2 = model_sub_2.continuous_states[0] - model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2) - model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2]) - return a2*x2+b2*u2 - - model_sub_1.get_derivatives = sub1 - model_sub_2.get_derivatives = sub2 - - models = [("First", model_sub_1), ("Second", model_sub_2)] - connections = [(model_sub_1,"y1",model_sub_2,"u2"), - (model_sub_2,"y2",model_sub_1,"u1")] - - coupled = CoupledFMUModelME2(models, connections=connections) + def sub1(*args, **kwargs): + u1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("u1")], evaluate = False) + a1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("a1")], evaluate = False) + b1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("b1")], evaluate = False) + c1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("c1")], evaluate = False) + d1 = model_sub_1.get_real([model_sub_1.get_variable_valueref("d1")], evaluate = False) + x1 = model_sub_1.continuous_states[0] + model_sub_1.set_real([model_sub_1.get_variable_valueref("y1")], c1*x1+d1*u1) + model_sub_1.set_real([model_sub_1.get_variable_valueref("x1")], [x1]) + return a1*x1+b1*u1 + + def sub2(*args, **kwargs): + u2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("u2")], evaluate = False) + a2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("a2")], evaluate = False) + b2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("b2")], evaluate = False) + c2 = model_sub_2.get_real([model_sub_2.get_variable_valueref("c2")], evaluate = False) + d2 = 
model_sub_2.get_real([model_sub_2.get_variable_valueref("d2")], evaluate = False) + x2 = model_sub_2.continuous_states[0] + model_sub_2.set_real([model_sub_2.get_variable_valueref("y2")], c2*x2+d2*u2) + model_sub_2.set_real([model_sub_2.get_variable_valueref("x2")], [x2]) + return a2*x2+b2*u2 + + model_sub_1.get_derivatives = sub1 + model_sub_2.get_derivatives = sub2 + + models = [("First", model_sub_1), ("Second", model_sub_2)] + connections = [(model_sub_1,"y1",model_sub_2,"u2"), + (model_sub_2,"y2",model_sub_1,"u1")] + + coupled = CoupledFMUModelME2(models, connections=connections) - opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0} + opts = {"CVode_options": {"rtol":1e-6, "atol":1e-6}, "ncp":0} - res = coupled.simulate(options=opts) + res = coupled.simulate(options=opts) - assert res.final("First.x1") == pytest.approx(0.08597302307099872) - assert res.final("Second.x2") == pytest.approx(0.0083923348082567) - assert res.initial("First.x1") == pytest.approx(1.0) - assert res.initial("Second.x2") == pytest.approx(1.0) - - assert res.final("First.u1") == pytest.approx(-0.25909975860402856) - assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295) - assert res.initial("First.u1") == pytest.approx(-17.736842105263158) - assert res.initial("Second.u2") == pytest.approx(-14.73684210526316) + assert res.final("First.x1") == pytest.approx(0.08597302307099872) + assert res.final("Second.x2") == pytest.approx(0.0083923348082567) + assert res.initial("First.x1") == pytest.approx(1.0) + assert res.initial("Second.x2") == pytest.approx(1.0) + + assert res.final("First.u1") == pytest.approx(-0.25909975860402856) + assert res.final("Second.u2") == pytest.approx(-0.0011806893910324295) + assert res.initial("First.u1") == pytest.approx(-17.736842105263158) + assert res.initial("Second.u2") == pytest.approx(-14.73684210526316) class Test_CoupledFMUModelME2: def test_reversed_connections(self): model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, 
"LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False) models = [("First", model_sub_1), ("Second", model_sub_2)] connections = [(model_sub_2,"y1",model_sub_1,"u2"), @@ -107,7 +104,6 @@ def test_reversed_connections(self): def test_inputs_list(self): model_sub_1 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem1.fmu"), _connect_dll=False) model_sub_2 = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.SubSystem2.fmu"), _connect_dll=False) - model_full = FMUModelME2(os.path.join(me2_xml_path, "LinearStability.FullSystem.fmu"), _connect_dll=False) models = [("First", model_sub_1), ("Second", model_sub_2)] connections = [(model_sub_1,"y1",model_sub_2,"u2"), @@ -156,7 +152,6 @@ def test_loading(self): CoupledFMUModelME2(models, connections) models = [("First", model_cc_1), ("Second", model_cc_2)] - coupled = CoupledFMUModelME2(models, connections) connections = [("k")] with pytest.raises(fmi.FMUException): diff --git a/tests/test_fmi_estimate.py b/tests/test_fmi_estimate.py index 7477dd0b..5e5dcbfe 100644 --- a/tests/test_fmi_estimate.py +++ b/tests/test_fmi_estimate.py @@ -16,93 +16,93 @@ # along with this program. If not, see . 
import os +import pytest import numpy as np -from pyfmi.tests.test_util import Dummy_FMUModelME2 +from pyfmi.test_util import Dummy_FMUModelME2 from scipy.io.matlab import loadmat -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass file_path = os.path.dirname(os.path.abspath(__file__)) -if assimulo_installed: - class Test_FMUModelME2_Estimate: - def test_quadtank_estimate(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False) - - g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False) - g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False) - g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False) - k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False) - k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False) - A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False) - A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False) - A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False) - A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False) - a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False) - a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False) - u1_vref = model.get_variable_valueref("u1") - u2_vref = model.get_variable_valueref("u2") - a1_vref = model.get_variable_valueref("qt.a1") - a2_vref = model.get_variable_valueref("qt.a2") - - def f(*args, **kwargs): - x1 = model.continuous_states[0] - x2 = model.continuous_states[1] - x3 = model.continuous_states[2] - x4 = model.continuous_states[3] - - u1 = model.get_real([u1_vref], evaluate = False) - u2 = model.get_real([u2_vref], evaluate = False) - a1 = model.get_real([a1_vref], evaluate = False) - 
a2 = model.get_real([a2_vref], evaluate = False) - - der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1 - der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2 - der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2 - der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1 - return np.concatenate([der_x1, der_x2, der_x3, der_x4]) - - model.get_derivatives = f - - # Load measurement data from file - data = loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False) - - # Extract data series - t_meas = data['t'][6000::100,0]-60 - y1_meas = data['y1_f'][6000::100,0]/100 - y2_meas = data['y2_f'][6000::100,0]/100 - y3_meas = data['y3_d'][6000::100,0]/100 - y4_meas = data['y4_d'][6000::100,0]/100 - u1 = data['u1_d'][6000::100,0] - u2 = data['u2_d'][6000::100,0] - - # Build input trajectory matrix for use in simulation - u = np.transpose(np.vstack((t_meas,u1,u2))) - - # Estimation of 2 parameters - data = np.vstack((t_meas, y1_meas, y2_meas)).transpose() - - res = model.estimate(parameters=["qt.a1", "qt.a2"], - measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u)) - - - model.reset() - - # Set optimal values for a1 and a2 into the model - model.set(['qt.a1'], res["qt.a1"]) - model.set(['qt.a2'], res["qt.a2"]) - - # Simulate model response with optimal parameters a1 and a2 - res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60) - - assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188" - assert np.abs(res.final('qt.x2') - 0.06654621) < 1e-3 - assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3 - assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3 - assert np.abs(res.final('u1') - 6.0) < 1e-3 - assert np.abs(res.final('u2') - 5.0) < 1e-3 +@pytest.mark.assimulo +class Test_FMUModelME2_Estimate: + def test_quadtank_estimate(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, 
"files", "FMUs", "XML", "ME2.0", "QuadTankPack_Sim_QuadTank.fmu"), _connect_dll=False) + + g = model.get_real([model.get_variable_valueref("qt.g")], evaluate = False) + g1_nmp = model.get_real([model.get_variable_valueref("qt.g1_nmp")], evaluate = False) + g2_nmp = model.get_real([model.get_variable_valueref("qt.g2_nmp")], evaluate = False) + k1_nmp = model.get_real([model.get_variable_valueref("qt.k1_nmp")], evaluate = False) + k2_nmp = model.get_real([model.get_variable_valueref("qt.k2_nmp")], evaluate = False) + A1 = model.get_real([model.get_variable_valueref("qt.A1")], evaluate = False) + A2 = model.get_real([model.get_variable_valueref("qt.A2")], evaluate = False) + A3 = model.get_real([model.get_variable_valueref("qt.A3")], evaluate = False) + A4 = model.get_real([model.get_variable_valueref("qt.A4")], evaluate = False) + a3 = model.get_real([model.get_variable_valueref("qt.a3")], evaluate = False) + a4 = model.get_real([model.get_variable_valueref("qt.a4")], evaluate = False) + u1_vref = model.get_variable_valueref("u1") + u2_vref = model.get_variable_valueref("u2") + a1_vref = model.get_variable_valueref("qt.a1") + a2_vref = model.get_variable_valueref("qt.a2") + + def f(*args, **kwargs): + x1 = model.continuous_states[0] + x2 = model.continuous_states[1] + x3 = model.continuous_states[2] + x4 = model.continuous_states[3] + + u1 = model.get_real([u1_vref], evaluate = False) + u2 = model.get_real([u2_vref], evaluate = False) + a1 = model.get_real([a1_vref], evaluate = False) + a2 = model.get_real([a2_vref], evaluate = False) + + der_x1 = -a1/A1*np.sqrt(2.*g*x1) + a3/A1*np.sqrt(2*g*x3) + g1_nmp*k1_nmp/A1*u1 + der_x2 = -a2/A2*np.sqrt(2.*g*x2) + a4/A2*np.sqrt(2*g*x4) + g2_nmp*k2_nmp/A2*u2 + der_x3 = -a3/A3*np.sqrt(2.*g*x3) + (1.-g2_nmp)*k2_nmp/A3*u2 + der_x4 = -a4/A4*np.sqrt(2.*g*x4) + (1.-g1_nmp)*k1_nmp/A4*u1 + return np.concatenate([der_x1, der_x2, der_x3, der_x4]) + + model.get_derivatives = f + + # Load measurement data from file + data = 
loadmat(os.path.join(file_path, "files", "Results", "qt_par_est_data.mat"), appendmat=False) + + # Extract data series + t_meas = data['t'][6000::100,0]-60 + y1_meas = data['y1_f'][6000::100,0]/100 + y2_meas = data['y2_f'][6000::100,0]/100 + y3_meas = data['y3_d'][6000::100,0]/100 + y4_meas = data['y4_d'][6000::100,0]/100 + u1 = data['u1_d'][6000::100,0] + u2 = data['u2_d'][6000::100,0] + + # Build input trajectory matrix for use in simulation + u = np.transpose(np.vstack((t_meas,u1,u2))) + + # Estimation of 2 parameters + data = np.vstack((t_meas, y1_meas, y2_meas)).transpose() + + res = model.estimate(parameters=["qt.a1", "qt.a2"], + measurements = (['qt.x1', 'qt.x2'], data), input=(['u1','u2'],u)) + + + model.reset() + + # Set optimal values for a1 and a2 into the model + model.set(['qt.a1'], res["qt.a1"]) + model.set(['qt.a2'], res["qt.a2"]) + + # Simulate model response with optimal parameters a1 and a2 + res = model.simulate(input=(['u1','u2'], u), start_time=0., final_time=60) + + assert np.abs(res.final('qt.x1') - 0.07060188) < 1e-3, "Was: " + str(res.final('qt.x1')) + ", expected: 0.07060188" + assert np.abs(res.final('qt.x2') - 0.06654621) < 1e-3 + assert np.abs(res.final('qt.x3') - 0.02736549) < 1e-3 + assert np.abs(res.final('qt.x4') - 0.02789857) < 1e-3 + assert np.abs(res.final('u1') - 6.0) < 1e-3 + assert np.abs(res.final('u2') - 5.0) < 1e-3 diff --git a/tests/test_fmi_extended.py b/tests/test_fmi_extended.py index f0045d89..dc47761c 100644 --- a/tests/test_fmi_extended.py +++ b/tests/test_fmi_extended.py @@ -21,7 +21,6 @@ from pyfmi.fmi_extended import FMUModelME1Extended file_path = os.path.dirname(os.path.abspath(__file__)) - me1_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME1.0") class Test_FMUModelME1Extended: diff --git a/tests/test_fmi_master.py b/tests/test_fmi_master.py index ea18f579..b1e29012 100644 --- a/tests/test_fmi_master.py +++ b/tests/test_fmi_master.py @@ -23,12 +23,11 @@ from pyfmi import Master from pyfmi.fmi 
import FMUException, FMUModelCS2, FMUModelME2 -from pyfmi.tests.test_util import Dummy_FMUModelCS2 +from pyfmi.test_util import Dummy_FMUModelCS2 from pyfmi.common.io import ResultHandler, ResultSizeError from pyfmi.common.algorithm_drivers import UnrecognizedOptionError file_path = os.path.dirname(os.path.abspath(__file__)) - cs2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "CS2.0") me2_xml_path = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0") @@ -167,11 +166,10 @@ def test_basic_simulation_memory(self): opts = {"result_handling":"memory"} self._basic_simulation(opts) - @testattr(stddist = True) def test_basic_simulation_max_result_size(self): opts = {"result_max_size":10000} - with nose.tools.assert_raises(ResultSizeError): + with pytest.raises(ResultSizeError): self._basic_simulation(opts) def test_basic_simulation_mat_file_naming(self): @@ -457,29 +455,20 @@ def test_error_check_invalid_value(self): models, connections = self._load_basic_simulation() test_values = [1/2, 1/3, "0.5", False] - # TODO: tidy up with pytest expected_substr = "Option 'result_downsampling_factor' must be an integer," + ## TODO: Pytest parametrization for value in test_values: - try: + with pytest.raises(Exception, match = expected_substr): self._sim_basic_simulation(models, connections, {'result_downsampling_factor': value}) - error_raised = False - except FMUException as e: - error_raised = True - assert expected_substr in str(e), f"Error was {str(e)}, expected substring {expected_substr}" - assert error_raised - - # TODO: Test case that supports storing FMU states required + + @pytest.mark.skipif(True, reason = "Error controlled simulation only supported if storing FMU states are available.") def test_error_controlled_with_downsampling(self): models, connections = self._load_basic_simulation() uptate_options = {'result_downsampling_factor': 2, 'error_controlled': True} - # TODO: Tidy up with pytest msg = "Result downsampling not supported for error controlled 
simulation, no downsampling will be performed." - with warnings.catch_warnings(record=True) as w: - warnings.simplefilter("default") + with pytest.warns(UserWarning, match = msg): self._sim_basic_simulation(models, connections, uptate_options) - # there will be some other warnings from FMU loading - assert f"UserWarning('{msg}')" in [i.message for i in w] def test_downsample_result_with_store_step_before_update(self): """ Test result_downsampling_factor with store_step_before_update. """ diff --git a/tests/test_io.py b/tests/test_io.py index 5853d85c..9b632c27 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -48,15 +48,14 @@ ) import pyfmi.fmi as fmi -from pyfmi.tests.test_util import Dummy_FMUModelME1, Dummy_FMUModelCS1, Dummy_FMUModelME2, Dummy_FMUModelCS2 +from pyfmi.test_util import Dummy_FMUModelME1, Dummy_FMUModelCS1, Dummy_FMUModelME2, Dummy_FMUModelCS2 file_path = os.path.dirname(os.path.abspath(__file__)) -assimulo_installed = True try: import assimulo except ImportError: - assimulo_installed = False + pass def _run_negated_alias(model, result_type, result_file_name=""): opts = model.simulate_options() @@ -75,102 +74,102 @@ def _run_negated_alias(model, result_type, result_file_name=""): for i in range(len(x)): assert x[i] == -y[i] -if assimulo_installed: - class TestResultFileText_Simulation: +@pytest.mark.assimulo +class TestResultFileText_Simulation: - def _correct_syntax_after_simulation_failure(self, result_file_name): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def _correct_syntax_after_simulation_failure(self, result_file_name): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - def f(*args, **kwargs): - if simple_alias.time > 0.5: - raise Exception - return -simple_alias.continuous_states + def f(*args, **kwargs): + if simple_alias.time > 
0.5: + raise Exception + return -simple_alias.continuous_states - simple_alias.get_derivatives = f + simple_alias.get_derivatives = f - opts = simple_alias.simulate_options() - opts["result_handling"] = "file" - opts["solver"] = "ExplicitEuler" - opts["result_file_name"] = result_file_name + opts = simple_alias.simulate_options() + opts["result_handling"] = "file" + opts["solver"] = "ExplicitEuler" + opts["result_file_name"] = result_file_name - successful_simulation = False - try: - res = simple_alias.simulate(options=opts) - successful_simulation = True #The above simulation should fail... - except Exception: - pass + successful_simulation = False + try: + res = simple_alias.simulate(options=opts) + successful_simulation = True #The above simulation should fail... + except Exception: + pass - if successful_simulation: - raise Exception + if successful_simulation: + raise Exception - result = ResultDymolaTextual(result_file_name) + result = ResultDymolaTextual(result_file_name) - x = result.get_variable_data("x").x - y = result.get_variable_data("y").x + x = result.get_variable_data("x").x + y = result.get_variable_data("y").x - assert len(x) > 2 + assert len(x) > 2 - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_correct_file_after_simulation_failure(self): - self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt") + def test_correct_file_after_simulation_failure(self): + self._correct_syntax_after_simulation_failure("NegatedAlias_result.txt") - def test_correct_stream_after_simulation_failure(self): - stream = StringIO("") - self._correct_syntax_after_simulation_failure(stream) + def test_correct_stream_after_simulation_failure(self): + stream = StringIO("") + self._correct_syntax_after_simulation_failure(stream) - def test_read_all_variables_using_model_variables(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", 
"NegatedAlias.fmu"), _connect_dll=False) + def test_read_all_variables_using_model_variables(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - for var in simple_alias.get_model_variables(): - res[var] + for var in simple_alias.get_model_variables(): + res[var] - def test_read_alias_derivative(self): - simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_read_alias_derivative(self): + simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "file" + opts = simple_alias.simulate_options() + opts["result_handling"] = "file" - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - derx = res["der(x)"] - dery = res["der(y)"] + derx = res["der(x)"] + dery = res["der(y)"] - assert len(derx) > 0 - for i in range(len(derx)): - assert derx[i] == dery[i] + assert len(derx) > 0 + for i in range(len(derx)): + assert derx[i] == dery[i] - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "file" - opts["result_file_name"] = 
"NoMatchingTest.txt" - opts["filter"] = "NoMatchingVariables" + opts = model.simulate_options() + opts["result_handling"] = "file" + opts["result_file_name"] = "NoMatchingTest.txt" + opts["filter"] = "NoMatchingVariables" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_enumeration_file(self): + def test_enumeration_file(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "file" + opts = model.simulate_options() + opts["result_handling"] = "file" - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultFileText: def _get_description(self, result_file_name): @@ -343,226 +342,226 @@ def readline(self): with pytest.raises(JIOError, match = msg): res = ResultDymolaTextual(stream) -if assimulo_installed: - class TestResultMemory_Simulation: - def test_memory_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "memory") +@pytest.mark.assimulo +class TestResultMemory_Simulation: + def test_memory_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + 
_run_negated_alias(simple_alias, "memory") - def test_memory_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "memory") + def test_memory_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "memory") - def test_only_parameters(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_only_parameters(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "memory" - opts["filter"] = "p2" + opts = model.simulate_options() + opts["result_handling"] = "memory" + opts["filter"] = "p2" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 3.0 == pytest.approx(res["p2"][0]) - assert not isinstance(res.initial("p2"), np.ndarray) - assert not isinstance(res.final("p2"), np.ndarray) + assert 3.0 == pytest.approx(res["p2"][0]) + assert not isinstance(res.initial("p2"), np.ndarray) + assert not isinstance(res.final("p2"), np.ndarray) - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "memory" - opts["filter"] = "NoMatchingVariables" + opts = model.simulate_options() + opts["result_handling"] = "memory" + opts["filter"] = "NoMatchingVariables" - res = model.simulate(options=opts) + res = 
model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_enumeration_memory(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + def test_enumeration_memory(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "memory" + opts = model.simulate_options() + opts["result_handling"] = "memory" - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultMemory: pass -if assimulo_installed: - class TestResultFileBinary_Simulation: - def _correct_file_after_simulation_failure(self, result_file_name): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) +@pytest.mark.assimulo +class TestResultFileBinary_Simulation: + def _correct_file_after_simulation_failure(self, result_file_name): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - def f(*args, **kwargs): - if simple_alias.time > 0.5: - raise Exception - return -simple_alias.continuous_states + def f(*args, **kwargs): + if simple_alias.time > 0.5: + raise Exception + return -simple_alias.continuous_states - simple_alias.get_derivatives = f + simple_alias.get_derivatives = f - opts = simple_alias.simulate_options() - opts["result_handling"] = "binary" - 
opts["result_file_name"] = result_file_name - opts["solver"] = "ExplicitEuler" + opts = simple_alias.simulate_options() + opts["result_handling"] = "binary" + opts["result_file_name"] = result_file_name + opts["solver"] = "ExplicitEuler" - successful_simulation = False - try: - res = simple_alias.simulate(options=opts) - successful_simulation = True #The above simulation should fail... - except Exception: - pass + successful_simulation = False + try: + res = simple_alias.simulate(options=opts) + successful_simulation = True #The above simulation should fail... + except Exception: + pass - if successful_simulation: - raise Exception + if successful_simulation: + raise Exception - result = ResultDymolaBinary(result_file_name) + result = ResultDymolaBinary(result_file_name) - x = result.get_variable_data("x").x - y = result.get_variable_data("y").x + x = result.get_variable_data("x").x + y = result.get_variable_data("y").x - assert len(x) > 2 + assert len(x) > 2 - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_work_flow_me2_file(self): - self._correct_file_after_simulation_failure("NegatedAlias_result.mat") + def test_work_flow_me2_file(self): + self._correct_file_after_simulation_failure("NegatedAlias_result.mat") - def test_work_flow_me2_stream(self): - stream = BytesIO() - self._correct_file_after_simulation_failure(stream) + def test_work_flow_me2_stream(self): + stream = BytesIO() + self._correct_file_after_simulation_failure(stream) - def _only_parameters(self, result_file_name): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def _only_parameters(self, result_file_name): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = 
ResultHandlerBinaryFile(model) - opts["filter"] = "p2" - opts["result_file_name"] = result_file_name + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) + opts["filter"] = "p2" + opts["result_file_name"] = result_file_name - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 3.0 == pytest.approx(res["p2"][0]) + assert 3.0 == pytest.approx(res["p2"][0]) - def test_only_parameters_file(self): - self._only_parameters("ParameterAlias_result.mat") + def test_only_parameters_file(self): + self._only_parameters("ParameterAlias_result.mat") - def test_only_parameters_stream(self): - stream = BytesIO() - self._only_parameters(stream) + def test_only_parameters_stream(self): + stream = BytesIO() + self._only_parameters(stream) - def _no_variables(self, result_file_name): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + def _no_variables(self, result_file_name): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(model) - opts["filter"] = "NoMatchingVariables" - opts["result_file_name"] = result_file_name + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) + opts["filter"] = "NoMatchingVariables" + opts["result_file_name"] = result_file_name - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_no_variables_file(self): - self._no_variables("ParameterAlias_result.mat") + def test_no_variables_file(self): + self._no_variables("ParameterAlias_result.mat") - def test_no_variables_stream(self): - 
stream = BytesIO() - self._no_variables(stream) + def test_no_variables_stream(self): + stream = BytesIO() + self._no_variables(stream) - def test_read_alias_derivative(self): - simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_read_alias_derivative(self): + simple_alias = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "binary" + opts = simple_alias.simulate_options() + opts["result_handling"] = "binary" - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - derx = res["der(x)"] - dery = res["der(y)"] + derx = res["der(x)"] + dery = res["der(y)"] - assert len(derx) > 0 - for i in range(len(derx)): - assert derx[i] == dery[i] + assert len(derx) > 0 + for i in range(len(derx)): + assert derx[i] == dery[i] - def test_enumeration_binary(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + def test_enumeration_binary(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(model) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(model) - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception - def 
test_integer_start_time(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) + def test_integer_start_time(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Alias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "binary" + opts = model.simulate_options() + opts["result_handling"] = "binary" - #Assert that there is no exception when reloading the file - res = model.simulate(start_time=0, options=opts) + #Assert that there is no exception when reloading the file + res = model.simulate(start_time=0, options=opts) - def test_read_all_variables_using_model_variables(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_read_all_variables_using_model_variables(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - for var in simple_alias.get_model_variables(): - res[var] + for var in simple_alias.get_model_variables(): + res[var] - def test_variable_alias_custom_handler(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_variable_alias_custom_handler(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = 
simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerBinaryFile(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - # test that res['y'] returns a vector of the same length as the time - # vector - assert len(res['y']) ==len(res['time']), "Wrong size of result vector." + # test that res['y'] returns a vector of the same length as the time + # vector + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." - x = res["x"] - y = res["y"] + x = res["x"] + y = res["y"] - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_binary_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "binary") + def test_binary_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "binary") - def test_binary_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "binary") + def test_binary_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "binary") - def test_binary_options_me1_stream(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = BytesIO() - _run_negated_alias(simple_alias, "binary", stream) + def 
test_binary_options_me1_stream(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = BytesIO() + _run_negated_alias(simple_alias, "binary", stream) - def test_binary_options_me2_stream(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = BytesIO() - _run_negated_alias(simple_alias, "binary", stream) + def test_binary_options_me2_stream(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = BytesIO() + _run_negated_alias(simple_alias, "binary", stream) class TestResultFileBinary: def _get_description_unicode(self, result_file_name): @@ -1408,83 +1407,84 @@ def test_get_last_result_file3(self): test_model._result_file = 123 # arbitrary number, just verify get_last_result_file works assert test_model.get_last_result_file() is None, "Expected None but got {}".format(test_model.get_last_result_file()) -if assimulo_installed: - class TestResultCSVTextual_Simulation: - def test_only_parameters(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) - opts["filter"] = "p2" +@pytest.mark.assimulo +class TestResultCSVTextual_Simulation: + def test_only_parameters(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) - res = model.simulate(options=opts) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) + opts["filter"] = "p2" - assert 3.0 == pytest.approx(res["p2"][0]) + res = 
model.simulate(options=opts) - def test_no_variables(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + assert 3.0 == pytest.approx(res["p2"][0]) - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) - opts["filter"] = "NoMatchingVariables" - opts["result_file_name"] = "NoMatchingTest.csv" + def test_no_variables(self): + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "ParameterAlias.fmu"), _connect_dll=False) + + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) + opts["filter"] = "NoMatchingVariables" + opts["result_file_name"] = "NoMatchingTest.csv" - res = model.simulate(options=opts) + res = model.simulate(options=opts) - assert 1.0 == pytest.approx(res["time"][-1]) + assert 1.0 == pytest.approx(res["time"][-1]) - def test_variable_alias_custom_handler(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + def test_variable_alias_custom_handler(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - opts = simple_alias.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(simple_alias) + opts = simple_alias.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(simple_alias) - res = simple_alias.simulate(options=opts) + res = simple_alias.simulate(options=opts) - # test that res['y'] returns a vector of the same length as the time - # vector - assert len(res['y']) ==len(res['time']), "Wrong size of result vector." 
+ # test that res['y'] returns a vector of the same length as the time + # vector + assert len(res['y']) ==len(res['time']), "Wrong size of result vector." - x = res["x"] - y = res["y"] + x = res["x"] + y = res["y"] - for i in range(len(x)): - assert x[i] == -y[i] + for i in range(len(x)): + assert x[i] == -y[i] - def test_csv_options_me1(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "csv") + def test_csv_options_me1(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "csv") - def test_csv_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - _run_negated_alias(simple_alias, "csv") + def test_csv_options_me2(self): + simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + _run_negated_alias(simple_alias, "csv") - def test_csv_options_me1_stream(self): - simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = StringIO() - _run_negated_alias(simple_alias, "csv", stream) + def test_csv_options_me1_stream(self): + simple_alias = Dummy_FMUModelME1([40], os.path.join(file_path, "files", "FMUs", "XML", "ME1.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = StringIO() + _run_negated_alias(simple_alias, "csv", stream) - def test_csv_options_me2(self): - simple_alias = Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) - stream = StringIO() - _run_negated_alias(simple_alias, "csv", stream) + def test_csv_options_me2(self): + simple_alias = 
Dummy_FMUModelME2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "NegatedAlias.fmu"), _connect_dll=False) + stream = StringIO() + _run_negated_alias(simple_alias, "csv", stream) - def test_enumeration_csv(self): + def test_enumeration_csv(self): - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) - data_type = model.get_variable_data_type("mode") + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Friction2.fmu"), _connect_dll=False) + data_type = model.get_variable_data_type("mode") - assert data_type == fmi.FMI2_ENUMERATION + assert data_type == fmi.FMI2_ENUMERATION - opts = model.simulate_options() - opts["result_handling"] = "custom" - opts["result_handler"] = ResultHandlerCSV(model) + opts = model.simulate_options() + opts["result_handling"] = "custom" + opts["result_handler"] = ResultHandlerCSV(model) - res = model.simulate(options=opts) - res["mode"] #Check that the enumeration variable is in the dict, otherwise exception + res = model.simulate(options=opts) + res["mode"] #Check that the enumeration variable is in the dict, otherwise exception class TestResultCSVTextual: @@ -1592,7 +1592,7 @@ def test_csv_options_cs1(self): def test_csv_options_cs2(self): simple_alias = Dummy_FMUModelCS2([("x", "y")], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "NegatedAlias.fmu"), _connect_dll=False) - self._run_negated_alias(self, simple_alias) + _run_negated_alias(simple_alias, "csv") class TestResultDymolaBinary: @@ -1773,9 +1773,8 @@ def _test_get_variables_data(self, dynamic_diagnostics: bool, nbr_of_calls: int, assert data_to_return, "Something went wrong, no test data was generated" return data_to_return - @testattr(stddist = True) def test_get_variables_data_values0(self): - """ Verifing values from get_variables_data. """ + """ Verifying values from get_variables_data. 
""" vars_to_test = ['J4.phi'] test_data_sets = self._test_get_variables_data(False, 3, None, vars_to_test, lambda x: None, "TestFile00.mat") @@ -1788,9 +1787,8 @@ def test_get_variables_data_values0(self): for index, test_data in test_data_sets.items(): np.testing.assert_array_almost_equal(test_data['J4.phi'].x, reference_data[index]) - @testattr(stddist = True) def test_get_variables_data_values1(self): - """ Verifing values from get_variables_data, with dynamic_diagnostics = True. """ + """ Verifying values from get_variables_data, with dynamic_diagnostics = True. """ vars_to_test = ['time', 'J4.phi', '@Diagnostics.step_time', '@Diagnostics.nbr_steps'] test_data_sets = self._test_get_variables_data(True, 5, 3, vars_to_test, lambda x: None, "TestFile01.mat") @@ -1807,9 +1805,8 @@ def test_get_variables_data_values1(self): for index, test_data in test_data_sets.items(): np.testing.assert_array_almost_equal(test_data['J4.phi'].x, reference_data[index]) - @testattr(stddist = True) def test_get_variables_data_values2(self): - """ Verifing values from get_variables_data, retrieving partial trajectories. """ + """ Verifying values from get_variables_data, retrieving partial trajectories. """ vars_to_test = ['time', 'J4.phi'] test_data_sets = self._test_get_variables_data(False, 5, None, vars_to_test, lambda x: x + 1, "TestFile02.mat") @@ -1826,7 +1823,7 @@ def test_get_variables_data_values2(self): @testattr(stddist = True) def test_get_variables_data_values3(self): - """ Verifing values from get_variables_data, and only asking for diagnostic variables. """ + """ Verifying values from get_variables_data, and only asking for diagnostic variables. 
""" vars_to_test = ['@Diagnostics.step_time', '@Diagnostics.nbr_steps'] test_data_sets = self._test_get_variables_data(True, 5, 1, vars_to_test, lambda x: None, "TestFile03.mat") @@ -1853,7 +1850,7 @@ def test_get_variables_data_values3(self): @testattr(stddist = True) def test_get_variables_data_values4(self): - """ Verifing values from get_variables_data, partial trajectories and checking both time and diagnostic data.""" + """ Verifying values from get_variables_data, partial trajectories and checking both time and diagnostic data.""" vars_to_test = ['time', '@Diagnostics.nbr_steps'] test_data_sets = self._test_get_variables_data(True, 5, 1, vars_to_test, lambda x: x + 2, "TestFile04.mat") @@ -1940,187 +1937,207 @@ def test_trajectory_lengths(self): assert rdb.get_variables_data([], start_index = 1)[1] == 1 assert rdb.get_variables_data([], start_index = 5)[1] == 5 -if assimulo_installed: - class TestFileSizeLimit: +@pytest.mark.assimulo +class TestFileSizeLimit: + def _setup(self, result_type, result_file_name="", fmi_type="me"): + if fmi_type == "me": + model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) + else: + model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) + + opts = model.simulate_options() + opts["result_handling"] = result_type + opts["result_file_name"] = result_file_name - def _setup(self, result_type, result_file_name="", fmi_type="me"): - if fmi_type == "me": - model = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "CoupledClutches.fmu"), _connect_dll=False) - else: - model = Dummy_FMUModelCS2([], os.path.join(file_path, "files", "FMUs", "XML", "CS2.0", "CoupledClutches.fmu"), _connect_dll=False) + return model, opts - opts = model.simulate_options() - opts["result_handling"] = result_type - opts["result_file_name"] = result_file_name + def 
_test_result(self, result_type, result_file_name="", max_size=1e6): + model, opts = self._setup(result_type, result_file_name) - return model, opts + opts["result_max_size"] = max_size - def _test_result(self, result_type, result_file_name="", max_size=1e6): - model, opts = self._setup(result_type, result_file_name) + #No exception should be raised. + res = model.simulate(options=opts) - opts["result_max_size"] = max_size + def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"): + model, opts = self._setup(result_type, result_file_name, fmi_type) - #No exception should be raised. - res = model.simulate(options=opts) + opts["result_max_size"] = 10 - def _test_result_exception(self, result_type, result_file_name="", fmi_type="me"): - model, opts = self._setup(result_type, result_file_name, fmi_type) + with pytest.raises(ResultSizeError): + model.simulate(options=opts) - opts["result_max_size"] = 10 + def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False): + """ + Verifies that the ResultSizeError exception is triggered (due to too large result) and also verifies + that the resulting file is within bounds of the set maximum size. + """ + model, opts = self._setup(result_type, result_file_name) + model.setup_experiment() + model.initialize() - with pytest.raises(ResultSizeError): - model.simulate(options=opts) + max_size = 1e6 + opts["result_max_size"] = max_size + opts["dynamic_diagnostics"] = dynamic_diagnostics + opts["logging"] = dynamic_diagnostics + opts["ncp"] = 0 #Set to zero to circumvent the early size check + ncp = 10000 - def _test_result_size_verification(self, result_type, result_file_name="", dynamic_diagnostics=False): - """ - Verifies that the ResultSizeError exception is triggered (due to too large result) and also verifies - that the resulting file is within bounds of the set maximum size. 
- """ - model, opts = self._setup(result_type, result_file_name) - model.setup_experiment() - model.initialize() + result_handler = get_result_handler(model, opts) - max_size = 1e6 - opts["result_max_size"] = max_size - opts["dynamic_diagnostics"] = dynamic_diagnostics - opts["logging"] = dynamic_diagnostics - opts["ncp"] = 0 #Set to zero to circumvent the early size check - ncp = 10000 + result_handler.set_options(opts) + result_handler.initialize_complete() - result_handler = get_result_handler(model, opts) + if opts["dynamic_diagnostics"]: + opts['CVode_options']['rtol'] = 1e-6 + opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol'] + diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options']) + result_handler.simulation_start(diag_params, diag_vars) + else: + result_handler.simulation_start() - result_handler.set_options(opts) - result_handler.initialize_complete() + with pytest.raises(ResultSizeError): + for _ in range(ncp): + result_handler.integration_point() - if opts["dynamic_diagnostics"]: - opts['CVode_options']['rtol'] = 1e-6 - opts['CVode_options']['atol'] = model.nominal_continuous_states * opts['CVode_options']['rtol'] - diag_params, diag_vars = setup_diagnostics_variables(model, 0, opts, opts['CVode_options']) - result_handler.simulation_start(diag_params, diag_vars) - else: - result_handler.simulation_start() + if opts["dynamic_diagnostics"]: + result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float)) - with pytest.raises(ResultSizeError): - for _ in range(ncp): - result_handler.integration_point() + result_file = model.get_last_result_file() + file_size = os.path.getsize(result_file) - if opts["dynamic_diagnostics"]: - result_handler.diagnostics_point(np.array([val[0] for val in diag_vars.values()], dtype=float)) + assert file_size > max_size*0.9 and file_size < max_size*1.1, \ + "The file size is not within 10% of the given max size" + 
+ def _test_result_size_early_abort(self, result_type, result_file_name=""): + """ + Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being + triggered was due to that the ESTIMATE for the result size was too big. + """ + model, opts = self._setup(result_type, result_file_name) - result_file = model.get_last_result_file() + max_size = 1e6 + opts["result_max_size"] = max_size + opts["ncp"] = 10000000 + + with pytest.raises(ResultSizeError): + model.simulate(options=opts) + + result_file = model.get_last_result_file() + if result_file: file_size = os.path.getsize(result_file) assert file_size > max_size*0.9 and file_size < max_size*1.1, \ "The file size is not within 10% of the given max size" - def _test_result_size_early_abort(self, result_type, result_file_name=""): - """ - Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being - triggered was due to that the ESTIMATE for the result size was too big. - """ - model, opts = self._setup(result_type, result_file_name) - - max_size = 1e6 - opts["result_max_size"] = max_size - opts["ncp"] = 10000000 + def _test_result_size_early_abort(self, result_type, result_file_name=""): + """ + Verifies that the ResultSizeError is triggered and also verifies that the cause of the error being + triggered was due to that the ESTIMATE for the result size was too big. 
+ """ + model, opts = self._setup(result_type, result_file_name) - with pytest.raises(ResultSizeError): - model.simulate(options=opts) + max_size = 1e6 + opts["result_max_size"] = max_size + opts["ncp"] = 10000000 - result_file = model.get_last_result_file() - if result_file: - file_size = os.path.getsize(result_file) + with pytest.raises(ResultSizeError): + model.simulate(options=opts) - assert file_size < max_size*0.1, \ - "The file size is not small, no early abort" + result_file = model.get_last_result_file() + if result_file: + file_size = os.path.getsize(result_file) - # TODO: Pytest parametrization + assert file_size < max_size*0.1, \ + "The file size is not small, no early abort" + + # TODO: Pytest parametrization + """ + Binary + """ + def test_binary_file_size_verification_diagnostics(self): """ - Binary + Make sure that the diagnostics variables are also taken into account. """ - def test_binary_file_size_verification_diagnostics(self): - """ - Make sure that the diagnostics variables are also taken into account. 
- """ - self._test_result_size_verification("binary", dynamic_diagnostics=True) - - def test_binary_file_size_verification(self): - self._test_result_size_verification("binary") + self._test_result_size_verification("binary", dynamic_diagnostics=True) - def test_binary_file_size_early_abort(self): - self._test_result_size_early_abort("binary") + def test_binary_file_size_verification(self): + self._test_result_size_verification("binary") + + def test_binary_file_size_early_abort(self): + self._test_result_size_early_abort("binary") - def test_small_size_binary_file(self): - self._test_result_exception("binary") - - def test_small_size_binary_file_cs(self): - self._test_result_exception("binary", fmi_type="cs") - - def test_small_size_binary_file_stream(self): - self._test_result_exception("binary", BytesIO()) + def test_small_size_binary_file(self): + self._test_result_exception("binary") + + def test_small_size_binary_file_cs(self): + self._test_result_exception("binary", fmi_type="cs") + + def test_small_size_binary_file_stream(self): + self._test_result_exception("binary", BytesIO()) - def test_large_size_binary_file(self): - self._test_result("binary") + def test_large_size_binary_file(self): + self._test_result("binary") - def test_large_size_binary_file_stream(self): - self._test_result("binary", BytesIO()) + def test_large_size_binary_file_stream(self): + self._test_result("binary", BytesIO()) - """ - Text - """ - def test_text_file_size_verification(self): - self._test_result_size_verification("file") - - def test_text_file_size_early_abort(self): - self._test_result_size_early_abort("file") + """ + Text + """ + def test_text_file_size_verification(self): + self._test_result_size_verification("file") + + def test_text_file_size_early_abort(self): + self._test_result_size_early_abort("file") - def test_small_size_text_file(self): - self._test_result_exception("file") - - def test_small_size_text_file_stream(self): - self._test_result_exception("file", 
StringIO()) + def test_small_size_text_file(self): + self._test_result_exception("file") + + def test_small_size_text_file_stream(self): + self._test_result_exception("file", StringIO()) - def test_large_size_text_file(self): - self._test_result("file") + def test_large_size_text_file(self): + self._test_result("file") - def test_large_size_text_file_stream(self): - self._test_result("file", StringIO()) + def test_large_size_text_file_stream(self): + self._test_result("file", StringIO()) - """ - CSV - """ - def test_csv_file_size_verification(self): - self._test_result_size_verification("csv") - - def test_csv_file_size_early_abort(self): - self._test_result_size_early_abort("csv") + """ + CSV + """ + def test_csv_file_size_verification(self): + self._test_result_size_verification("csv") + + def test_csv_file_size_early_abort(self): + self._test_result_size_early_abort("csv") - def test_small_size_csv_file(self): - self._test_result_exception("csv") - - def test_small_size_csv_file_stream(self): - self._test_result_exception("csv", StringIO()) + def test_small_size_csv_file(self): + self._test_result_exception("csv") + + def test_small_size_csv_file_stream(self): + self._test_result_exception("csv", StringIO()) - def test_large_size_csv_file(self): - self._test_result("csv", max_size=10000000) + def test_large_size_csv_file(self): + self._test_result("csv", max_size=10000000) - def test_large_size_csv_file_stream(self): - self._test_result("csv", StringIO(), max_size=10000000) + def test_large_size_csv_file_stream(self): + self._test_result("csv", StringIO(), max_size=10000000) - """ - Memory - """ - def test_small_size_memory(self): - self._test_result_exception("memory") - - def test_memory_size_early_abort(self): - self._test_result_size_early_abort("memory") - - def test_small_size_memory_stream(self): - self._test_result_exception("memory", StringIO()) + """ + Memory + """ + def test_small_size_memory(self): + self._test_result_exception("memory") + + def 
test_memory_size_early_abort(self): + self._test_result_size_early_abort("memory") + + def test_small_size_memory_stream(self): + self._test_result_exception("memory", StringIO()) - def test_large_size_memory(self): - self._test_result("memory") + def test_large_size_memory(self): + self._test_result("memory") - def test_large_size_memory_stream(self): - self._test_result("memory", StringIO()) + def test_large_size_memory_stream(self): + self._test_result("memory", StringIO()) diff --git a/tests/test_log.py b/tests/test_log.py index 056e557a..366c902a 100644 --- a/tests/test_log.py +++ b/tests/test_log.py @@ -19,7 +19,7 @@ from pyfmi.common.log import extract_xml_log, parse_xml_log from pyfmi.common.diagnostics import DIAGNOSTICS_PREFIX -from pyfmi.tests.test_util import Dummy_FMUModelME2 +from pyfmi.test_util import Dummy_FMUModelME2 from pyfmi.fmi_util import decode import numpy as np @@ -163,7 +163,6 @@ def test_truncated_log_valid_xml(self): """ Test that a truncated log still contains valid XML.""" # XXX: There currently is no FMU is linux binaries running on Ubuntu 20+ (libgfortran issues) # XXX: This is not a very good test, since it largely tests the mocked implementation, but better than nothing - file_path = os.path.dirname(os.path.abspath(__file__)) fmu_name = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu") # 1. 
Simulate + determine log size that corresponds to a truncation (resulting in invalid XML) @@ -209,7 +208,6 @@ def test_truncated_log_valid_xml(self): def test_resume_logging_on_increased_max_log_size(self): """Test that logging will resume when increasing max log size & previously exceeding the maximal size.""" - file_path = os.path.dirname(os.path.abspath(__file__)) fmu_name = os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "Bouncing_Ball.fmu") fmu = Dummy_FMUModelME2([], fmu_name, _connect_dll=False) diff --git a/tests/test_stream.py b/tests/test_stream.py index fd8be522..c821fe28 100644 --- a/tests/test_stream.py +++ b/tests/test_stream.py @@ -23,7 +23,7 @@ from filecmp import cmp as compare_files from pyfmi.fmi import FMUException, load_fmu, FMUModelCS2, FMUModelME2 -from pyfmi.tests.test_util import get_examples_folder +from pyfmi.test_util import get_examples_folder file_path = os.path.dirname(os.path.abspath(__file__)) From 420081b6e39c641ae902077bced55b2bf7a2223e Mon Sep 17 00:00:00 2001 From: Peter Meisrimel Date: Thu, 14 Nov 2024 15:24:22 +0100 Subject: [PATCH 4/6] converting tests from latest changes on master --- tests/test_io.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/test_io.py b/tests/test_io.py index 9b632c27..25570c71 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -1821,7 +1821,6 @@ def test_get_variables_data_values2(self): for index, test_data in test_data_sets.items(): np.testing.assert_array_almost_equal(test_data['J4.phi'].x, reference_data[index]) - @testattr(stddist = True) def test_get_variables_data_values3(self): """ Verifying values from get_variables_data, and only asking for diagnostic variables. 
""" vars_to_test = ['@Diagnostics.step_time', '@Diagnostics.nbr_steps'] @@ -1848,7 +1847,6 @@ def test_get_variables_data_values3(self): np.testing.assert_array_almost_equal(test_data['@Diagnostics.step_time'].x, reference_data['@Diagnostics.step_time'][index]) np.testing.assert_array_almost_equal(test_data['@Diagnostics.nbr_steps'].x, reference_data['@Diagnostics.nbr_steps'][index]) - @testattr(stddist = True) def test_get_variables_data_values4(self): """ Verifying values from get_variables_data, partial trajectories and checking both time and diagnostic data.""" vars_to_test = ['time', '@Diagnostics.nbr_steps'] @@ -1875,7 +1873,6 @@ def test_get_variables_data_values4(self): np.testing.assert_array_almost_equal(test_data['time'].x, reference_data['time'][index]) np.testing.assert_array_almost_equal(test_data['@Diagnostics.nbr_steps'].x, reference_data['@Diagnostics.nbr_steps'][index]) - @testattr(stddist = True) def test_stop_index_near_bounds(self): """ Verify that we get expected results near the end of the result file, including stop_index out of range. @@ -1903,7 +1900,6 @@ def test_stop_index_near_bounds(self): np.array([0.37268813, 0.37194424, 0.37120184, 0.37046092, 0.36972148, 0.36898351])) - @testattr(stddist = True) def test_trajectory_lengths(self): """ Verify lengths of trajectories are expected for a bunch of different inputs. """ fmu = Dummy_FMUModelME2([], os.path.join(file_path, "files", "FMUs", "XML", "ME2.0", "bouncingBall.fmu"), _connect_dll=False) From 21c4c6b80b80a67eec57a233077b2b6984d19c38 Mon Sep 17 00:00:00 2001 From: Peter Meisrimel Date: Thu, 14 Nov 2024 15:40:19 +0100 Subject: [PATCH 5/6] Updating Changelog --- CHANGELOG | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG b/CHANGELOG index e7729a03..7be62932 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,5 +1,9 @@ --- CHANGELOG --- --- Future --- + * Changed testing framework from `nose` to `pytest`. + * Removed tests from the PyFMI installation. 
+ * Moved test files from src/pyfmi/tests/... to tests/files/... + * Moved test_util.* from src/pyfmi/tests to src/pyfmi --- PyFMI-2.15.0 --- * Changed custom result handling for the Master algorithm to instead use a single ResultHandler for each model. @@ -8,10 +12,6 @@ * Added option to limit the size of the result ("result_max_size"), default set to 2GB. * Added method ResultDymolaBinary.get_variables_data. Included some minor refactorization. The new method allows for retrieving partial trajectories, and multiple trajectories at once. - * Changed testing framework from `nose` to `pytest`. - * Removed tests from the PyFMI installation. - * Moved test files from src/pyfmi/tests/... to tests/files/... - * Moved test_util.* from src/pyfmi/tests to src/pyfmi --- PyFMI-2.14.0 --- * Updated the error message displayed when loading FMUs with needsExecutionTool set to True. From c7449676aa8d95014346c7918800ec582b77535b Mon Sep 17 00:00:00 2001 From: Peter Meisrimel Date: Tue, 19 Nov 2024 10:17:00 +0100 Subject: [PATCH 6/6] Adding some more comments based on review --- tests/test_fmi.py | 1 + tests/test_fmi_coupled.py | 1 + tests/test_fmi_estimate.py | 1 + tests/test_io.py | 1 + 4 files changed, 4 insertions(+) diff --git a/tests/test_fmi.py b/tests/test_fmi.py index 6760808a..9806921f 100644 --- a/tests/test_fmi.py +++ b/tests/test_fmi.py @@ -47,6 +47,7 @@ def solve(self): try: import assimulo except ImportError: + # XXX: Accept import failure due to conditional test execution pass file_path = os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/test_fmi_coupled.py b/tests/test_fmi_coupled.py index d1fe625e..41f25494 100644 --- a/tests/test_fmi_coupled.py +++ b/tests/test_fmi_coupled.py @@ -26,6 +26,7 @@ try: import assimulo except ImportError: + # XXX: Accept import failure due to conditional test execution pass file_path = os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/test_fmi_estimate.py b/tests/test_fmi_estimate.py index 5e5dcbfe..d8668740 
100644 --- a/tests/test_fmi_estimate.py +++ b/tests/test_fmi_estimate.py @@ -25,6 +25,7 @@ try: import assimulo except ImportError: + # XXX: Accept import failure due to conditional test execution pass file_path = os.path.dirname(os.path.abspath(__file__)) diff --git a/tests/test_io.py b/tests/test_io.py index 25570c71..f55a4f25 100644 --- a/tests/test_io.py +++ b/tests/test_io.py @@ -55,6 +55,7 @@ try: import assimulo except ImportError: + # XXX: Accept import failure due to conditional test execution pass def _run_negated_alias(model, result_type, result_file_name=""):