Updated actions, removed flake8, fixed unused vars
Boris committed Apr 26, 2024
1 parent 537cb5e commit f8e0655
Showing 5 changed files with 199 additions and 66 deletions.
32 changes: 22 additions & 10 deletions tests/import_specifications/test_file_parser.py
@@ -112,7 +112,9 @@ def test_Error_init_w_PARSE_FAIL_success():
 
 
 def test_Error_init_w_INCORRECT_COLUMN_COUNT_success():
-    e = Error(ErrorType.INCORRECT_COLUMN_COUNT, message="42", source_1=spcsrc("somefile"))
+    e = Error(
+        ErrorType.INCORRECT_COLUMN_COUNT, message="42", source_1=spcsrc("somefile")
+    )
 
     assert e.error == ErrorType.INCORRECT_COLUMN_COUNT
     assert e.message == "42"
@@ -175,12 +177,14 @@ def test_Error_init_fail():
     error_init_fail(ErrorType.PARSE_FAIL, None, spcsrc("wooo"), None, ValueError(err))
     error_init_fail(ErrorType.PARSE_FAIL, "msg", None, None, ValueError(err))
     err = "message, source_1 is required for a INCORRECT_COLUMN_COUNT error"
-    error_init_fail(ErrorType.INCORRECT_COLUMN_COUNT, None, spcsrc("whee"), None, ValueError(err))
-    error_init_fail(ErrorType.INCORRECT_COLUMN_COUNT, "msg", None, None, ValueError(err))
-    ms = ErrorType.MULTIPLE_SPECIFICATIONS_FOR_DATA_TYPE
-    err = (
-        "message, source_1, source_2 is required for a MULTIPLE_SPECIFICATIONS_FOR_DATA_TYPE error"
-    )
+    error_init_fail(
+        ErrorType.INCORRECT_COLUMN_COUNT, None, spcsrc("whee"), None, ValueError(err)
+    )
+    error_init_fail(
+        ErrorType.INCORRECT_COLUMN_COUNT, "msg", None, None, ValueError(err)
+    )
+    ms = ErrorType.MULTIPLE_SPECIFICATIONS_FOR_DATA_TYPE
+    err = "message, source_1, source_2 is required for a MULTIPLE_SPECIFICATIONS_FOR_DATA_TYPE error"
     error_init_fail(ms, None, None, None, ValueError(err))
     error_init_fail(ms, None, spcsrc("foo"), spcsrc("bar"), ValueError(err))
     error_init_fail(ms, "msg", None, spcsrc("bar"), ValueError(err))
@@ -215,7 +219,9 @@ def test_ParseResult_init_success():
 
 def test_ParseResult_init_fail():
     parseResult_init_fail(None, None, ValueError("source is required"))
-    parseResult_init_fail(None, (frozendict({"foo": "bar"}),), ValueError("source is required"))
+    parseResult_init_fail(
+        None, (frozendict({"foo": "bar"}),), ValueError("source is required")
+    )
     parseResult_init_fail(spcsrc("foo"), None, ValueError("result is required"))
 
 
@@ -280,7 +286,9 @@ def parseResults_init_fail(
     assert_exception_correct(got.value, expected)
 
 
-def _ftr(parser: Callable[[Path], ParseResults] = None, notype: str = None) -> FileTypeResolution:
+def _ftr(
+    parser: Callable[[Path], ParseResults] = None, notype: str = None
+) -> FileTypeResolution:
     return FileTypeResolution(parser, notype)
 
 
@@ -318,7 +326,9 @@ def test_parse_import_specifications_success():
         )
     )
 
-    res = parse_import_specifications((Path("myfile.xlsx"), Path("somefile.csv")), resolver, logger)
+    res = parse_import_specifications(
+        (Path("myfile.xlsx"), Path("somefile.csv")), resolver, logger
+    )
 
     assert res == ParseResults(
         frozendict(
@@ -361,7 +371,9 @@ def test_parse_import_specification_resolver_exception():
     # test that other errors aren't included in the result
     parser1.return_value = ParseResults(errors=tuple([Error(ErrorType.OTHER, "foo")]))
 
-    res = parse_import_specifications((Path("myfile.xlsx"), Path("somefile.csv")), resolver, logger)
+    res = parse_import_specifications(
+        (Path("myfile.xlsx"), Path("somefile.csv")), resolver, logger
+    )
 
     assert res == ParseResults(errors=tuple([Error(ErrorType.OTHER, "crapsticks")]))
 
15 changes: 12 additions & 3 deletions tests/import_specifications/test_file_writers.py
@@ -238,7 +238,10 @@ def test_file_writers_fail():
                 "data": [],
             }
         },
-        E("Invalid order_and_display entry for datatype t at index 2 - " + "expected 2 item list"),
+        E(
+            "Invalid order_and_display entry for datatype t at index 2 - "
+            + "expected 2 item list"
+        ),
     )
     file_writers_fail(
         p,
@@ -248,7 +251,10 @@
                 "data": [],
             }
         },
-        E("Invalid order_and_display entry for datatype t at index 0 - " + "expected 2 item list"),
+        E(
+            "Invalid order_and_display entry for datatype t at index 0 - "
+            + "expected 2 item list"
+        ),
     )
     for parm in [None, " \t ", 1]:
         file_writers_fail(
@@ -310,7 +316,10 @@ def test_file_writers_fail():
                 "data": [{"foo": 2, "whee": 3}, {"foo": 1, "whee": []}],
             }
         },
-        E("Data type ty data row 1's value for parameter whee " + "is not a number or a string"),
+        E(
+            "Data type ty data row 1's value for parameter whee "
+            + "is not a number or a string"
+        ),
     )
 
 
60 changes: 45 additions & 15 deletions tests/import_specifications/test_individual_parsers.py
@@ -45,7 +45,9 @@ def test_xsv_parse_success(temp_dir: Path):
     _xsv_parse_success(temp_dir, "\t", parse_tsv)
 
 
-def _xsv_parse_success(temp_dir: Path, sep: str, parser: Callable[[Path], ParseResults]):
+def _xsv_parse_success(
+    temp_dir: Path, sep: str, parser: Callable[[Path], ParseResults]
+):
     s = sep
     input_ = temp_dir / str(uuid.uuid4())
     with open(input_, "w") as test_file:
@@ -119,7 +121,9 @@ def test_xsv_parse_success_nan_inf(temp_dir: Path):
     _xsv_parse_success_nan_inf(temp_dir, "\t", parse_tsv)
 
 
-def _xsv_parse_success_nan_inf(temp_dir: Path, sep: str, parser: Callable[[Path], ParseResults]):
+def _xsv_parse_success_nan_inf(
+    temp_dir: Path, sep: str, parser: Callable[[Path], ParseResults]
+):
     s = sep
     input_ = temp_dir / str(uuid.uuid4())
     with open(input_, "w") as test_file:
@@ -208,7 +212,9 @@ def _xsv_parse_success_with_numeric_headers(
                     SpecificationSource(input_),
                     tuple(
                         [
-                            frozendict({"1": "val3", "2.0": "val4", "3": 1, "4.1": 8.9}),
+                            frozendict(
+                                {"1": "val3", "2.0": "val4", "3": 1, "4.1": 8.9}
+                            ),
                         ]
                     ),
                 )
@@ -295,7 +301,9 @@ def test_xsv_parse_fail_no_file(temp_dir: Path):
     res = parse_csv(input_)
 
     assert res == ParseResults(
-        errors=tuple([Error(ErrorType.FILE_NOT_FOUND, source_1=SpecificationSource(input_))])
+        errors=tuple(
+            [Error(ErrorType.FILE_NOT_FOUND, source_1=SpecificationSource(input_))]
+        )
     )
 
 
@@ -349,7 +357,9 @@ def _xsv_parse_fail(
         test_file.writelines(lines)
 
     res = parser(input_)
-    expected = ParseResults(errors=tuple([Error(err_type, message, SpecificationSource(input_))]))
+    expected = ParseResults(
+        errors=tuple([Error(err_type, message, SpecificationSource(input_))])
+    )
     assert res == expected
 
 
@@ -367,12 +377,16 @@ def test_xsv_parse_fail_bad_datatype_header(temp_dir: Path):
 
 def test_xsv_parse_fail_bad_version(temp_dir: Path):
     err = "Schema version 87 is larger than maximum processable version 1"
-    _xsv_parse_fail(temp_dir, ["Data type: foo; Columns: 22; Version: 87"], parse_csv, err)
+    _xsv_parse_fail(
+        temp_dir, ["Data type: foo; Columns: 22; Version: 87"], parse_csv, err
+    )
 
 
 def test_xsv_parse_fail_missing_column_headers(temp_dir: Path):
     err = "Missing 2nd header line"
-    _xsv_parse_fail(temp_dir, ["Data type: foo; Columns: 3; Version: 1\n"], parse_csv, err)
+    _xsv_parse_fail(
+        temp_dir, ["Data type: foo; Columns: 3; Version: 1\n"], parse_csv, err
+    )
 
     err = "Missing 3rd header line"
     lines = ["Data type: foo; Columns: 3; Version: 1\n", "head1, head2, head3\n"]
@@ -502,10 +516,18 @@ def test_excel_parse_success():
                     "type1": ParseResult(
                         SpecificationSource(ex, "tab1"),
                         (
-                            frozendict({"header1": "foo", "header2": 1, "header3": 6.7}),
-                            frozendict({"header1": "bar", "header2": 2, "header3": 8.9}),
-                            frozendict({"header1": "baz", "header2": None, "header3": 3.4}),
-                            frozendict({"header1": "bat", "header2": 4, "header3": None}),
+                            frozendict(
+                                {"header1": "foo", "header2": 1, "header3": 6.7}
+                            ),
+                            frozendict(
+                                {"header1": "bar", "header2": 2, "header3": 8.9}
+                            ),
+                            frozendict(
+                                {"header1": "baz", "header2": None, "header3": 3.4}
+                            ),
+                            frozendict(
+                                {"header1": "bat", "header2": 4, "header3": None}
+                            ),
                         ),
                     ),
                     "type2": ParseResult(
@@ -596,19 +618,25 @@ def _excel_parse_fail(
 
 def test_excel_parse_fail_no_file():
     f = _get_test_file("testtabs3full2nodata1empty0.xls")
-    _excel_parse_fail(f, errors=[Error(ErrorType.FILE_NOT_FOUND, source_1=SpecificationSource(f))])
+    _excel_parse_fail(
+        f, errors=[Error(ErrorType.FILE_NOT_FOUND, source_1=SpecificationSource(f))]
+    )
 
 
 def test_excel_parse_fail_directory(temp_dir):
     for d in ["testdir.xls", "testdir.xlsx"]:
         f = temp_dir / d
         os.makedirs(f, exist_ok=True)
         err = "The given path is a directory"
-        _excel_parse_fail(f, errors=[Error(ErrorType.PARSE_FAIL, err, SpecificationSource(f))])
+        _excel_parse_fail(
+            f, errors=[Error(ErrorType.PARSE_FAIL, err, SpecificationSource(f))]
+        )
 
 
 def test_excel_parse_fail_empty_file(temp_dir: Path):
-    _xsv_parse_fail(temp_dir, [], parse_excel, "Not a supported Excel file type", extension=".xls")
+    _xsv_parse_fail(
+        temp_dir, [], parse_excel, "Not a supported Excel file type", extension=".xls"
+    )
 
 
 def test_excel_parse_fail_non_excel_file(temp_dir: Path):
@@ -627,7 +655,9 @@
 
 
 def test_excel_parse_1emptytab():
-    _excel_parse_fail(_get_test_file("testtabs1empty.xls"), "No non-header data in file")
+    _excel_parse_fail(
+        _get_test_file("testtabs1empty.xls"), "No non-header data in file"
+    )
 
 
 def test_excel_parse_fail_bad_datatype_header():