diff --git a/json_schema_fuzz/schema_operations.py b/json_schema_fuzz/schema_operations.py
index fafb460..0ec7456 100644
--- a/json_schema_fuzz/schema_operations.py
+++ b/json_schema_fuzz/schema_operations.py
@@ -53,10 +53,6 @@ def get_index_or_default(
     return default
 
 
-class SchemaIsFalse(Exception):
-    """ Raised if schema will not validate true for any values """
-
-
 def merge_listify(values):
     """
     Merge values by converting them to lists
@@ -71,16 +67,6 @@ def merge_listify(values):
     return output
 
 
-def merge_all_equal(values):
-    """
-    Merge values and raise SchemaIsFalse exception
-    if all values are not equal
-    """
-    if not values.count(values[0]) == len(values):
-        raise SchemaIsFalse()
-    return values[0]
-
-
 # pylint: disable=too-many-branches
 # pylint: disable=too-many-locals
 # pylint: disable=too-many-statements
@@ -125,8 +111,6 @@ def merge(
         "additionalProperties": lambda values: merge(*values),
 
         # Array
-        "hasDuplicates": merge_all_equal,
-        "uniqueItems": merge_all_equal,
         "contains": merge_listify,
     }
 
@@ -201,6 +185,14 @@ def merge(
             all_values = [d.get(key, {}) for d in properties_values]
             merged_schema["properties"][key] = merge(*all_values)
 
+    has_duplicates_values = get_from_all(schemas, "hasDuplicates")
+    if has_duplicates_values and any(has_duplicates_values):
+        merged_schema["hasDuplicates"] = True
+
+    unique_items_values = get_from_all(schemas, "uniqueItems")
+    if unique_items_values and any(unique_items_values):
+        merged_schema["uniqueItems"] = True
+
     items_values = get_from_all(schemas, "items")
     if items_values:
         if isinstance(items_values[0], list):
@@ -340,13 +332,13 @@ def invert(
     if contains:
         inverted_schemas.append({"items": invert(contains)})
 
-    unique_items = schema.get("uniqueItems", None)
+    unique_items = schema.get("uniqueItems", False)
     if unique_items:
-        inverted_schemas.append({"hasDuplicates": unique_items})
+        inverted_schemas.append({"hasDuplicates": True})
 
-    has_duplicates = schema.get("hasDuplicates", None)
+    has_duplicates = schema.get("hasDuplicates", False)
    if has_duplicates:
-        inverted_schemas.append({"uniqueItems": has_duplicates})
+        inverted_schemas.append({"uniqueItems": True})
 
 
     # Combine all schemas together and return
diff --git a/tests/merge_cases/array.json b/tests/merge_cases/array.json
index 694a565..e76d87e 100644
--- a/tests/merge_cases/array.json
+++ b/tests/merge_cases/array.json
@@ -16,7 +16,7 @@
             "contains": {
                 "maximum": 5
             },
-            "uniqueItems": true
+            "uniqueItems": false
         }
     ],
     "merged": {
diff --git a/tests/test_schema_operations.py b/tests/test_schema_operations.py
index 1730084..ac0adb4 100644
--- a/tests/test_schema_operations.py
+++ b/tests/test_schema_operations.py
@@ -5,7 +5,7 @@
 
 import pytest
 
-from json_schema_fuzz.schema_operations import SchemaIsFalse, invert, merge
+from json_schema_fuzz.schema_operations import invert, merge
 
 THIS_DIR = Path(__file__).parent
 MERGE_CASE_DIR = THIS_DIR / "merge_cases"
@@ -36,15 +36,6 @@ def test_merge_doesnt_modify():
     assert len(required_a) == 1
 
 
-def test_merge_conflicting():
-    """Test that merging conflicting values throws a SchemaIsFalse error."""
-    schema_a = {"hasDuplicates": True}
-    schema_b = {"hasDuplicates": False}
-
-    with pytest.raises(SchemaIsFalse):
-        merge(schema_a, schema_b)
-
-
 INVERT_CASE_DIR = THIS_DIR / "invert_cases"
 invert_case_files = glob.glob(
     str(INVERT_CASE_DIR / "*.json"))
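
For reference, a minimal sketch of the merge behaviour this patch introduces (not part of the diff itself; it assumes get_from_all collects a key's values from every schema that defines it, as its other call sites in merge suggest):

    from json_schema_fuzz.schema_operations import merge

    # With merge_all_equal removed, "uniqueItems" and "hasDuplicates" are
    # merged with a logical OR: the flag is set to True if any input schema
    # sets it, rather than raising SchemaIsFalse when the inputs disagree.
    merged = merge({"uniqueItems": True}, {"uniqueItems": False})
    assert merged.get("uniqueItems") is True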