Skip to content
This repository has been archived by the owner on Aug 21, 2024. It is now read-only.

Commit

Permalink
Updated behavior of hasDuplicates and uniqueItems to match JSON Schema specification
Browse files Browse the repository at this point in the history
  • Loading branch information
alongreyber committed Mar 2, 2021
1 parent 0066dc3 commit 0568dba
Show file tree
Hide file tree
Showing 3 changed files with 14 additions and 31 deletions.
32 changes: 12 additions & 20 deletions json_schema_fuzz/schema_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,10 +53,6 @@ def get_index_or_default(
return default


class SchemaIsFalse(Exception):
""" Raised if schema will not validate true for any values """


def merge_listify(values):
"""
Merge values by converting them to lists
Expand All @@ -71,16 +67,6 @@ def merge_listify(values):
return output


def merge_all_equal(values):
"""
Merge values and raise SchemaIsFalse exception
if all values are not equal
"""
if not values.count(values[0]) == len(values):
raise SchemaIsFalse()
return values[0]


# pylint: disable=too-many-branches
# pylint: disable=too-many-locals
# pylint: disable=too-many-statements
Expand Down Expand Up @@ -125,8 +111,6 @@ def merge(
"additionalProperties": lambda values: merge(*values),

# Array
"hasDuplicates": merge_all_equal,
"uniqueItems": merge_all_equal,
"contains": merge_listify,
}

Expand Down Expand Up @@ -201,6 +185,14 @@ def merge(
all_values = [d.get(key, {}) for d in properties_values]
merged_schema["properties"][key] = merge(*all_values)

has_duplicates_values = get_from_all(schemas, "hasDuplicates")
if has_duplicates_values and any(has_duplicates_values):
merged_schema["hasDuplicates"] = True

unique_items_values = get_from_all(schemas, "uniqueItems")
if unique_items_values and any(unique_items_values):
merged_schema["uniqueItems"] = True

items_values = get_from_all(schemas, "items")
if items_values:
if isinstance(items_values[0], list):
Expand Down Expand Up @@ -340,13 +332,13 @@ def invert(
if contains:
inverted_schemas.append({"items": invert(contains)})

unique_items = schema.get("uniqueItems", None)
unique_items = schema.get("uniqueItems", False)
if unique_items:
inverted_schemas.append({"hasDuplicates": unique_items})
inverted_schemas.append({"hasDuplicates": True})

has_duplicates = schema.get("hasDuplicates", None)
has_duplicates = schema.get("hasDuplicates", False)
if has_duplicates:
inverted_schemas.append({"uniqueItems": has_duplicates})
inverted_schemas.append({"uniqueItems": True})

# Combine all schemas together and return

Expand Down
2 changes: 1 addition & 1 deletion tests/merge_cases/array.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"contains": {
"maximum": 5
},
"uniqueItems": true
"uniqueItems": false
}
],
"merged": {
Expand Down
11 changes: 1 addition & 10 deletions tests/test_schema_operations.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import pytest

from json_schema_fuzz.schema_operations import SchemaIsFalse, invert, merge
from json_schema_fuzz.schema_operations import invert, merge

THIS_DIR = Path(__file__).parent
MERGE_CASE_DIR = THIS_DIR / "merge_cases"
Expand Down Expand Up @@ -36,15 +36,6 @@ def test_merge_doesnt_modify():
assert len(required_a) == 1


def test_merge_conflicting():
"""Test that merging conflicting values throws a SchemaIsFalse error."""
schema_a = {"hasDuplicates": True}
schema_b = {"hasDuplicates": False}

with pytest.raises(SchemaIsFalse):
merge(schema_a, schema_b)


INVERT_CASE_DIR = THIS_DIR / "invert_cases"
invert_case_files = glob.glob(
str(INVERT_CASE_DIR / "*.json"))
Expand Down

0 comments on commit 0568dba

Please sign in to comment.