Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 27 additions & 12 deletions traffic_control/resources/additional_sign.py
Original file line number Diff line number Diff line change
Expand Up @@ -277,22 +277,37 @@ def _content_s_from_row(self, row: OrderedDict, schema: Optional[dict], missing_
return content_s or None

def _content_s_to_columns(self, data: Dataset):
content_rows = data["content_s"]
"""Assume that this is empty template export if there is no data.
In that case add all possible content_s fields as columns."""
if not data.dict:
device_types_schemas = list(
filter(
lambda x: x is not None,
TrafficControlDeviceType.objects.all().values_list("content_schema", flat=True),
)
)
content_properties = set()
for device_type_schema in device_types_schemas:
for property_name in device_type_schema.get("properties", {}).keys():
content_properties.add(f"content_s.{property_name}")
data.headers.extend(list(content_properties))

# Collect all content_s properties names from every row.
# Use dict to retain properties order as they appear in data.
content_properties = {}
for row in content_rows:
for key in row:
content_properties[key] = None
else:
content_rows = data["content_s"]
# Collect all content_s properties names from every row.
# Use dict to retain properties order as they appear in data.
content_properties = {}
for row in content_rows:
for key in row:
content_properties[key] = None

for property in content_properties:
values = self._get_values_for_property(property, content_rows)
for property in content_properties:
values = self._get_values_for_property(property, content_rows)

# Convert arrays and objects to JSON strings
values = self._structured_values_to_string(values)
# Convert arrays and objects to JSON strings
values = self._structured_values_to_string(values)

data.append_col(values, header=f"content_s.{property}")
data.append_col(values, header=f"content_s.{property}")

del data["content_s"]

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -145,6 +145,29 @@ def test__additional_sign_real__import(resource, model, factory, format):
}


@pytest.mark.parametrize("resource", (AdditionalSignRealResource, AdditionalSignPlanResource))
@pytest.mark.django_db
def test_additional_sign_template__export(resource):
    """Exporting with no rows produces an empty template dataset whose headers
    include a content_s.* column for every property defined in schema_1."""
    get_traffic_control_device_type(
        code="type1",
        target_model=DeviceTypeTargetModel.ADDITIONAL_SIGN,
        content_schema=schema_1,
    )
    dataset = resource().export()
    # Template export: no data rows at all.
    assert dataset.dict == []
    # Every schema_1 property must be present as a content_s.<name> header.
    expected_properties = (
        "bool",
        "num",
        "int",
        "str",
        "str_nolimit",
        "enum",
        "obj",
        "list",
        "not_required",
    )
    for property_name in expected_properties:
        assert f"content_s.{property_name}" in dataset.headers


@pytest.mark.parametrize(
"resource, factory",
(
Expand Down Expand Up @@ -262,6 +285,10 @@ def test__additional_sign__import__create_with_content(model, resource, factory,
)

dataset = get_import_dataset(resource, format=format, delete_columns=["id"])
# add content_s column that does not exist in the device type, it should just be ignored
dataset.dict[0]["content_s.does_not_exits_in_schema"] = "Some value"
dataset.dict[1]["content_s.does_not_exits_in_schema"] = "Some value"
dataset.dict[2]["content_s.does_not_exits_in_schema"] = "Some value"
model.objects.all().delete()

result = resource().import_data(dataset, raise_errors=True, collect_failed_rows=True)
Expand Down Expand Up @@ -542,6 +569,8 @@ def test__additional_sign__import__update_with_content(model, resource, factory,
dataset = get_import_dataset(resource, format=format, delete_columns=deleted_columns)
row = dataset.dict[0]
row["content_s.str"] = "Other value"
# add content_s column that does not exist in the device type, it should just be ignored
row["content_s.does_not_exits_in_schema"] = "Some value"
dataset.dict = [row]

result = resource().import_data(dataset, raise_errors=False, collect_failed_rows=True)
Expand Down
Loading