[Fixes #12657] Refactor supported type, fix data retriever and refactor handlers configuration
mattiagiupponi committed Oct 18, 2024
1 parent 49edf76 commit 88770fc
Showing 4 changed files with 38 additions and 102 deletions.
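
Most of the changed lines across the four test modules collapse multi-line upload payload dictionaries into single-line literals and normalize quoting and trailing commas (payload['action'] becomes payload["action"], and "action": "upload" gains a trailing comma in the dicts that stay multi-line), consistent with running an auto-formatter such as Black over the tests. A minimal sketch of the before/after pattern, based on the SimpleUploadedFile payload from geonode/upload/api/tests.py; the Django import is assumed, since the visible hunks do not include it:

from django.core.files.uploadedfile import SimpleUploadedFile

# Before: multi-line payload dict, no trailing comma after the last entry
payload = {
    "base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"),
    "action": "upload"
}

# After: single-line, consistently double-quoted payload, as used throughout this commit
payload = {"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"), "action": "upload"}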
17 changes: 7 additions & 10 deletions geonode/upload/api/tests.py
@@ -62,10 +62,7 @@ def test_upload_method_not_allowed(self):
def test_raise_exception_if_file_is_not_a_handled(self):

self.client.force_login(get_user_model().objects.get(username="admin"))
payload = {
"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"),
"action": "upload"
}
payload = {"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"), "action": "upload"}
response = self.client.post(self.url, data=payload)
self.assertEqual(500, response.status_code)

@@ -77,7 +74,7 @@ def test_gpkg_raise_error_with_invalid_payload(self):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": "invalid",
"action": "upload"
"action": "upload",
}
expected = {
"success": False,
@@ -101,7 +98,7 @@ def test_gpkg_task_is_called(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload"
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -119,7 +116,7 @@ def test_geojson_task_is_called(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload"
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -137,7 +134,7 @@ def test_zip_file_is_unzip_and_the_handler_is_found(self, patch_upload):
"base_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"),
"zip_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"),
"store_spatial_files": True,
"action": "upload"
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -196,7 +193,7 @@ def test_asset_is_created_before_the_import_start(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload"
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -227,7 +224,7 @@ def test_asset_should_be_deleted_if_created_during_with_exception(
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload"
"action": "upload",
}

response = self.client.post(self.url, data=payload)
98 changes: 23 additions & 75 deletions geonode/upload/tests/end2end/test_end2end.py
@@ -176,10 +176,7 @@ class ImporterGeoPackageImportTest(BaseImporterEndToEndTest):
def test_import_geopackage(self):
self._cleanup_layers(name="stazioni_metropolitana")

payload = {
"base_file": open(self.valid_gkpg, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"}
initial_name = "stazioni_metropolitana"
self._assertimport(payload, initial_name)
self._cleanup_layers(name="stazioni_metropolitana")
@@ -189,16 +186,10 @@ def test_import_geopackage(self):
def test_import_gpkg_overwrite(self):
self._cleanup_layers(name="stazioni_metropolitana")
initial_name = "stazioni_metropolitana"
payload = {
"base_file": open(self.valid_gkpg, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"}
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)

payload = {
"base_file": open(self.valid_gkpg, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "upload"}
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated)
@@ -213,10 +204,7 @@ class ImporterNoCRSImportTest(BaseImporterEndToEndTest):
def test_import_geopackage_with_no_crs_table(self):

self._cleanup_layers(name="mattia_test")
payload = {
"base_file": open(self.no_crs_gpkg, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.no_crs_gpkg, "rb"), "action": "upload"}
initial_name = "mattia_test"
with self.assertLogs(level="ERROR") as _log:
self._assertimport(payload, initial_name)
@@ -239,11 +227,7 @@ def test_import_geopackage_with_no_crs_table_should_raise_error_if_all_layer_are
_select_valid_layers.return_value = []

self._cleanup_layers(name="mattia_test")
payload = {
"base_file": open(self.no_crs_gpkg, "rb"),
"store_spatial_file": True,
"action": "upload"
}
payload = {"base_file": open(self.no_crs_gpkg, "rb"), "store_spatial_file": True, "action": "upload"}

with self.assertLogs(level="ERROR") as _log:
self.client.force_login(self.admin)
@@ -266,10 +250,7 @@ def test_import_geojson(self):

self._cleanup_layers(name="valid")

payload = {
"base_file": open(self.valid_geojson, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"}
initial_name = "valid"
self._assertimport(payload, initial_name)

@@ -279,16 +260,10 @@ def test_import_geojson(self):
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_import_geojson_overwrite(self):
self._cleanup_layers(name="valid")
payload = {
"base_file": open(self.valid_geojson, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"}
initial_name = "valid"
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)
payload = {
"base_file": open(self.valid_geojson, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_geojson, "rb"), "action": "upload"}
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated)
@@ -302,10 +277,7 @@ class ImporterGCSVImportTest(BaseImporterEndToEndTest):
def test_import_geojson(self):
self._cleanup_layers(name="valid")

payload = {
"base_file": open(self.valid_csv, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"}
initial_name = "valid"
self._assertimport(payload, initial_name)
self._cleanup_layers(name="valid")
@@ -314,17 +286,11 @@ def test_import_geojson(self):
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_import_csv_overwrite(self):
self._cleanup_layers(name="valid")
payload = {
"base_file": open(self.valid_csv, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"}
initial_name = "valid"
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)

payload = {
"base_file": open(self.valid_csv, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_csv, "rb"), "action": "upload"}
initial_name = "valid"
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
@@ -337,10 +303,7 @@ class ImporterKMLImportTest(BaseImporterEndToEndTest):
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_import_kml(self):
self._cleanup_layers(name="sample_point_dataset")
payload = {
"base_file": open(self.valid_kml, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"}
initial_name = "sample_point_dataset"
self._assertimport(payload, initial_name)
self._cleanup_layers(name="sample_point_dataset")
@@ -351,16 +314,10 @@ def test_import_kml_overwrite(self):
initial_name = "sample_point_dataset"

self._cleanup_layers(name="sample_point_dataset")
payload = {
"base_file": open(self.valid_kml, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"}
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)

payload = {
"base_file": open(self.valid_kml, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_kml, "rb"), "action": "upload"}
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
self._assertimport(payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated)
@@ -373,7 +330,7 @@ class ImporterShapefileImportTest(BaseImporterEndToEndTest):
def test_import_shapefile(self):
self._cleanup_layers(name="air_Runways")
payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()}
payload['action'] = "upload"
payload["action"] = "upload"
initial_name = "air_Runways"
self._assertimport(payload, initial_name)
self._cleanup_layers(name="air_Runways")
@@ -384,13 +341,13 @@ def test_import_shapefile_overwrite(self):

self._cleanup_layers(name="air_Runways")
payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()}
payload['action'] = "upload"
payload["action"] = "upload"
initial_name = "air_Runways"
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)
payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()}
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
payload['action'] = "upload"
payload["action"] = "upload"
self._assertimport(
payload, initial_name, overwrite=True, last_update=prev_dataset.last_updated, keep_resource=True
)
@@ -403,10 +360,7 @@ class ImporterRasterImportTest(BaseImporterEndToEndTest):
def test_import_raster(self):
self._cleanup_layers(name="test_raster")

payload = {
"base_file": open(self.valid_tif, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"}
initial_name = "test_raster"
self._assertimport(payload, initial_name)
self._cleanup_layers(name="test_raster")
@@ -417,16 +371,10 @@ def test_import_raster_overwrite(self):
initial_name = "test_raster"

self._cleanup_layers(name="test_raster")
payload = {
"base_file": open(self.valid_tif, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"}
prev_dataset = self._assertimport(payload, initial_name, keep_resource=True)

payload = {
"base_file": open(self.valid_tif, "rb"),
"action": "upload"
}
payload = {"base_file": open(self.valid_tif, "rb"), "action": "upload"}
initial_name = "test_raster"
payload["overwrite_existing_layer"] = True
payload["resource_pk"] = prev_dataset.pk
@@ -442,7 +390,7 @@ def test_import_3dtiles(self):
"url": "https://raw.githubusercontent.com/CesiumGS/3d-tiles-samples/main/1.1/TilesetWithFullMetadata/tileset.json",
"title": "Remote Title",
"type": "3dtiles",
"action": "upload"
"action": "upload",
}
initial_name = "remote_title"
assert_payload = {
@@ -459,7 +407,7 @@ def test_import_3dtiles_overwrite(self):
"url": "https://raw.githubusercontent.com/CesiumGS/3d-tiles-samples/main/1.1/TilesetWithFullMetadata/tileset.json",
"title": "Remote Title",
"type": "3dtiles",
"action": "upload"
"action": "upload",
}
initial_name = "remote_title"
assert_payload = {
@@ -507,7 +455,7 @@ def test_import_wms(self):
"type": "wms",
"lookup": resource_to_take,
"parse_remote_metadata": True,
"action": "upload"
"action": "upload",
}
initial_name = res.title
assert_payload = {
21 changes: 6 additions & 15 deletions geonode/upload/tests/end2end/test_end2end_copy.py
@@ -85,7 +85,7 @@ def _assertCloning(self, initial_name):
# defining the payload
payload = QueryDict("", mutable=True)
payload.update({"defaults": '{"title":"title_of_the_cloned_resource"}'})
payload['action'] = "copy"
payload["action"] = "copy"

# calling the endpoint
response = self.client.put(_url, data=payload, content_type="application/json")
@@ -114,7 +114,7 @@ def _assertCloning(self, initial_name):
self.assertTrue(schema_entity.name in [y.name for y in resources])

def _import_resource(self, payload, initial_name):
payload['action'] = "upload"
payload["action"] = "upload"
_url = reverse("importer_upload")
self.client.force_login(get_user_model().objects.get(username="admin"))

@@ -149,10 +149,7 @@ class ImporterCopyEnd2EndGpkgTest(BaseClassEnd2End):
)
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_copy_dataset_from_geopackage(self):
payload = {
"base_file": open(self.valid_gkpg, "rb"),
"action": "copy"
}
payload = {"base_file": open(self.valid_gkpg, "rb"), "action": "copy"}
initial_name = "stazioni_metropolitana"
# first we need to import a resource
with transaction.atomic():
@@ -171,10 +168,7 @@ class ImporterCopyEnd2EndGeoJsonTest(BaseClassEnd2End):
)
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_copy_dataset_from_geojson(self):
payload = {
"base_file": open(self.valid_geojson, "rb"),
"action": "copy"
}
payload = {"base_file": open(self.valid_geojson, "rb"), "action": "copy"}
initial_name = "valid"
# first we need to import a resource
with transaction.atomic():
@@ -193,7 +187,7 @@ class ImporterCopyEnd2EndShapeFileTest(BaseClassEnd2End):
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_copy_dataset_from_shapefile(self):
payload = {_filename: open(_file, "rb") for _filename, _file in self.valid_shp.items()}
payload['action'] = "copy"
payload["action"] = "copy"
initial_name = "air_runways"
# first we need to import a resource
with transaction.atomic():
@@ -211,10 +205,7 @@ class ImporterCopyEnd2EndKMLTest(BaseClassEnd2End):
)
@override_settings(GEODATABASE_URL=f"{geourl.split('/geonode_data')[0]}/test_geonode_data")
def test_copy_dataset_from_kml(self):
payload = {
"base_file": open(self.valid_kml, "rb"),
"action": "copy"
}
payload = {"base_file": open(self.valid_kml, "rb"), "action": "copy"}
initial_name = "sample_point_dataset"
# first we need to import a resource
with transaction.atomic():
4 changes: 2 additions & 2 deletions geonode/upload/tests/unit/test_orchestrator.py
@@ -108,7 +108,7 @@ def test_create_execution_request(self):
"files": {"base_file": "/tmp/file.txt"},
"store_spatial_files": True,
},
action='upload'
action="upload",
)
exec_obj = ExecutionRequest.objects.filter(exec_id=exec_id).first()
self.assertEqual(count + 1, ExecutionRequest.objects.count())
@@ -151,7 +151,7 @@ def test_perform_last_import_step(self, mock_celery):
"files": {"base_file": "/tmp/file.txt"},
"store_spatial_files": True,
},
action='upload'
action="upload",
)
# test under tests
self.orchestrator.perform_next_step(
Expand Down
