[Fixes #12657] Align supported_file_extension_config with the new client configuration #12673

Open · wants to merge 9 commits into master
1 change: 1 addition & 0 deletions geonode/geoserver/management/commands/importlayers.py
@@ -149,6 +149,7 @@ def execute(self):
params[name] = os.path.basename(value.name)

params["non_interactive"] = 'true'
params["action"] = 'upload'
response = client.post(
urljoin(self.host, "/api/v2/uploads/upload/"),
auth=HTTPBasicAuth(self.username, self.password),
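For context, clients of the v2 upload endpoint now have to send the `action` field explicitly, as the management command does above. A minimal sketch of the equivalent standalone call using `requests` (host, credentials and file name are placeholders, not values from this PR):

```python
# Hedged sketch: posting a dataset to the v2 uploads endpoint with the
# now-explicit "action" parameter. Host, credentials and file are placeholders.
import requests
from requests.auth import HTTPBasicAuth

with open("roads.geojson", "rb") as base_file:
    response = requests.post(
        "https://geonode.example.org/api/v2/uploads/upload/",
        auth=HTTPBasicAuth("admin", "admin"),
        data={"non_interactive": "true", "action": "upload"},
        files={"base_file": base_file},
    )
print(response.status_code, response.json().get("execution_id"))
```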
4 changes: 1 addition & 3 deletions geonode/resource/api/tests.py
@@ -157,9 +157,7 @@ def test_endpoint_should_raise_error_if_pk_is_not_passed(self):

def test_endpoint_should_return_the_source(self):
# creating dummy execution request
obj = ExecutionRequest.objects.create(
user=self.superuser, func_name="import_new_resource", action="import", source="upload_workflow"
)
obj = ExecutionRequest.objects.create(user=self.superuser, func_name="import_new_resource", action="upload")
self.client.force_login(self.superuser)

_url = f"{reverse('executionrequest-list')}/{obj.exec_id}"
1 change: 1 addition & 0 deletions geonode/resource/enumerator.py
@@ -22,6 +22,7 @@

class ExecutionRequestAction(enum.Enum):
IMPORT = _("import")
UPLOAD = _("upload")
CREATE = _("create")
COPY = _("copy")
DELETE = _("delete")
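A quick sanity check of the new member (a sketch; the enum values are lazily translated strings, so `str()` is applied where a plain action name is needed):

```python
# Assumes the enum is importable as shown in the diff above.
from geonode.resource.enumerator import ExecutionRequestAction as exa

print(str(exa.UPLOAD.value))        # "upload"
print([str(a.value) for a in exa])  # ["import", "upload", "create", "copy", ...]
```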
37 changes: 37 additions & 0 deletions (new Django migration under geonode/resource/migrations/)
@@ -0,0 +1,37 @@
# Generated by Django 4.2.9 on 2024-10-18 10:41

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
("resource", "0008_executionrequest_source"),
]

operations = [
migrations.RemoveField(
model_name="executionrequest",
name="source",
),
migrations.AlterField(
model_name="executionrequest",
name="action",
field=models.CharField(
choices=[
("import", "import"),
("upload", "upload"),
("create", "create"),
("copy", "copy"),
("delete", "delete"),
("permissions", "permissions"),
("update", "update"),
("ingest", "ingest"),
("unknown", "unknown"),
],
default="unknown",
max_length=50,
null=True,
),
),
]
2 changes: 0 additions & 2 deletions geonode/resource/models.py
@@ -60,5 +60,3 @@ class ExecutionRequest(models.Model):
action = models.CharField(
max_length=50, choices=ACTION_CHOICES, default=ExecutionRequestAction.UNKNOWN.value, null=True
)

source = models.CharField(max_length=250, null=True, default=None)
4 changes: 1 addition & 3 deletions geonode/security/tests.py
@@ -752,9 +752,7 @@ def test_dataset_permissions(self):
bobby = get_user_model().objects.get(username="bobby")

self.client.force_login(get_user_model().objects.get(username="admin"))
payload = {
"base_file": open(f"{project_dir}/tests/fixture/valid.geojson", "rb"),
}
payload = {"base_file": open(f"{project_dir}/tests/fixture/valid.geojson", "rb"), "action": "upload"}
response = self.client.post(reverse("importer_upload"), data=payload)
layer = ResourceHandlerInfo.objects.filter(execution_request=response.json()["execution_id"]).first().resource
if layer is None:
50 changes: 0 additions & 50 deletions geonode/settings.py
@@ -2240,56 +2240,6 @@ def get_geonode_catalogue_service():
"document_upload",
)

SUPPORTED_DATASET_FILE_TYPES = [
{
"id": "shp",
"label": "ESRI Shapefile",
"format": "vector",
"ext": ["shp"],
"requires": ["shp", "prj", "dbf", "shx"],
"optional": ["xml", "sld"],
},
{
"id": "tiff",
"label": "GeoTIFF",
"format": "raster",
"ext": ["tiff", "tif", "geotiff", "geotif"],
"mimeType": ["image/tiff"],
"optional": ["xml", "sld"],
},
{
"id": "csv",
"label": "Comma Separated Value (CSV)",
"format": "vector",
"ext": ["csv"],
"mimeType": ["text/csv"],
"optional": ["xml", "sld"],
},
{
"id": "zip",
"label": "Zip Archive",
"format": "archive",
"ext": ["zip"],
"mimeType": ["application/zip"],
"optional": ["xml", "sld"],
},
{
"id": "xml",
"label": "XML Metadata File",
"format": "metadata",
"ext": ["xml"],
"mimeType": ["application/json"],
"needsFiles": ["shp", "prj", "dbf", "shx", "csv", "tiff", "zip", "sld"],
},
{
"id": "sld",
"label": "Styled Layer Descriptor (SLD)",
"format": "metadata",
"ext": ["sld"],
"mimeType": ["application/json"],
"needsFiles": ["shp", "prj", "dbf", "shx", "csv", "tiff", "zip", "xml"],
},
]
INSTALLED_APPS += (
"dynamic_models",
# "importer",
17 changes: 0 additions & 17 deletions geonode/storage/tests.py
@@ -573,23 +573,6 @@ def test_zip_file_should_correctly_index_file_extensions(self):
# extensions found more than once get indexed
self.assertIsNotNone(_files.get("csv_file_1"))

@override_settings(
SUPPORTED_DATASET_FILE_TYPES=[
{"id": "kmz", "label": "kmz", "format": "vector", "ext": ["kmz"]},
{"id": "kml", "label": "kml", "format": "vector", "ext": ["kml"]},
]
)
def test_zip_file_should_correctly_recognize_main_extension_with_kmz(self):
# reinitiate the storage manager with the zip file
storage_manager = self.sut(
remote_files={"base_file": os.path.join(f"{self.project_root}", "tests/data/Italy.kmz")}
)
storage_manager.clone_remote_files()

self.assertIsNotNone(storage_manager.data_retriever.temporary_folder)
_files = storage_manager.get_retrieved_paths()
self.assertTrue("doc.kml" in _files.get("base_file"), msg=f"files available: {_files}")

def test_zip_file_should_correctly_recognize_main_extension_with_shp(self):
# zipping files
storage_manager = self.sut(remote_files=self.local_files_paths)
37 changes: 1 addition & 36 deletions geonode/tests/test_utils.py
@@ -18,7 +18,6 @@
#########################################################################
import copy
from unittest import TestCase
from django.test import override_settings

from unittest.mock import patch
from datetime import datetime, timedelta
@@ -32,8 +31,7 @@
from geonode.geoserver.helpers import set_attributes
from geonode.tests.base import GeoNodeBaseTestSupport
from geonode.br.management.commands.utils.utils import ignore_time
from geonode.utils import copy_tree, get_supported_datasets_file_types, bbox_to_wkt
from geonode import settings
from geonode.utils import copy_tree, bbox_to_wkt


class TestCopyTree(GeoNodeBaseTestSupport):
@@ -205,39 +203,6 @@ def setUp(self):
},
]

@override_settings(
ADDITIONAL_DATASET_FILE_TYPES=[
{"id": "dummy_type", "label": "Dummy Type", "format": "dummy", "ext": ["dummy"]},
]
)
def test_should_append_additional_type_if_config_is_provided(self):
prev_count = len(settings.SUPPORTED_DATASET_FILE_TYPES)
supported_types = get_supported_datasets_file_types()
supported_keys = [t.get("id") for t in supported_types]
self.assertIn("dummy_type", supported_keys)
self.assertEqual(len(supported_keys), prev_count + 1)

@override_settings(
ADDITIONAL_DATASET_FILE_TYPES=[
{
"id": "shp",
"label": "Replaced type",
"format": "vector",
"ext": ["shp"],
"requires": ["shp", "prj", "dbf", "shx"],
"optional": ["xml", "sld"],
},
]
)
def test_should_replace_the_type_id_if_already_exists(self):
prev_count = len(settings.SUPPORTED_DATASET_FILE_TYPES)
supported_types = get_supported_datasets_file_types()
supported_keys = [t.get("id") for t in supported_types]
self.assertIn("shp", supported_keys)
self.assertEqual(len(supported_keys), prev_count)
shp_type = [t for t in supported_types if t["id"] == "shp"][0]
self.assertEqual(shp_type["label"], "Replaced type")


class TestRegionsCrossingDateLine(TestCase):
def setUp(self):
4 changes: 2 additions & 2 deletions geonode/upload/api/serializer.py
@@ -34,15 +34,15 @@ class Meta:
"sld_file",
"store_spatial_files",
"skip_existing_layers",
"source",
"action",
)

base_file = serializers.FileField()
xml_file = serializers.FileField(required=False)
sld_file = serializers.FileField(required=False)
store_spatial_files = serializers.BooleanField(required=False, default=True)
skip_existing_layers = serializers.BooleanField(required=False, default=False)
source = serializers.CharField(required=False, default="upload")
action = serializers.CharField(required=True)


class OverwriteImporterSerializer(ImporterSerializer):
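With `source` removed and `action` promoted to a required field, payloads that omit it should now fail serializer validation. A hedged sketch, assuming the import path shown in this diff:

```python
# Illustrative only: exercising the serializer directly, outside a request cycle.
from geonode.upload.api.serializer import ImporterSerializer

serializer = ImporterSerializer(data={"store_spatial_files": True})
print(serializer.is_valid())  # False: "base_file" and "action" are both required
print(serializer.errors)
```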
10 changes: 7 additions & 3 deletions geonode/upload/api/tests.py
@@ -62,9 +62,7 @@ def test_upload_method_not_allowed(self):
def test_raise_exception_if_file_is_not_a_handled(self):

self.client.force_login(get_user_model().objects.get(username="admin"))
payload = {
"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"),
}
payload = {"base_file": SimpleUploadedFile(name="file.invalid", content=b"abc"), "action": "upload"}
response = self.client.post(self.url, data=payload)
self.assertEqual(500, response.status_code)

@@ -76,6 +74,7 @@ def test_gpkg_raise_error_with_invalid_payload(self):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": "invalid",
"action": "upload",
}
expected = {
"success": False,
@@ -99,6 +98,7 @@ def test_gpkg_task_is_called(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -116,6 +116,7 @@ def test_geojson_task_is_called(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -133,6 +134,7 @@ def test_zip_file_is_unzip_and_the_handler_is_found(self, patch_upload):
"base_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"),
"zip_file": open(f"{project_dir}/tests/fixture/valid.zip", "rb"),
"store_spatial_files": True,
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -191,6 +193,7 @@ def test_asset_is_created_before_the_import_start(self, patch_upload):
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload",
}

response = self.client.post(self.url, data=payload)
@@ -221,6 +224,7 @@ def test_asset_should_be_deleted_if_created_during_with_exception(
content=b'{"type": "FeatureCollection", "content": "some-content"}',
),
"store_spatial_files": True,
"action": "upload",
}

response = self.client.post(self.url, data=payload)
9 changes: 2 additions & 7 deletions geonode/upload/api/views.py
@@ -176,7 +176,6 @@ def create(self, request, *args, **kwargs):
)

handler = orchestrator.get_handler(_data)

# not file but handler means that is a remote resource
if handler:
asset = None
@@ -191,8 +190,6 @@

self.validate_upload(request, storage_manager)

action = ExecutionRequestAction.IMPORT.value

input_params = {
**{"files": files, "handler_module_path": str(handler)},
**extracted_params,
@@ -205,15 +202,14 @@
"asset_module_path": f"{asset.__module__}.{asset.__class__.__name__}",
}
)

action = input_params.get("action")
execution_id = orchestrator.create_execution_request(
user=request.user,
func_name=next(iter(handler.get_task_list(action=action))),
step=_(next(iter(handler.get_task_list(action=action)))),
input_params=input_params,
action=action,
name=_file.name if _file else extracted_params.get("title", None),
source=extracted_params.get("source"),
)

sig = import_orchestrator.s(files, str(execution_id), handler=str(handler), action=action)
@@ -234,7 +230,7 @@ def create(self, request, *args, **kwargs):
logger.exception(e)
raise ImportException(detail=e.args[0] if len(e.args) > 0 else e)

raise ImportException(detail="No handlers found for this dataset type")
raise ImportException(detail="No handlers found for this dataset type/action")

def _handle_asset(self, request, asset_dir, storage_manager, _data, handler):
if storage_manager is None:
@@ -328,7 +324,6 @@ def copy(self, request, *args, **kwargs):
**{"handler_module_path": handler_module_path},
**extracted_params,
},
source="importer_copy",
)

sig = import_orchestrator.s(
6 changes: 3 additions & 3 deletions geonode/upload/celery_tasks.py
@@ -90,7 +90,7 @@ def import_orchestrator(
step="start_import",
layer_name=None,
alternate=None,
action=exa.IMPORT.value,
action=exa.UPLOAD.value,
**kwargs,
):
"""
@@ -179,7 +179,7 @@ def import_resource(self, execution_id, /, handler_module_path, action, **kwargs
call_rollback_function(
execution_id,
handlers_module_path=handler_module_path,
prev_action=exa.IMPORT.value,
prev_action=exa.UPLOAD.value,
layer=None,
alternate=None,
error=e,
@@ -309,7 +309,7 @@ def create_geonode_resource(
layer_name: Optional[str] = None,
alternate: Optional[str] = None,
handler_module_path: str = None,
action: str = exa.IMPORT.value,
action: str = exa.UPLOAD.value,
**kwargs,
):
"""
4 changes: 2 additions & 2 deletions geonode/upload/handlers/README.md
@@ -32,7 +32,7 @@ class BaseVectorFileHandler(BaseHandler):
It must provide the task_lists required to complete the upload
"""

ACTIONS = {
TASKS = {
exa.IMPORT.value: (), # define the list of the step (celery task) needed to execute the action for the resource
exa.COPY.value: (),
exa.DELETE.value: (),
@@ -242,7 +242,7 @@ class NewVectorFileHandler(BaseVectorFileHandler):
It must provide the task_lists required to complete the upload
"""

ACTIONS = {
TASKS = {
exa.IMPORT.value: (
"start_import",
"geonode.upload.import_resource",
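Since the excerpt above only shows the `import` key, here is a hedged sketch of what a handler's `TASKS` mapping could look like with the new `upload` action; the class and task names are illustrative, not the full GeoNode chains:

```python
from geonode.resource.enumerator import ExecutionRequestAction as exa


class ExampleVectorHandler:
    """Illustrative handler skeleton: TASKS keys each action to its celery task chain."""

    TASKS = {
        exa.UPLOAD.value: (
            "start_import",
            "geonode.upload.import_resource",
            "geonode.upload.publish_resource",
            "geonode.upload.create_geonode_resource",
        ),
        exa.COPY.value: (
            "start_copy",
            "geonode.upload.copy_dynamic_model",
        ),
    }

    @classmethod
    def get_task_list(cls, action):
        # An unknown action surfaces upstream as
        # "No handlers found for this dataset type/action".
        return cls.TASKS[action]
```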