Commit

[Fixes #12657] Refactor supported type, fix data retriever and refactor handlers configuration
mattiagiupponi committed Oct 18, 2024
1 parent 9a46a38 commit ec76dff
Showing 30 changed files with 112 additions and 172 deletions.
4 changes: 2 additions & 2 deletions geonode/upload/api/serializer.py
@@ -34,15 +34,15 @@ class Meta:
"sld_file",
"store_spatial_files",
"skip_existing_layers",
"source",
"action",
)

base_file = serializers.FileField()
xml_file = serializers.FileField(required=False)
sld_file = serializers.FileField(required=False)
store_spatial_files = serializers.BooleanField(required=False, default=True)
skip_existing_layers = serializers.BooleanField(required=False, default=False)
source = serializers.CharField(required=False, default="upload")
action = serializers.CharField(required=True)


class OverwriteImporterSerializer(ImporterSerializer):
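
The serializer change is small but affects every client: the optional source field (which defaulted to "upload") is gone and a required action field replaces it. A minimal sketch of what that means for callers, with a purely illustrative validate_payload helper standing in for the real DRF validation:

# Illustrative only: mimics the required/optional semantics of ImporterSerializer
# after this commit; the real validation is done by Django REST Framework.
REQUIRED = {"base_file", "action"}
OPTIONAL_DEFAULTS = {"store_spatial_files": True, "skip_existing_layers": False}


def validate_payload(payload: dict) -> dict:
    missing = REQUIRED - set(payload)
    if missing:
        raise ValueError(f"Missing required fields: {sorted(missing)}")
    return {**OPTIONAL_DEFAULTS, **payload}


# Before this commit a bare upload worked because "source" defaulted to "upload";
# now the caller must state the action explicitly.
print(validate_payload({"base_file": "roads.gpkg", "action": "upload"}))

A request without action now fails validation instead of silently being treated as an upload.
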
8 changes: 2 additions & 6 deletions geonode/upload/api/views.py
@@ -176,7 +176,6 @@ def create(self, request, *args, **kwargs):
)

handler = orchestrator.get_handler(_data)

# no file but a handler means that this is a remote resource
if handler:
asset = None
@@ -191,8 +190,6 @@ def create(self, request, *args, **kwargs):

self.validate_upload(request, storage_manager)

action = ExecutionRequestAction.UPLOAD.value

input_params = {
**{"files": files, "handler_module_path": str(handler)},
**extracted_params,
@@ -205,15 +202,14 @@ def create(self, request, *args, **kwargs):
"asset_module_path": f"{asset.__module__}.{asset.__class__.__name__}",
}
)

action = input_params.get("action")
execution_id = orchestrator.create_execution_request(
user=request.user,
func_name=next(iter(handler.get_task_list(action=action))),
step=_(next(iter(handler.get_task_list(action=action)))),
input_params=input_params,
action=action,
name=_file.name if _file else extracted_params.get("title", None),
source=extracted_params.get("source"),
)

sig = import_orchestrator.s(files, str(execution_id), handler=str(handler), action=action)
@@ -234,7 +230,7 @@ def create(self, request, *args, **kwargs):
logger.exception(e)
raise ImportException(detail=e.args[0] if len(e.args) > 0 else e)

raise ImportException(detail="No handlers found for this dataset type")
raise ImportException(detail="No handlers found for this dataset type/action")

def _handle_asset(self, request, asset_dir, storage_manager, _data, handler):
if storage_manager is None:
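
With the serializer supplying the action, the view stops hard-coding ExecutionRequestAction.UPLOAD and instead reads the action from the parameters the handler extracted, using it both to pick the first task and to tag the execution request. A simplified, self-contained sketch of that flow (the handler and task names here are stand-ins, not the real GeoNode objects):

# Sketch of the new control flow in the create view, with stand-in objects:
# the action selects the task chain instead of always being "upload".
class FakeHandler:
    TASKS = {
        "upload": ("start_import", "geonode.upload.import_resource"),
        "replace": ("start_import", "geonode.upload.import_resource"),
    }

    def get_task_list(self, action):
        return self.TASKS[action]


def create_execution(handler, input_params):
    action = input_params.get("action")  # was: ExecutionRequestAction.UPLOAD.value
    first_step = next(iter(handler.get_task_list(action=action)))
    return {"step": first_step, "action": action, "input_params": input_params}


print(create_execution(FakeHandler(), {"files": ["roads.gpkg"], "action": "replace"}))
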
13 changes: 1 addition & 12 deletions geonode/upload/handlers/common/metadata.py
@@ -18,7 +18,6 @@
#########################################################################
import logging
from geonode.upload.handlers.base import BaseHandler
from geonode.upload.handlers.utils import UploadSourcesEnum
from geonode.upload.models import ResourceHandlerInfo
from geonode.upload.handlers.xml.serializer import MetadataFileSerializer
from geonode.upload.orchestrator import orchestrator
@@ -34,16 +33,6 @@ class MetadataFileHandler(BaseHandler):
It must provide the task_lists required to complete the upload
"""

@staticmethod
def can_handle(_data) -> bool:
"""
This endpoint will return True or False if with the info provided
the handler is able to handle the file or not
"""
if _data.get("source", None) == UploadSourcesEnum.resource_file_upload.value:
return True
return False

@staticmethod
def has_serializer(data) -> bool:
_base = data.get("base_file")
@@ -69,7 +58,7 @@ def extract_params_from_data(_data, action=None):
"overwrite_existing_layer": _data.pop("overwrite_existing_layer", False),
"resource_pk": _data.pop("resource_pk", None),
"store_spatial_file": _data.pop("store_spatial_files", "True"),
"source": _data.pop("source", "resource_file_upload"),
"action": _data.pop("action"),
}, _data

@staticmethod
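
The metadata handler loses its own can_handle, which used to key off source == "resource_file_upload". Presumably handler selection now relies on the shared, action-driven check; the sketch below shows that pattern with illustrative class names rather than the actual GeoNode classes:

# Illustrative pattern: a handler advertises the actions it supports via TASKS,
# and a shared can_handle checks the requested action instead of a "source" flag.
class IllustrativeBaseHandler:
    TASKS: dict = {}

    @classmethod
    def can_handle(cls, _data) -> bool:
        return _data.get("action") in cls.TASKS


class IllustrativeMetadataHandler(IllustrativeBaseHandler):
    TASKS = {"resource_metadata_upload": ("start_import", "geonode.upload.import_resource")}


print(IllustrativeMetadataHandler.can_handle({"action": "resource_metadata_upload"}))  # True
print(IllustrativeMetadataHandler.can_handle({"action": "upload"}))                    # False
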
18 changes: 6 additions & 12 deletions geonode/upload/handlers/common/raster.py
@@ -35,7 +35,7 @@
from geonode.upload.celery_tasks import ErrorBaseTaskClass, import_orchestrator
from geonode.upload.handlers.base import BaseHandler
from geonode.upload.handlers.geotiff.exceptions import InvalidGeoTiffException
from geonode.upload.handlers.utils import UploadSourcesEnum, create_alternate, should_be_imported
from geonode.upload.handlers.utils import create_alternate, should_be_imported
from geonode.upload.models import ResourceHandlerInfo
from geonode.upload.orchestrator import orchestrator
from osgeo import gdal
@@ -83,16 +83,6 @@ def is_valid(files, user, **kwargs):
raise ImportException(stderr)
return True

@staticmethod
def can_handle(_data) -> bool:
"""
This endpoint will return True or False if with the info provided
the handler is able to handle the file or not
"""
if _data.get("source", None) != UploadSourcesEnum.upload.value:
return False
return True

@staticmethod
def has_serializer(_data) -> bool:
"""
@@ -132,7 +122,7 @@ def extract_params_from_data(_data, action=None):
"overwrite_existing_layer": _data.pop("overwrite_existing_layer", False),
"resource_pk": _data.pop("resource_pk", None),
"store_spatial_file": _data.pop("store_spatial_files", "True"),
"source": _data.pop("source", "upload"),
"action": _data.pop("action", "upload"),
}, _data

@staticmethod
@@ -285,6 +275,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str:
dataset = Dataset.objects.filter(pk=_exec.input_params.get("resource_pk")).first()
if not dataset:
raise ImportException("The dataset selected for the ovewrite does not exists")
if dataset.is_vector():
raise Exception("cannot override a vector dataset with a raster one")
alternate = dataset.alternate.split(":")[-1]
orchestrator.update_execution_request_obj(_exec, {"geonode_resource": dataset})
else:
@@ -293,6 +285,8 @@ def import_resource(self, files: dict, execution_id: str, **kwargs) -> str:
dataset_exists = user_datasets.exists()

if dataset_exists and should_be_overwritten:
if user_datasets.first().is_vector():
raise Exception("Cannot overwrite a vector dataset with a raster one")
layer_name, alternate = (
layer_name,
user_datasets.first().alternate.split(":")[-1],
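
Besides dropping the source-based can_handle, the raster import path gains a guard: when an overwrite targets an existing dataset, the import is refused if that dataset is a vector layer. A small, self-contained sketch of the guard (types and names are illustrative, not the real GeoNode models):

# Sketch of the new overwrite guard: a raster import may only overwrite an
# existing dataset if that dataset is itself a raster.
from dataclasses import dataclass
from typing import Optional


@dataclass
class ExistingDataset:
    alternate: str
    vector: bool

    def is_vector(self) -> bool:
        return self.vector


def check_raster_overwrite(target: Optional[ExistingDataset]) -> str:
    if target is None:
        raise RuntimeError("The dataset selected for the overwrite does not exist")
    if target.is_vector():
        raise RuntimeError("Cannot overwrite a vector dataset with a raster one")
    return target.alternate.split(":")[-1]


print(check_raster_overwrite(ExistingDataset("geonode:elevation", vector=False)))
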
2 changes: 1 addition & 1 deletion geonode/upload/handlers/common/remote.py
@@ -105,7 +105,7 @@ def extract_params_from_data(_data, action=None):
return {"title": title.pop("title")}, _data

return {
"source": _data.pop("source", "upload"),
"action": _data.pop("action", "upload"),
"title": _data.pop("title", None),
"url": _data.pop("url", None),
"type": _data.pop("type", None),
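
extract_params_from_data consumes the keys it understands from the incoming payload and returns the remainder untouched; the only difference after this commit is that the flag it keeps for remote resources is action rather than source. A stand-alone imitation of that pop-based split (the function name is illustrative):

# Illustrative imitation of the pop-based parameter split used by the handlers:
# recognised keys are consumed, everything else is handed back untouched.
def split_remote_params(data: dict):
    data = dict(data)  # work on a copy
    extracted = {
        "action": data.pop("action", "upload"),
        "title": data.pop("title", None),
        "url": data.pop("url", None),
        "type": data.pop("type", None),
    }
    return extracted, data


params, rest = split_remote_params(
    {"action": "upload", "title": "OSM WMS", "url": "https://example.org/wms", "type": "wms", "custom": 1}
)
print(params)  # handler parameters
print(rest)    # leftover data, e.g. {"custom": 1}
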
4 changes: 2 additions & 2 deletions geonode/upload/handlers/common/serializer.py
@@ -26,14 +26,14 @@ class Meta:
ref_name = "RemoteResourceSerializer"
model = ResourceBase
view_name = "importer_upload"
fields = ("url", "title", "type", "source", "overwrite_existing_layer")
fields = ("url", "title", "type", "action", "overwrite_existing_layer")

url = serializers.URLField(required=True, help_text="URL of the remote service / resource")
title = serializers.CharField(required=True, help_text="Title of the resource. Can be None or Empty")
type = serializers.CharField(
required=True,
help_text="Remote resource type, for example wms or 3dtiles. Is used by the handler to understand if can handle the resource",
)
source = serializers.CharField(required=False, default="upload")
action = serializers.CharField(required=True)

overwrite_existing_layer = serializers.BooleanField(required=False, default=False)
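
For remote resources the serializer now likewise requires action in place of the optional source. Roughly, a request body would look like the following; the payload values are illustrative and only meant to frame the field change:

# Illustrative request body for registering a remote resource after this change:
# "action" is now mandatory, while the old optional "source" field is gone.
import json

payload = {
    "url": "https://example.org/geoserver/wms",
    "title": "Example remote WMS",
    "type": "wms",
    "action": "upload",                 # required; previously source="upload" was implied
    "overwrite_existing_layer": False,
}
print(json.dumps(payload, indent=2))
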
38 changes: 35 additions & 3 deletions geonode/upload/handlers/common/vector.py
@@ -39,7 +39,6 @@
from geonode.upload.handlers.utils import (
GEOM_TYPE_MAPPING,
STANDARD_TYPE_MAPPING,
UploadSourcesEnum,
drop_dynamic_model_schema,
)
from geonode.resource.manager import resource_manager
@@ -55,6 +54,7 @@
from django.db.models import Q
import pyproj
from geonode.geoserver.security import delete_dataset_cache, set_geowebcache_invalidate_cache
from geonode.upload.utils import ImporterRequestAction as ira

logger = logging.getLogger("importer")

@@ -65,6 +65,32 @@ class BaseVectorFileHandler(BaseHandler):
It must provide the task_lists required to complete the upload
"""

TASKS = {
exa.UPLOAD.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
exa.COPY.value: (
"start_copy",
"geonode.upload.copy_dynamic_model",
"geonode.upload.copy_geonode_data_table",
"geonode.upload.publish_resource",
"geonode.upload.copy_geonode_resource",
),
ira.ROLLBACK.value: (
"start_rollback",
"geonode.upload.rollback",
),
ira.REPLACE.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
}

@property
def default_geometry_column_name(self):
return "geometry"
@@ -99,7 +125,7 @@ def can_handle(_data) -> bool:
This method returns True or False depending on whether, with the info provided,
the handler is able to handle the file
"""
if _data.get("source", None) != UploadSourcesEnum.upload.value:
if _data.get("action", None) not in BaseVectorFileHandler.TASKS:
return False
return True

@@ -142,7 +168,7 @@ def extract_params_from_data(_data, action=None):
"overwrite_existing_layer": _data.pop("overwrite_existing_layer", False),
"resource_pk": _data.pop("resource_pk", None),
"store_spatial_file": _data.pop("store_spatial_files", "True"),
"source": _data.pop("source", "upload"),
"action": _data.pop("action", "upload"),
}, _data

@staticmethod
@@ -431,13 +457,19 @@ def find_alternate_by_dataset(self, _exec_obj, layer_name, should_be_overwritten
dataset = Dataset.objects.filter(pk=_exec_obj.input_params.get("resource_pk")).first()
if not dataset:
raise ImportException("The dataset selected for the ovewrite does not exists")
if should_be_overwritten:
if not dataset.is_vector():
raise Exception("Cannot override a raster dataset with a vector one")
alternate = dataset.alternate.split(":")
return alternate[-1]

workspace = DataPublisher(None).workspace
dataset_available = Dataset.objects.filter(alternate__iexact=f"{workspace.name}:{layer_name}")

dataset_exists = dataset_available.exists()
if should_be_overwritten:
if dataset_exists and not dataset_available.first().is_vector():
raise Exception("Cannot overwrite a raster dataset with a vector one")

if dataset_exists and should_be_overwritten:
alternate = dataset_available.first().alternate.split(":")[-1]
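
The core of the refactor is here: the TASKS table that each vector handler used to duplicate now lives on BaseVectorFileHandler, and can_handle simply asks whether the requested action is one of its keys. A condensed, self-contained rendering of that idea (plain strings stand in for the ExecutionRequestAction / ImporterRequestAction enum values):

# Condensed rendering of the action-keyed dispatch: the requested action must be
# a key of TASKS, and its value is the task chain to run.
TASKS = {
    "upload": ("start_import", "geonode.upload.import_resource",
               "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource"),
    "copy": ("start_copy", "geonode.upload.copy_dynamic_model",
             "geonode.upload.copy_geonode_data_table", "geonode.upload.publish_resource",
             "geonode.upload.copy_geonode_resource"),
    "rollback": ("start_rollback", "geonode.upload.rollback"),
    "replace": ("start_import", "geonode.upload.import_resource",
                "geonode.upload.publish_resource", "geonode.upload.create_geonode_resource"),
}


def can_handle(data: dict) -> bool:
    return data.get("action") in TASKS


print(can_handle({"action": "copy"}))      # True
print(can_handle({"source": "upload"}))    # False: the old flag is no longer enough
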
27 changes: 0 additions & 27 deletions geonode/upload/handlers/csv/handler.py
@@ -29,7 +29,6 @@
from dynamic_models.models import ModelSchema
from geonode.upload.handlers.common.vector import BaseVectorFileHandler
from geonode.upload.handlers.utils import GEOM_TYPE_MAPPING
from geonode.upload.utils import ImporterRequestAction as ira

logger = logging.getLogger("importer")

@@ -40,32 +39,6 @@ class CSVFileHandler(BaseVectorFileHandler):
It must provide the task_lists required to complete the upload
"""

TASKS = {
exa.UPLOAD.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
exa.COPY.value: (
"start_copy",
"geonode.upload.copy_dynamic_model",
"geonode.upload.copy_geonode_data_table",
"geonode.upload.publish_resource",
"geonode.upload.copy_geonode_resource",
),
ira.ROLLBACK.value: (
"start_rollback",
"geonode.upload.rollback",
),
ira.REPLACE.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
}

possible_geometry_column_name = ["geom", "geometry", "wkt_geom", "the_geom"]
possible_lat_column = ["latitude", "lat", "y"]
possible_long_column = ["longitude", "long", "x"]
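
With the duplicated table removed from CSVFileHandler (and, below, from the GeoJSON handler), format handlers inherit TASKS from the vector base class and only override what is format specific. Roughly (class names below are stand-ins, not the real GeoNode classes):

# Rough shape of the refactor: format handlers stop copying the TASKS table and
# inherit it, overriding only format-specific details.
class VectorBase:
    TASKS = {"upload": ("start_import",), "copy": ("start_copy",), "rollback": ("start_rollback",)}


class CsvLike(VectorBase):
    # no TASKS override needed any more
    possible_geometry_column_name = ["geom", "geometry", "wkt_geom", "the_geom"]


print(CsvLike.TASKS is VectorBase.TASKS)  # True: one shared definition to maintain
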
2 changes: 1 addition & 1 deletion geonode/upload/handlers/csv/tests.py
@@ -46,7 +46,7 @@ def setUpClass(cls):
cls.missing_geom = f"{project_dir}/tests/fixture/missing_geom.csv"
cls.user, _ = get_user_model().objects.get_or_create(username="admin")
cls.invalid_files = {"base_file": cls.invalid_csv}
cls.valid_files = {"base_file": cls.valid_csv, "source": "upload"}
cls.valid_files = {"base_file": cls.valid_csv, "action": "upload"}
cls.owner = get_user_model().objects.first()
cls.layer = create_single_dataset(name="test", owner=cls.owner)

28 changes: 0 additions & 28 deletions geonode/upload/handlers/geojson/handler.py
@@ -19,11 +19,9 @@
import json
import logging
import os
from geonode.resource.enumerator import ExecutionRequestAction as exa
from geonode.upload.utils import UploadLimitValidator
from geonode.upload.handlers.common.vector import BaseVectorFileHandler
from osgeo import ogr
from geonode.upload.utils import ImporterRequestAction as ira

from geonode.upload.handlers.geojson.exceptions import InvalidGeoJsonException

@@ -36,32 +34,6 @@ class GeoJsonFileHandler(BaseVectorFileHandler):
It must provide the task_lists required to complete the upload
"""

TASKS = {
exa.UPLOAD.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
exa.COPY.value: (
"start_copy",
"geonode.upload.copy_dynamic_model",
"geonode.upload.copy_geonode_data_table",
"geonode.upload.publish_resource",
"geonode.upload.copy_geonode_resource",
),
ira.ROLLBACK.value: (
"start_rollback",
"geonode.upload.rollback",
),
ira.REPLACE.value: (
"start_import",
"geonode.upload.import_resource",
"geonode.upload.publish_resource",
"geonode.upload.create_geonode_resource",
),
}

@property
def supported_file_extension_config(self):
return {
2 changes: 1 addition & 1 deletion geonode/upload/handlers/geojson/tests.py
@@ -43,7 +43,7 @@ def setUpClass(cls):
cls.invalid_geojson = f"{project_dir}/tests/fixture/invalid.geojson"
cls.user, _ = get_user_model().objects.get_or_create(username="admin")
cls.invalid_files = {"base_file": cls.invalid_geojson}
cls.valid_files = {"base_file": cls.valid_geojson, "source": "upload"}
cls.valid_files = {"base_file": cls.valid_geojson, "action": "upload"}
cls.owner = get_user_model().objects.first()
cls.layer = create_single_dataset(name="stazioni_metropolitana", owner=cls.owner)

2 changes: 1 addition & 1 deletion geonode/upload/handlers/geotiff/handler.py
@@ -99,7 +99,7 @@ def can_handle(_data) -> bool:
if not base:
return False
ext = base.split(".")[-1] if isinstance(base, str) else base.name.split(".")[-1]
return ext in ["tiff", "geotiff", "tif", "geotif"] and BaseRasterFileHandler.can_handle(_data)
return ext in ["tiff", "geotiff", "tif", "geotif"] and _data.get("action", None) in GeoTiffFileHandler.TASKS

@staticmethod
def is_valid(files, user, **kwargs):
2 changes: 1 addition & 1 deletion geonode/upload/handlers/geotiff/tests.py
@@ -35,7 +35,7 @@ def setUpClass(cls):
super().setUpClass()
cls.handler = GeoTiffFileHandler()
cls.valid_tiff = f"{project_dir}/tests/fixture/test_raster.tif"
cls.valid_files = {"base_file": cls.valid_tiff, "source": "upload"}
cls.valid_files = {"base_file": cls.valid_tiff, "action": "upload"}
cls.user, _ = get_user_model().objects.get_or_create(username="admin")
cls.invalid_tiff = {"base_file": "invalid.file.foo"}
cls.owner = get_user_model().objects.first()
6 changes: 3 additions & 3 deletions geonode/upload/handlers/gpkg/handler.py
@@ -89,9 +89,9 @@ def can_handle(_data) -> bool:
base = _data.get("base_file")
if not base:
return False
return (
base.endswith(".gpkg") if isinstance(base, str) else base.name.endswith(".gpkg")
) and BaseVectorFileHandler.can_handle(_data)
return (base.endswith(".gpkg") if isinstance(base, str) else base.name.endswith(".gpkg")) and _data.get(
"action", None
) in GPKGFileHandler.TASKS

@staticmethod
def is_valid(files, user, **kwargs):
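
Both the GeoTIFF and the GeoPackage can_handle now pair the file-extension test with a check that the requested action is one the handler supports, instead of delegating to the old source-based base check. A stand-alone sketch of that combined predicate (the action set is an assumption standing in for the handler's TASKS keys):

# Stand-alone sketch of the combined check used by the GeoTIFF and GeoPackage
# handlers: right file extension *and* an action the handler knows how to run.
SUPPORTED_ACTIONS = {"upload", "copy", "replace", "rollback"}  # stand-in for TASKS keys


def can_handle(data: dict, extensions=(".gpkg",)) -> bool:
    base = data.get("base_file")
    if not base:
        return False
    name = base if isinstance(base, str) else getattr(base, "name", "")
    return name.lower().endswith(tuple(extensions)) and data.get("action") in SUPPORTED_ACTIONS


print(can_handle({"base_file": "rivers.gpkg", "action": "upload"}))   # True
print(can_handle({"base_file": "rivers.gpkg", "source": "upload"}))   # False
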
