diff --git a/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py new file mode 100644 index 000000000..787174fda --- /dev/null +++ b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py @@ -0,0 +1,133 @@ +import re + +from django.db.models import QuerySet + +from opensearchpy.exceptions import NotFoundError as OpenSearchNotFoundError +from sds_gateway.api_methods.models import CaptureType, Capture, File, DRF_RF_FILENAME_REGEX_STR +from sds_gateway.api_methods.utils.opensearch_client import get_opensearch_client +from sds_gateway.api_methods.utils.relationship_utils import get_capture_files +from loguru import logger as log + +# Digital RF spec: rf@SECONDS.MILLISECONDS.h5 (e.g. rf@1396379502.000.h5) +# https://github.com/MITHaystack/digital_rf +DRF_RF_FILENAME_PATTERN = re.compile( + r"^rf@(\d+)\.(\d+)\.h5$", + re.IGNORECASE, +) + + +def drf_rf_filename_from_ms(ms: int) -> str: + """Format ms as DRF rf data filename (canonical for range queries).""" + return f"rf@{ms // 1000}.{ms % 1000:03d}.h5" + + +def drf_rf_filename_to_ms(file_name: str) -> int | None: + """ + Parse DRF rf data filename to milliseconds. + Handles rf@SECONDS.MILLISECONDS.h5; fractional part padded to 3 digits. + """ + name = file_name.strip() + match = DRF_RF_FILENAME_PATTERN.match(name) + if not match: + return None + try: + seconds = int(match.group(1)) + frac = match.group(2).ljust(3, "0")[:3] + return seconds * 1000 + int(frac) + except (ValueError, TypeError): + return None + + +def _catch_capture_type_error(capture_type: CaptureType) -> None: + if capture_type != CaptureType.DigitalRF: + msg = "Only DigitalRF captures are supported for temporal filtering." 
+ log.error(msg) + raise ValueError(msg) + + +def get_capture_bounds(capture_type: CaptureType, capture_uuid: str) -> tuple[int, int]: + """Get start and end bounds for capture from opensearch.""" + _catch_capture_type_error(capture_type) + + client = get_opensearch_client() + index = f"captures-{capture_type}" + + try: + response = client.get(index=index, id=capture_uuid) + except OpenSearchNotFoundError as e: + raise ValueError( + f"Capture {capture_uuid} not found in OpenSearch index {index}" + ) from e + + if not response.get("found"): + raise ValueError( + f"Capture {capture_uuid} not found in OpenSearch index {index}" + ) + + source = response.get("_source", {}) + search_props = source.get("search_props", {}) + start_time = search_props.get("start_time", 0) + end_time = search_props.get("end_time", 0) + return start_time, end_time + + +def get_file_cadence(capture_type: CaptureType, capture: Capture) -> int: + """Get the file cadence in milliseconds. OpenSearch bounds are in seconds.""" + _catch_capture_type_error(capture_type) + + capture_uuid = str(capture.uuid) + start_time, end_time = get_capture_bounds(capture_type, capture_uuid) + + count = capture.get_drf_data_files_stats()["total_count"] + if count == 0: + return 0 + duration_sec = end_time - start_time + duration_ms = duration_sec * 1000 + return max(1, int(duration_ms / count)) + + +def filter_capture_data_files_selection_bounds( + capture_type: CaptureType, + capture: Capture, + start_time: int, # relative ms from start of capture (from UI) + end_time: int, # relative ms from start of capture (from UI) +) -> QuerySet[File]: + """Filter the capture file selection bounds to the given start and end times.""" + _catch_capture_type_error(capture_type) + epoch_start_sec, _ = get_capture_bounds(capture_type, str(capture.uuid)) + epoch_start_ms = epoch_start_sec * 1000 + start_ms = epoch_start_ms + start_time + end_ms = epoch_start_ms + end_time + + start_file_name = drf_rf_filename_from_ms(start_ms) + 
end_file_name = drf_rf_filename_from_ms(end_ms) + + data_files = capture.get_drf_data_files_queryset() + return data_files.filter( + name__gte=start_file_name, + name__lte=end_file_name, + ).order_by("name") + +def get_capture_files_with_temporal_filter( + capture_type: CaptureType, + capture: Capture, + start_time: int | None = None, # milliseconds since start of capture + end_time: int | None = None, +) -> QuerySet[File]: + """Get the capture files with temporal filtering.""" + _catch_capture_type_error(capture_type) + + if start_time is None or end_time is None: + log.warning("Start or end time is None, returning all capture files without temporal filtering") + return get_capture_files(capture) + + # get non-data files + non_data_files = get_capture_files(capture).exclude(name__regex=DRF_RF_FILENAME_REGEX_STR) + + # get data files with temporal filtering + data_files = filter_capture_data_files_selection_bounds( + capture_type, capture, start_time, end_time + ) + + # return all files + return non_data_files.union(data_files) \ No newline at end of file diff --git a/gateway/sds_gateway/api_methods/models.py b/gateway/sds_gateway/api_methods/models.py index 7a9aa896e..8bb007266 100644 --- a/gateway/sds_gateway/api_methods/models.py +++ b/gateway/sds_gateway/api_methods/models.py @@ -13,6 +13,8 @@ from blake3 import blake3 as Blake3 # noqa: N812 from django.conf import settings from django.db import models +from django.db.models import Sum +from django.db.models import Count from django.db.models import ProtectedError from django.db.models import QuerySet from django.db.models.signals import post_save @@ -27,6 +29,8 @@ log = logging.getLogger(__name__) +DRF_RF_FILENAME_REGEX_STR = r"^rf@\d+\.\d+\.h5$" + class KeywordNameField(models.CharField): """ @@ -419,6 +423,33 @@ def get_capture(self) -> dict[str, Any]: "owner": self.owner, } + + def get_drf_data_files_queryset(self) -> QuerySet[File]: + """DRF data files (rf@*.h5) for this capture (M2M + FK).""" + if 
self.capture_type != CaptureType.DigitalRF: + log.warning("Capture %s is not a DigitalRF capture", self.uuid) + return File.objects.none() + + # Local import avoids circular import (relationship_utils imports Capture). + from sds_gateway.api_methods.utils.relationship_utils import get_capture_files + + return get_capture_files(self, include_deleted=False).filter( + name__regex=DRF_RF_FILENAME_REGEX_STR, + ) + + def get_drf_data_files_stats(self) -> dict[str, int]: + """Count + total size in one query; cached per instance. File PK is ``uuid`` — use ``pk``.""" + if hasattr(self, "_drf_data_files_stats_cache"): + return self._drf_data_files_stats_cache + + qs = self.get_drf_data_files_queryset() + agg = qs.aggregate(total_count=Count("pk"), total_size=Sum("size")) + self._drf_data_files_stats_cache = { + "total_count": agg["total_count"] or 0, + "total_size": int(agg["total_size"] or 0), + } + return self._drf_data_files_stats_cache + def get_opensearch_frequency_metadata(self) -> dict[str, Any]: """ Query OpenSearch for frequency metadata for this specific capture. 
diff --git a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py index 037ebafd3..549c09f72 100644 --- a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py +++ b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py @@ -1,5 +1,6 @@ """Capture serializers for the SDS Gateway API methods.""" +import logging from typing import Any from typing import cast @@ -9,6 +10,8 @@ from rest_framework.utils.serializer_helpers import ReturnList from sds_gateway.api_methods.helpers.index_handling import retrieve_indexed_metadata +from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_bounds +from sds_gateway.api_methods.helpers.temporal_filtering import get_file_cadence from sds_gateway.api_methods.models import Capture from sds_gateway.api_methods.models import CaptureType from sds_gateway.api_methods.models import DEPRECATEDPostProcessedData @@ -68,10 +71,13 @@ class CaptureGetSerializer(serializers.ModelSerializer[Capture]): owner = UserGetSerializer() capture_props = serializers.SerializerMethodField() files = serializers.SerializerMethodField() + total_file_size = serializers.SerializerMethodField() + data_files_info = serializers.SerializerMethodField() center_frequency_ghz = serializers.SerializerMethodField() sample_rate_mhz = serializers.SerializerMethodField() - files_count = serializers.SerializerMethodField() - total_file_size = serializers.SerializerMethodField() + length_of_capture_ms = serializers.SerializerMethodField() + file_cadence_ms = serializers.SerializerMethodField() + capture_start_epoch_sec = serializers.SerializerMethodField() formatted_created_at = serializers.SerializerMethodField() capture_type_display = serializers.SerializerMethodField() post_processed_data = serializers.SerializerMethodField() @@ -90,28 +96,77 @@ def get_files(self, capture: Capture) -> ReturnList[File]: ) return cast("ReturnList[File]", 
serializer.data) + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_total_file_size(self, capture: Capture) -> int | None: + """Get the total file size of all files associated with this capture.""" + + if capture.capture_type != CaptureType.DigitalRF: + return None + + all_files = get_capture_files(capture, include_deleted=False) + result = all_files.aggregate(total_size=Sum("size")) + total = result["total_size"] or 0 + data_total = self.get_data_files_info(capture).get("total_size", 0) + if total < data_total: + logging.getLogger(__name__).warning( + "Capture %s: total_file_size (%s) < data_files_total_size (%s); using data total.", + str(capture.uuid), total, data_total, + ) + total = data_total + + return total + + @extend_schema_field(serializers.DictField(allow_null=True)) + def get_data_files_info(self, capture: Capture) -> dict[str, Any]: + """Get the data files info for the capture.""" + if capture.capture_type != CaptureType.DigitalRF: + return {} + + stats = capture.get_drf_data_files_stats() + total_size = stats["total_size"] + count = stats["total_count"] + return { + "count": count, + "total_size": total_size, + "per_data_file_size": (float(total_size) / count) if count else None, + } + @extend_schema_field(serializers.FloatField) def get_center_frequency_ghz(self, capture: Capture) -> float | None: """Get the center frequency in GHz from the capture model property.""" return capture.center_frequency_ghz - - @extend_schema_field(serializers.FloatField) + + @extend_schema_field(serializers.FloatField(allow_null=True)) def get_sample_rate_mhz(self, capture: Capture) -> float | None: - """Get the sample rate in MHz from the capture model property.""" + """Get the sample rate in MHz from the capture model property. 
None if not indexed in OpenSearch.""" return capture.sample_rate_mhz - @extend_schema_field(serializers.IntegerField) - def get_files_count(self, capture: Capture) -> int: - """Get the count of files associated with this capture.""" - return get_capture_files(capture, include_deleted=False).count() - - @extend_schema_field(serializers.IntegerField) - def get_total_file_size(self, capture: Capture) -> int: - """Get the total file size of all files associated with this capture.""" - all_files = get_capture_files(capture, include_deleted=False) - result = all_files.aggregate(total_size=Sum("size")) - return result["total_size"] or 0 + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_length_of_capture_ms(self, capture: Capture) -> int | None: + """Get the length of the capture in milliseconds. OpenSearch bounds are in seconds.""" + try: + start_time, end_time = get_capture_bounds(capture.capture_type, str(capture.uuid)) + return (end_time - start_time) * 1000 + except (ValueError, IndexError, KeyError): + return None + + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_file_cadence_ms(self, capture: Capture) -> int | None: + """Get the file cadence in milliseconds. None if not indexed in OpenSearch.""" + try: + return get_file_cadence(capture.capture_type, capture) + except (ValueError, IndexError, KeyError): + return None + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_capture_start_epoch_sec(self, capture: Capture) -> int | None: + """Get the capture start time as Unix epoch seconds. 
None if not indexed in OpenSearch.""" + try: + start_time, _ = get_capture_bounds(capture.capture_type, str(capture.uuid)) + return start_time + except (ValueError, IndexError, KeyError): + return None + @extend_schema_field(serializers.DictField) def get_capture_props(self, capture: Capture) -> dict[str, Any]: """Retrieve the indexed metadata for the capture.""" @@ -301,9 +356,12 @@ class CompositeCaptureSerializer(serializers.Serializer): # Computed fields files = serializers.SerializerMethodField() - files_count = serializers.SerializerMethodField() total_file_size = serializers.SerializerMethodField() + data_files_info = serializers.SerializerMethodField() formatted_created_at = serializers.SerializerMethodField() + length_of_capture_ms = serializers.SerializerMethodField() + file_cadence_ms = serializers.SerializerMethodField() + capture_start_epoch_sec = serializers.SerializerMethodField() def get_files(self, obj: dict[str, Any]) -> ReturnList[File]: """Get all files from all channels in the composite capture.""" @@ -320,19 +378,11 @@ def get_files(self, obj: dict[str, Any]) -> ReturnList[File]: all_files.extend(serializer.data) return cast("ReturnList[File]", all_files) - @extend_schema_field(serializers.IntegerField) - def get_files_count(self, obj: dict[str, Any]) -> int: - """Get the total count of files across all channels.""" - total_count = 0 - for channel_data in obj["channels"]: - capture_uuid = channel_data["uuid"] - capture = Capture.objects.get(uuid=capture_uuid) - total_count += get_capture_files(capture, include_deleted=False).count() - return total_count - - @extend_schema_field(serializers.IntegerField) - def get_total_file_size(self, obj: dict[str, Any]) -> int: + def get_total_file_size(self, obj: dict[str, Any]) -> int | None: """Get the total file size across all channels.""" + if obj["capture_type"] != CaptureType.DigitalRF: + return None + total_size = 0 for channel_data in obj["channels"]: capture_uuid = channel_data["uuid"] @@ -340,8 
+390,37 @@ def get_total_file_size(self, obj: dict[str, Any]) -> int: all_files = get_capture_files(capture, include_deleted=False) result = all_files.aggregate(total_size=Sum("size")) total_size += result["total_size"] or 0 + + data_total = self.get_data_files_info(obj).get("total_size", 0) + + if total_size < data_total: + logging.getLogger(__name__).warning( + "Composite capture: total_file_size (%s) < data_files_total_size (%s); using data total.", + total_size, data_total, + ) + total_size = data_total return total_size + def get_data_files_info(self, obj: dict[str, Any]) -> dict[str, Any]: + """Get the data files info for the composite capture.""" + if obj["capture_type"] != CaptureType.DigitalRF: + return {} + + total_count = 0 + total_size = 0 + for channel_data in obj["channels"]: + capture_uuid = channel_data["uuid"] + capture = Capture.objects.get(uuid=capture_uuid) + stats = capture.get_drf_data_files_stats() + total_count += stats["total_count"] + total_size += stats["total_size"] + + return { + "count": total_count, + "total_size": total_size, + "per_data_file_size": (float(total_size) / total_count) if total_count else None, + } + @extend_schema_field(serializers.CharField) def get_formatted_created_at(self, obj: dict[str, Any]) -> str: """Format the created_at timestamp for display.""" @@ -350,6 +429,48 @@ def get_formatted_created_at(self, obj: dict[str, Any]) -> str: return created_at.strftime("%m/%d/%Y %I:%M:%S %p") return "" + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_length_of_capture_ms(self, obj: dict[str, Any]) -> int | None: + """Use first channel's bounds for composite capture duration.""" + channels = obj.get("channels") or [] + if not channels: + return None + try: + capture = Capture.objects.get(uuid=channels[0]["uuid"]) + start_time, end_time = get_capture_bounds( + capture.capture_type, str(capture.uuid) + ) + return (end_time - start_time) * 1000 + except (ValueError, IndexError, KeyError): + return 
None + + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_file_cadence_ms(self, obj: dict[str, Any]) -> int | None: + """Use first channel's file cadence for composite capture.""" + channels = obj.get("channels") or [] + if not channels: + return None + try: + capture = Capture.objects.get(uuid=channels[0]["uuid"]) + return get_file_cadence(capture.capture_type, capture) + except (ValueError, IndexError, KeyError): + return None + + @extend_schema_field(serializers.IntegerField(allow_null=True)) + def get_capture_start_epoch_sec(self, obj: dict[str, Any]) -> int | None: + """Use first channel's start time for composite capture.""" + channels = obj.get("channels") or [] + if not channels: + return None + try: + capture = Capture.objects.get(uuid=channels[0]["uuid"]) + start_time, _ = get_capture_bounds( + capture.capture_type, str(capture.uuid) + ) + return start_time + except (ValueError, IndexError, KeyError): + return None + def build_composite_capture_data(captures: list[Capture]) -> dict[str, Any]: """Build composite capture data from a list of captures with the same top_level_dir. 
diff --git a/gateway/sds_gateway/api_methods/tasks.py b/gateway/sds_gateway/api_methods/tasks.py index e4aed2651..440b7dfb7 100644 --- a/gateway/sds_gateway/api_methods/tasks.py +++ b/gateway/sds_gateway/api_methods/tasks.py @@ -20,6 +20,7 @@ from redis import Redis from sds_gateway.api_methods.models import Capture +from sds_gateway.api_methods.models import CaptureType from sds_gateway.api_methods.models import Dataset from sds_gateway.api_methods.models import File from sds_gateway.api_methods.models import ItemType @@ -676,15 +677,26 @@ def _process_item_files( item_type: ItemType, item_uuid: UUID, temp_zip: TemporaryZipFile, + start_time: int | None = None, + end_time: int | None = None, ) -> tuple[Mapping[str, UUID | int | str] | None, str | None, int | None, int | None]: # pyright: ignore[reportMissingTypeArgument] """ Process files for an item and create a zip file. + Args: + user: The user requesting the files + item: The item object (Dataset or Capture) + item_type: Type of item (dataset or capture) + item_uuid: UUID of the item to download + temp_zip: The temporary zip file to create + start_time: Optional start time for temporal filtering + end_time: Optional end time for temporal filtering + Returns: tuple: (error_response, zip_file_path, total_size, files_processed) If error_response is not None, the other values are None """ - files = _get_item_files(user, item, item_type) + files = _get_item_files(user, item, item_type, start_time, end_time) if not files: log.warning(f"No files found for {item_type} {item_uuid}") error_message = f"No files found in {item_type}" @@ -929,7 +941,6 @@ def _send_download_email( getattr(item, "name", str(item)) or f"{item_type.capitalize()} {item.uuid}" ) subject = f"Your {item_type} '{item_display_name}' is ready for download" - context = { "item_type": item_type, "item_name": item_display_name, @@ -979,7 +990,11 @@ def _handle_timeout_exception( time_limit=30 * 60, soft_time_limit=25 * 60 ) # 30 min hard limit, 25 min 
soft limit def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915 - item_uuid: UUID, user_id: str, item_type: str | ItemType + item_uuid: UUID, + user_id: str, + item_type: str | ItemType, + start_time: int | None = None, + end_time: int | None = None, ) -> Mapping[str, UUID | str | int]: """ Unified Celery task to create a zip file of item files and send it via email. @@ -990,6 +1005,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915 item_uuid: UUID of the item to process user_id: ID of the user requesting the download item_type: Type of item (dataset or capture) + start_time: Optional start time for temporal filtering + end_time: Optional end time for temporal filtering Returns: dict: Task result with status and details """ @@ -1053,6 +1070,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915 item_type=item_type_enum, item_uuid=item_uuid, temp_zip=temp_zip, + start_time=start_time, + end_time=end_time, ) ) if error_response: @@ -1251,7 +1270,13 @@ def _validate_item_download_request( return None, user, item -def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]: +def _get_item_files( + user: User, + item: Any, + item_type: ItemType, + start_time: int | None = None, + end_time: int | None = None, +) -> list[File]: """ Get all files for an item based on its type. 
@@ -1259,10 +1284,14 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]: user: The user requesting the files item: The item object (Dataset or Capture) item_type: Type of item (dataset or capture) - + start_time: Optional start time for temporal filtering + end_time: Optional end time for temporal filtering Returns: List of files associated with the item """ + from sds_gateway.api_methods.helpers.temporal_filtering import ( + get_capture_files_with_temporal_filter, + ) from sds_gateway.api_methods.utils.relationship_utils import get_capture_files from sds_gateway.api_methods.utils.relationship_utils import ( get_dataset_files_including_captures, @@ -1272,14 +1301,35 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]: files_queryset = get_dataset_files_including_captures( item, include_deleted=False ) - files = list(files_queryset) # Convert to list before len() to avoid SQL issues + files = list(files_queryset) log.info(f"Found {len(files)} files for dataset {item.uuid}") return files if item_type == ItemType.CAPTURE: - files = get_capture_files(item, include_deleted=False) + capture_type = item.capture_type + # temporal filtering is only supported for DigitalRF captures + if capture_type == CaptureType.DigitalRF: + files = get_capture_files_with_temporal_filter( + capture_type=capture_type, + capture=item, + start_time=start_time, + end_time=end_time, + ) + else: + if start_time is not None or end_time is not None: + log.warning( + "Temporal filtering is only supported for DigitalRF captures, " + "ignoring start_time and end_time" + ) + + files = get_capture_files( + capture=item, + include_deleted=False, + ) + + files = list(files) log.info(f"Found {len(files)} files for capture {item.uuid}") - return list(files) + return files log.warning(f"Unknown item type: {item_type}") return [] diff --git a/gateway/sds_gateway/api_methods/tests/factories.py b/gateway/sds_gateway/api_methods/tests/factories.py index 
3503cf4af..d0a00849f 100644 --- a/gateway/sds_gateway/api_methods/tests/factories.py +++ b/gateway/sds_gateway/api_methods/tests/factories.py @@ -13,10 +13,16 @@ from unittest.mock import patch from django.core.files.base import ContentFile -from factory import Faker +from faker import Faker as FakerInstance +from factory import Faker as FactoryFaker +from factory import LazyAttribute +from factory import LazyFunction from factory import post_generation +from factory import Sequence from factory.django import DjangoModelFactory +from sds_gateway.api_methods.helpers.temporal_filtering import drf_rf_filename_from_ms +from sds_gateway.api_methods.models import Capture from sds_gateway.api_methods.models import Dataset from sds_gateway.api_methods.models import File from sds_gateway.api_methods.models import ItemType @@ -24,6 +30,8 @@ from sds_gateway.api_methods.models import UserSharePermission from sds_gateway.users.tests.factories import UserFactory +# Standalone Faker for LazyFunction callbacks (not factory_boy's FactoryFaker declaration) +_faker = FakerInstance() class DatasetFactory(DjangoModelFactory): """Factory for creating Dataset instances for testing. 
@@ -79,22 +87,22 @@ class DatasetFactory(DjangoModelFactory): dataset = DatasetFactory(keywords=None) """ - uuid = Faker("uuid4") - name = Faker("sentence", nb_words=3) - abstract = Faker("text", max_nb_chars=200) - description = Faker("text", max_nb_chars=500) - doi = Faker("uuid4") + uuid = FactoryFaker("uuid4") + name = FactoryFaker("sentence", nb_words=3) + abstract = FactoryFaker("text", max_nb_chars=200) + description = FactoryFaker("text", max_nb_chars=500) + doi = FactoryFaker("uuid4") authors = ["John Doe", "Jane Smith"] license = "MIT" institutions = ["Example University"] - release_date = Faker("date_time") - repository = Faker("url") + release_date = FactoryFaker("date_time") + repository = FactoryFaker("url") version = 1 - website = Faker("url") + website = FactoryFaker("url") provenance = {"source": "test"} citation = {"title": "Test Dataset"} other = {"notes": "Test dataset"} - owner = Faker("subfactory", factory=UserFactory) + owner = FactoryFaker("subfactory", factory=UserFactory) is_deleted = False is_public = False @@ -181,14 +189,14 @@ class FileFactory(DjangoModelFactory): file = FileFactory(dataset=dataset) """ - uuid = Faker("uuid4") + uuid = FactoryFaker("uuid4") directory = "/files/test/" - name = Faker("file_name", extension="h5") + name = FactoryFaker("file_name", extension="h5") media_type = "application/x-hdf5" permissions = "rw-r--r--" - size = Faker("random_int", min=1000, max=1000000) - sum_blake3 = Faker("sha256") - owner = Faker("subfactory", factory=UserFactory) + size = FactoryFaker("random_int", min=1000, max=1000000) + sum_blake3 = FactoryFaker("sha256") + owner = FactoryFaker("subfactory", factory=UserFactory) is_deleted = False @post_generation @@ -223,6 +231,57 @@ class Meta: model = File +class CaptureFactory(DjangoModelFactory): + class Meta: + model = Capture + + channel = FactoryFaker("word") + capture_type = "drf" + top_level_dir = LazyFunction( + lambda: _faker.file_path(depth=2).replace("/", "_") + ) + owner = 
FactoryFaker("subfactory", factory=UserFactory) + name = FactoryFaker("slug") + index_name = "captures-drf" + + +class DRFDataFileFactory(DjangoModelFactory): + """Factory for creating DRF data file instances for testing. + + This factory creates realistic DRF data file objects that represent files stored in the system. + It generates test data for file metadata and creates a Django ContentFile for the actual file content. + + The factory creates files with realistic metadata including size, checksums, and proper file extensions. + It also handles the creation of the Django file field with test content. + """ + + uuid = FactoryFaker("uuid4") + directory = LazyAttribute(lambda obj: f"/files/{obj.owner.email}/{obj.capture.top_level_dir}/") + name = Sequence(lambda n: drf_rf_filename_from_ms(1000 + n * 1000)) + media_type = "application/x-hdf5" + permissions = "rw-r----" + size = FactoryFaker("random_int", min=1000, max=1000000) + sum_blake3 = FactoryFaker("sha256") + owner = FactoryFaker("subfactory", factory=UserFactory) + capture = FactoryFaker("subfactory", factory=CaptureFactory) + is_deleted = False + + @post_generation + def file(self, create, extracted, **kwargs): + if not create: + return + if extracted: + self.file = extracted + else: + content = b"test drf file content" + self.file = ContentFile(content, name=self.name) + + + + class Meta: + model = File + + class UserSharePermissionFactory(DjangoModelFactory): """Factory for creating UserSharePermission instances for testing. 
@@ -255,12 +314,12 @@ class UserSharePermissionFactory(DjangoModelFactory): permission = UserSharePermissionFactory(is_enabled=False) """ - owner = Faker("subfactory", factory=UserFactory) - shared_with = Faker("subfactory", factory=UserFactory) - item_type = Faker("random_element", elements=[ItemType.DATASET, ItemType.CAPTURE]) - item_uuid = Faker("uuid4") + owner = FactoryFaker("subfactory", factory=UserFactory) + shared_with = FactoryFaker("subfactory", factory=UserFactory) + item_type = FactoryFaker("random_element", elements=[ItemType.DATASET, ItemType.CAPTURE]) + item_uuid = FactoryFaker("uuid4") is_enabled = True - message = Faker("sentence", nb_words=5) + message = FactoryFaker("sentence", nb_words=5) is_deleted = False class Meta: diff --git a/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py b/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py index d5c6fba3a..2ceff4451 100644 --- a/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py +++ b/gateway/sds_gateway/api_methods/tests/test_celery_tasks.py @@ -36,6 +36,7 @@ from sds_gateway.api_methods.tasks import get_user_task_status from sds_gateway.api_methods.tasks import is_user_locked from sds_gateway.api_methods.tasks import release_user_lock +from sds_gateway.api_methods.tasks import _get_item_files from sds_gateway.api_methods.tasks import send_item_files_email from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size @@ -1232,3 +1233,56 @@ def test_large_file_download_redirects_to_sdk(self): assert result["status"] == "error" assert "SDK" in result["message"] assert "GB" in result["message"] # Check for GB in general + + def test_get_item_files_with_temporal_bounds_returns_expected_rf_subset(self): + """ + Task-level test: start_time/end_time flow into _get_item_files. 
+ For DigitalRF captures, ``get_capture_files_with_temporal_filter`` returns + non-DRF capture files (metadata) plus DRF files in the selected time range + (temporal_filtering details are unit-tested in test_temporal_filtering.py). + """ + # Create DRF-named files for self.capture (epoch 1s..5s) + epoch_start_sec = 1 + epoch_end_sec = 6 + for i in range(epoch_start_sec, epoch_end_sec): + name = f"rf@{i}.000.h5" + content = ContentFile(b"x", name=name) + File.objects.create( + name=name, + size=100, + directory=self.top_level_dir, + owner=self.user, + capture=self.capture, + file=content, + sum_blake3="a" * 64, + ) + # Link to capture via FK (get_capture_files uses both M2M and FK) + mock_response = { + "found": True, + "_source": { + "search_props": { + "start_time": epoch_start_sec, + "end_time": epoch_end_sec, + } + }, + } + with patch( + "sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client" + ) as m: + m.return_value.get.return_value = mock_response + # Relative ms: 1000–4000 from capture start → absolute 2s–5s filenames + result = _get_item_files( + self.user, + self.capture, + ItemType.CAPTURE, + start_time=1000, + end_time=4000, + ) + names = [f.name for f in result] + # DRF files in [2s, 5s] inclusive (see filter_capture_data_files_selection_bounds) + expected_rf = [f"rf@{i}.000.h5" for i in range(2, 6)] + rf_names = sorted(n for n in names if n.startswith("rf@")) + assert rf_names == expected_rf, f"Expected RF files {expected_rf}, got {rf_names}" + # Metadata / non-DRF capture files from setUp are still included in the download set + assert "test_file1.txt" in names + assert "test_file2.txt" in names diff --git a/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py b/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py new file mode 100644 index 000000000..fb5dfea8e --- /dev/null +++ b/gateway/sds_gateway/api_methods/tests/test_temporal_filtering.py @@ -0,0 +1,117 @@ +import time + +from unittest.mock import 
patch
+from django.db.models import QuerySet
+from django.test import TestCase
+
+import sds_gateway.api_methods.helpers.temporal_filtering as temporal_filtering
+from sds_gateway.api_methods.tests.factories import CaptureFactory, DRFDataFileFactory, UserFactory
+
+
+class TemporalFilteringTestCase(TestCase):
+    def setUp(self):
+        # get unix timestamp for now
+        self.now = int(time.time())
+        self.file_count = 10
+        self.user = UserFactory()
+        self.capture = CaptureFactory(owner=self.user, capture_type="drf")
+
+        # Create 10 (self.file_count) DRF data files in sequence with 1 second interval
+        self.files = [
+            DRFDataFileFactory(
+                capture=self.capture,
+                owner=self.user,
+                name=f"rf@{self.now + i}.000.h5",
+            )
+            for i in range(self.file_count)
+        ]
+
+    def _get_test_capture_bounds(self):
+        start_sec = int(self.now)
+        end_sec = start_sec + 10  # 10 second span
+        return start_sec, end_sec
+
+    def test_rf_filename_ms_conversion(self):
+        for i in range(10):
+            expected_ms = (self.now + i) * 1000
+            filename_to_ms = temporal_filtering.drf_rf_filename_to_ms(self.files[i].name)
+            assert filename_to_ms is not None
+            assert filename_to_ms == expected_ms
+
+            ms_to_filename = temporal_filtering.drf_rf_filename_from_ms(expected_ms)
+            assert ms_to_filename is not None
+            assert ms_to_filename == self.files[i].name
+
+    def test_get_capture_bounds(self):
+        start_sec, end_sec = self._get_test_capture_bounds()
+        # mock response, opensearch calls are tested in test_opensearch.py
+        mock_response = {
+            "found": True,
+            "_source": {
+                "search_props": {
+                    "start_time": start_sec,
+                    "end_time": end_sec,
+                }
+            },
+        }
+        with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m:
+            m.return_value.get.return_value = mock_response
+            start_time, end_time = temporal_filtering.get_capture_bounds(
+                self.capture.capture_type, str(self.capture.uuid)
+            )
+        assert start_time is not None
+        assert end_time is not None
+        assert start_time == start_sec
+        assert end_time == end_sec
+
+    def 
test_get_file_cadence(self): + start_sec, end_sec = self._get_test_capture_bounds() + # mock response, opensearch calls are tested in test_opensearch.py + mock_response = { + "found": True, + "_source": { + "search_props": { + "start_time": start_sec, + "end_time": end_sec, + } + }, + } + with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m: + m.return_value.get.return_value = mock_response + file_cadence = temporal_filtering.get_file_cadence( + self.capture.capture_type, self.capture + ) + + expected_cadence = max( + 1, int((end_sec - start_sec) * 1000 / self.file_count) + ) + + # duration_ms / DRF data file count (get_drf_data_files_stats total_count) + assert self.capture.get_drf_data_files_stats()["total_count"] == self.file_count + assert file_cadence == expected_cadence + + def test_file_filtering(self): + start_ms = 1000 + end_ms = 5000 + # Inclusive range: 1s, 2s, 3s, 4s, 5s -> 5 files + expected_count = (end_ms - start_ms) // 1000 + 1 + start_sec, end_sec = self._get_test_capture_bounds() + mock_response = { + "found": True, + "_source": { + "search_props": { + "start_time": start_sec, + "end_time": end_sec, + } + }, + } + with patch("sds_gateway.api_methods.helpers.temporal_filtering.get_opensearch_client") as m: + m.return_value.get.return_value = mock_response + filtered_files = temporal_filtering.filter_capture_data_files_selection_bounds( + self.capture.capture_type, self.capture, start_ms, end_ms + ) + assert isinstance(filtered_files, QuerySet) + assert filtered_files.count() == expected_count + names = list(filtered_files.values_list("name", flat=True)) + for i in range(expected_count): + assert names[i] == self.files[1 + i].name diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js index ca33ca45d..e2af831a9 100644 --- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js +++ 
b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js @@ -2,6 +2,67 @@ * Download Action Manager * Handles all download-related actions */ + +function msToHms(ms) { + const n = Number(ms); + if (!Number.isFinite(n) || n < 0) return "0:00:00.000"; + const totalSec = Math.floor(n / 1000); + const h = Math.floor(totalSec / 3600); + const m = Math.floor((totalSec % 3600) / 60); + const s = totalSec % 60; + const decimalMs = n % 1000; + const hms = [h, m, s].map((v) => String(v).padStart(2, "0")).join(":"); + return hms + "." + String(decimalMs).padStart(3, "0"); +} + +function formatUtcRange(startEpochSec, startMs, endMs) { + if (!Number.isFinite(startEpochSec)) return "—"; + const startDate = new Date(startEpochSec * 1000 + startMs); + const endDate = new Date(startEpochSec * 1000 + endMs); + const pad2 = (x) => String(x).padStart(2, "0"); + const fmt = (d) => + pad2(d.getUTCHours()) + + ":" + + pad2(d.getUTCMinutes()) + + ":" + + pad2(d.getUTCSeconds()) + + " " + + pad2(d.getUTCMonth() + 1) + + "/" + + pad2(d.getUTCDate()) + + "/" + + d.getUTCFullYear(); + return fmt(startDate) + " - " + fmt(endDate) + " (UTC)"; +} + +/** Format ms from capture start as UTC string for display (Y-m-d H:i:s). */ +function msToUtcString(captureStartEpochSec, ms) { + if (!Number.isFinite(captureStartEpochSec) || !Number.isFinite(ms)) return ""; + const d = new Date(captureStartEpochSec * 1000 + ms); + const pad2 = (x) => String(x).padStart(2, "0"); + return ( + d.getUTCFullYear() + + "-" + + pad2(d.getUTCMonth() + 1) + + "-" + + pad2(d.getUTCDate()) + + " " + + pad2(d.getUTCHours()) + + ":" + + pad2(d.getUTCMinutes()) + + ":" + + pad2(d.getUTCSeconds()) + ); +} + +/** Parse UTC date string (Y-m-d H:i:s or Y-m-d H:i) to epoch ms. */ +function parseUtcStringToEpochMs(str) { + if (!str || !str.trim()) return NaN; + const s = str.trim(); + const d = new Date(s.endsWith("Z") ? s : s.replace(" ", "T") + "Z"); + return Number.isFinite(d.getTime()) ? 
d.getTime() : NaN; +} + class DownloadActionManager { /** * Initialize download action manager @@ -16,12 +77,6 @@ class DownloadActionManager { * Initialize event listeners */ initializeEventListeners() { - // Initialize download buttons for datasets - this.initializeDatasetDownloadButtons(); - - // Initialize download buttons for captures - this.initializeCaptureDownloadButtons(); - // Initialize web download modal buttons this.initializeWebDownloadButtons(); @@ -30,10 +85,10 @@ class DownloadActionManager { } /** - * Initialize dataset download buttons + * Initialize web download buttons on the table rows */ - initializeDatasetDownloadButtons() { - const downloadButtons = document.querySelectorAll(".download-dataset-btn"); + initializeWebDownloadButtons() { + const downloadButtons = document.querySelectorAll(".web-download-btn"); for (const button of downloadButtons) { // Prevent duplicate event listener attachment @@ -46,72 +101,330 @@ class DownloadActionManager { e.preventDefault(); e.stopPropagation(); - const datasetUuid = button.getAttribute("data-dataset-uuid"); - const datasetName = button.getAttribute("data-dataset-name"); + const itemUuid = button.getAttribute("data-item-uuid"); + const itemType = button.getAttribute("data-item-type"); if (!this.permissions.canDownload()) { this.showToast( - "You don't have permission to download this dataset", + `You don't have permission to download this ${itemType}`, "warning", ); return; } - this.handleDatasetDownload(datasetUuid, datasetName, button); + this.initializeWebDownloadModal(itemUuid, itemType, button); }); } } /** - * Initialize capture download buttons - */ - initializeCaptureDownloadButtons() { - const downloadButtons = document.querySelectorAll(".download-capture-btn"); - - for (const button of downloadButtons) { - // Prevent duplicate event listener attachment - if (button.dataset.downloadSetup === "true") { - continue; + * Initialize or update the capture download temporal slider. 
Call before + * showing the modal when opening for a capture with known bounds. + * @param {number} durationMs - Total capture duration in milliseconds + * @param {number} fileCadenceMs - File cadence in milliseconds (step) + * @param {Object} opts - Optional: { perDataFileSize, totalSize, dataFilesCount, totalFilesCount, dataFilesTotalSize, captureUuid, captureStartEpochSec } + */ + initializeCaptureDownloadSlider(modalId, durationMs, fileCadenceMs, opts) { + const webDownloadModal = document.getElementById(modalId); + if (!webDownloadModal) return; + + opts = opts || {}; + const q = (id) => webDownloadModal.querySelector("#" + id); + const sliderEl = q("temporalFilterSlider"); + const rangeLabel = q("temporalFilterRangeLabel"); + const totalFilesLabel = q("totalFilesLabel"); + const metadataFilesLabel = q("metadataFilesLabel"); + const totalSizeLabel = q("totalSizeLabel"); + const dateTimeLabel = q("dateTimeLabel"); + const startTimeInput = q("startTime"); + const endTimeInput = q("endTime"); + const startTimeEntry = q("startTimeEntry"); + const endTimeEntry = q("endTimeEntry"); + const startDateTimeEntry = q("startDateTimeEntry"); + const endDateTimeEntry = q("endDateTimeEntry"); + const rangeHintEl = q("temporalRangeHint"); + const sizeWarningEl = q("temporalFilterSizeWarning"); + if (!sliderEl || typeof noUiSlider === 'undefined') return; + durationMs = Number(durationMs); + if (!Number.isFinite(durationMs) || durationMs < 0) durationMs = 0; + fileCadenceMs = Number(fileCadenceMs); + if (!Number.isFinite(fileCadenceMs) || fileCadenceMs < 1) fileCadenceMs = 1000; + const perDataFileSize = Number(opts.perDataFileSize) || 0; + const totalSize = Number(opts.totalSize) || 0; + const dataFilesCount = Number(opts.dataFilesCount) || 0; + const totalFilesCount = Number(opts.totalFilesCount) || 0; + let dataFilesTotalSize = Number(opts.dataFilesTotalSize); + if (!Number.isFinite(dataFilesTotalSize) || dataFilesTotalSize < 0) { + dataFilesTotalSize = perDataFileSize * 
dataFilesCount; + } + let metadataFilesTotalSize = totalSize - dataFilesTotalSize; + if (metadataFilesTotalSize < 0) metadataFilesTotalSize = 0; + const metadataFilesCount = Math.max(0, totalFilesCount - dataFilesCount); + const captureUuid = opts.captureUuid != null ? String(opts.captureUuid) : ''; + const captureStartEpochSec = Number(opts.captureStartEpochSec); + if (totalSize > 0 && dataFilesTotalSize > totalSize) { + console.warn( + '[DownloadActionManager] data files total size exceeds total size (backend/query inconsistency).', + { captureUuid: captureUuid || '(unknown)', totalSize, dataFilesTotalSize, perDataFileSize, dataFilesCount } + ); + if (sizeWarningEl) { + sizeWarningEl.classList.remove('d-none'); } - button.dataset.downloadSetup = "true"; + dataFilesTotalSize = totalSize; + metadataFilesTotalSize = 0; + } else if (sizeWarningEl) { + sizeWarningEl.classList.add('d-none'); + } + if (webDownloadModal) { + webDownloadModal.dataset.durationMs = String(Math.round(durationMs)); + webDownloadModal.dataset.fileCadenceMs = String(fileCadenceMs); + webDownloadModal.dataset.captureStartEpochSec = Number.isFinite(captureStartEpochSec) ? 
String(captureStartEpochSec) : ''; + } + if (rangeHintEl) rangeHintEl.textContent = '0 – ' + Math.round(durationMs) + ' ms'; + if (sliderEl.noUiSlider) { + sliderEl.noUiSlider.destroy(); + } + if (rangeLabel) rangeLabel.textContent = '—'; + if (totalFilesLabel) totalFilesLabel.textContent = '0 files'; + if (totalSizeLabel) totalSizeLabel.textContent = window.DOMUtils.formatFileSize(totalSize); + if (dateTimeLabel) dateTimeLabel.textContent = '—'; + if (startTimeInput) startTimeInput.value = ''; + if (endTimeInput) endTimeInput.value = ''; + if (startTimeEntry) startTimeEntry.value = ''; + if (endTimeEntry) endTimeEntry.value = ''; + const hasEpoch = Number.isFinite(captureStartEpochSec); + if (startDateTimeEntry) { + startDateTimeEntry.value = ''; + startDateTimeEntry.disabled = !hasEpoch; + } + if (endDateTimeEntry) { + endDateTimeEntry.value = ''; + endDateTimeEntry.disabled = !hasEpoch; + } + if (durationMs <= 0) return; + let fpStart = null; + let fpEnd = null; + const epochStart = captureStartEpochSec * 1000; + const epochEnd = epochStart + durationMs; + if (hasEpoch && typeof flatpickr !== 'undefined' && startDateTimeEntry && endDateTimeEntry) { + const fpOpts = { + enableTime: true, + enableSeconds: true, + utc: true, + dateFormat: 'Y-m-d H:i:S', + time_24hr: true, + minDate: epochStart, + maxDate: epochEnd, + allowInput: true, + static: true, + appendTo: webDownloadModal || undefined, + }; + flatpickr(startDateTimeEntry, Object.assign({}, fpOpts, { + onChange: function() { syncFromDateTimeEntries(); } + })); + flatpickr(endDateTimeEntry, Object.assign({}, fpOpts, { + onChange: function() { syncFromDateTimeEntries(); } + })); + fpStart = startDateTimeEntry._flatpickr; + fpEnd = endDateTimeEntry._flatpickr; + startDateTimeEntry.disabled = false; + endDateTimeEntry.disabled = false; + } + noUiSlider.create(sliderEl, { + start: [0, durationMs], + connect: true, + step: fileCadenceMs, + range: { min: 0, max: durationMs }, + }); + sliderEl.noUiSlider.on('update', 
function(values) { + const startMs = Number(values[0]); + const endMs = Number(values[1]); + // the + 1 is to include the first file in the selection + // as file cadence is the time between files, not the time of the file + const filesInSelection = Math.round((endMs - startMs) / fileCadenceMs) + 1; + if (rangeLabel) { + rangeLabel.textContent = msToHms(startMs) + ' - ' + msToHms(endMs); + } + if (totalFilesLabel) { + totalFilesLabel.textContent = dataFilesCount > 0 + ? filesInSelection + ' of ' + dataFilesCount + ' files' + : filesInSelection + ' files'; + } + if (totalSizeLabel) { + totalSizeLabel.textContent = window.DOMUtils.formatFileSize( + (perDataFileSize * filesInSelection) + metadataFilesTotalSize + ); + } + if (dateTimeLabel && Number.isFinite(captureStartEpochSec)) { + dateTimeLabel.textContent = formatUtcRange(captureStartEpochSec, startMs, endMs); + } + if (startTimeInput) startTimeInput.value = String(Math.round(startMs)); + if (endTimeInput) endTimeInput.value = String(Math.round(endMs)); + if (startTimeEntry) startTimeEntry.value = String(Math.round(startMs)); + if (endTimeEntry) endTimeEntry.value = String(Math.round(endMs)); + if (hasEpoch) { + if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart + startMs); + else if (startDateTimeEntry) startDateTimeEntry.value = msToUtcString(captureStartEpochSec, startMs); + if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochStart + endMs); + else if (endDateTimeEntry) endDateTimeEntry.value = msToUtcString(captureStartEpochSec, endMs); + } + }); + if (rangeLabel) { + rangeLabel.textContent = '0:00:00.000 - ' + msToHms(durationMs); + } + if (totalFilesLabel) { + totalFilesLabel.textContent = dataFilesCount > 0 + ? dataFilesCount + ' files' + : '0 files'; + } + if (metadataFilesLabel) { + metadataFilesLabel.textContent = metadataFilesCount > 0 + ? 
metadataFilesCount + ' files' + : '0 files'; + } + if (dateTimeLabel && Number.isFinite(captureStartEpochSec)) { + dateTimeLabel.textContent = formatUtcRange(captureStartEpochSec, 0, durationMs); + } + const startVal = '0'; + const endVal = String(durationMs); + if (startTimeInput) startTimeInput.value = startVal; + if (endTimeInput) endTimeInput.value = endVal; + if (startTimeEntry) startTimeEntry.value = startVal; + if (endTimeEntry) endTimeEntry.value = endVal; + if (hasEpoch && startDateTimeEntry && endDateTimeEntry) { + if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart); + else startDateTimeEntry.value = msToUtcString(captureStartEpochSec, 0); + if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochEnd); + else endDateTimeEntry.value = msToUtcString(captureStartEpochSec, durationMs); + if (!fpStart) { startDateTimeEntry.disabled = false; endDateTimeEntry.disabled = false; } + } - button.addEventListener("click", (e) => { - e.preventDefault(); - e.stopPropagation(); + function syncSliderFromEntries() { + if (!sliderEl.noUiSlider || !startTimeEntry || !endTimeEntry) return; + const s = startTimeEntry.value.trim(); + const e = endTimeEntry.value.trim(); + let startMs = s === '' ? 0 : parseInt(s, 10); + let endMs = e === '' ? 
durationMs : parseInt(e, 10); + if (!Number.isFinite(startMs)) startMs = 0; + if (!Number.isFinite(endMs)) endMs = durationMs; + startMs = Math.max(0, Math.min(startMs, durationMs)); + endMs = Math.max(0, Math.min(endMs, durationMs)); + if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs); + sliderEl.noUiSlider.set([startMs, endMs]); + } + function syncFromDateTimeEntries() { + if (!hasEpoch || !sliderEl.noUiSlider || !startDateTimeEntry || !endDateTimeEntry) return; + let startMs, endMs; + if (startDateTimeEntry._flatpickr && endDateTimeEntry._flatpickr) { + const dStart = startDateTimeEntry._flatpickr.selectedDates[0]; + const dEnd = endDateTimeEntry._flatpickr.selectedDates[0]; + startMs = dStart ? dStart.getTime() - epochStart : 0; + endMs = dEnd ? dEnd.getTime() - epochStart : durationMs; + } else { + startMs = parseUtcStringToEpochMs(startDateTimeEntry.value) - epochStart; + endMs = parseUtcStringToEpochMs(endDateTimeEntry.value) - epochStart; + } + if (Number.isNaN(startMs) || Number.isNaN(endMs)) return; + startMs = Math.max(0, Math.min(startMs, durationMs)); + endMs = Math.max(0, Math.min(endMs, durationMs)); + if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs); + const cur = sliderEl.noUiSlider.get(); + if (Math.round(Number(cur[0])) === Math.round(startMs) && Math.round(Number(cur[1])) === Math.round(endMs)) return; + sliderEl.noUiSlider.set([startMs, endMs]); + } + if (startTimeEntry) startTimeEntry.addEventListener('change', syncSliderFromEntries); + if (endTimeEntry) endTimeEntry.addEventListener('change', syncSliderFromEntries); + if (startDateTimeEntry && !startDateTimeEntry._flatpickr) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries); + if (endDateTimeEntry && !endDateTimeEntry._flatpickr) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries); + } - const captureUuid = button.getAttribute("data-capture-uuid"); + setTemporalSliderAttrs(modalId, button, itemUuid) { + 
// Initialize temporal slider from button data attributes (clears or builds slider) + let durationMs = parseInt(button.getAttribute("data-length-of-capture-ms"), 10); + let fileCadenceMs = parseInt(button.getAttribute("data-file-cadence-ms"), 10); + let perDataFileSize = parseFloat(button.getAttribute("data-per-data-file-size")); + let dataFilesCount = parseInt(button.getAttribute("data-data-files-count"), 10); + let dataFilesTotalSize = parseInt(button.getAttribute("data-total-data-file-size"), 10); + let totalSize = parseInt(button.getAttribute("data-total-size"), 10); + let totalFilesCount = parseInt(button.getAttribute("data-total-files-count"), 10); + let captureStartEpochSec = parseInt(button.getAttribute("data-capture-start-epoch-sec"), 10); + this.initializeCaptureDownloadSlider( + modalId, + Number.isNaN(durationMs) ? 0 : durationMs, + Number.isNaN(fileCadenceMs) ? 1000 : fileCadenceMs, + { + perDataFileSize: Number.isNaN(perDataFileSize) ? 0 : perDataFileSize, + totalSize: Number.isNaN(totalSize) ? 0 : totalSize, + dataFilesCount: Number.isNaN(dataFilesCount) ? 0 : dataFilesCount, + totalFilesCount: Number.isNaN(totalFilesCount) ? 0 : totalFilesCount, + dataFilesTotalSize: Number.isNaN(dataFilesTotalSize) ? undefined : dataFilesTotalSize, + captureUuid: itemUuid || undefined, + captureStartEpochSec: Number.isNaN(captureStartEpochSec) ? 
undefined : captureStartEpochSec, + }, + ); + } - if (!this.permissions.canDownload()) { + addTimeFilteringToFetchRequest(modalId) { + const modalEl = document.getElementById(modalId); + if (!modalEl) { + return { body: {}, isJson: false }; + } + const startTimeInput = modalEl.querySelector("#startTime"); + const endTimeInput = modalEl.querySelector("#endTime"); + const startEntry = modalEl.querySelector("#startTimeEntry"); + const endEntry = modalEl.querySelector("#endTimeEntry"); + + if (startEntry && endEntry && modalEl && modalEl.dataset.durationMs) { + const entryStart = startEntry.value.trim(); + const entryEnd = endEntry.value.trim(); + if (entryStart !== "" || entryEnd !== "") { + const durationMs = parseInt(modalEl.dataset.durationMs, 10); + const startMs = entryStart === "" ? 0 : parseInt(entryStart, 10); + const endMs = entryEnd === "" ? durationMs : parseInt(entryEnd, 10); + if ( + !Number.isFinite(startMs) || + !Number.isFinite(endMs) || + startMs < 0 || + endMs > durationMs || + startMs >= endMs + ) { this.showToast( - "You don't have permission to download this capture", + "Please enter valid start/end times (0 ≤ start < end ≤ " + durationMs + " ms).", "warning", ); return; } + if (startTimeInput) startTimeInput.value = String(startMs); + if (endTimeInput) endTimeInput.value = String(endMs); + } + } - this.handleCaptureDownload(captureUuid, button); - }); + let body = {}; + let isJson = true; + if (startTimeInput && endTimeInput && startTimeInput.value && endTimeInput.value) { + body.start_time = startTimeInput.value; + body.end_time = endTimeInput.value; + isJson = false; } + + return { body, isJson }; } /** - * Handle dataset download - * @param {string} datasetUuid - Dataset UUID - * @param {string} datasetName - Dataset name + * Initialize web download modal for assets * @param {Element} button - Download button element */ - async handleDatasetDownload(datasetUuid, datasetName, button) { - // Update modal content - const modalNameElement = 
document.getElementById("downloadDatasetName"); - if (modalNameElement) { - modalNameElement.textContent = datasetName; - } - + async initializeWebDownloadModal(itemUuid, itemType, button) { + const modalId = `webDownloadModal-${itemUuid}`; // Show the modal - window.DOMUtils.openModal("downloadModal"); + window.DOMUtils.openModal(modalId); + + if (itemType === "capture") { + this.setTemporalSliderAttrs(modalId, button, itemUuid); + } // Handle confirm download - const confirmBtn = document.getElementById("confirmDownloadBtn"); + const confirmBtn = document.getElementById(`confirmWebDownloadBtn-${itemUuid}`); if (!confirmBtn) return; // Remove any existing event listeners @@ -119,9 +432,6 @@ class DownloadActionManager { confirmBtn.parentNode.replaceChild(newConfirmBtn, confirmBtn); newConfirmBtn.onclick = async () => { - // Close modal first - window.DOMUtils.closeModal("downloadModal"); - // Show loading state const originalContent = button.innerHTML; await window.DOMUtils.renderLoading(button, "Processing...", { @@ -130,10 +440,23 @@ class DownloadActionManager { }); button.disabled = true; + // Close modal + window.DOMUtils.closeModal(modalId); + + let body = {}; + let isJson = false; try { + + if (itemType === "capture") { + const result = this.addTimeFilteringToFetchRequest(modalId); + body = result.body; + isJson = result.isJson; + } const response = await window.APIClient.post( - `/users/download-item/dataset/${datasetUuid}/`, - {}, + `/users/download-item/${itemType}/${itemUuid}/`, + body, + null, + isJson, ); if (response.success === true) { @@ -179,136 +502,6 @@ class DownloadActionManager { }; } - /** - * Handle capture download (modal copy comes from web_download_modal.html) - * @param {string} captureUuid - Capture UUID - * @param {Element} [button] - Optional row action button for loading state - */ - async handleCaptureDownload(captureUuid, button) { - const modalId = `webDownloadModal-${captureUuid}`; - const modal = 
document.getElementById(modalId); - if (!modal) { - console.warn(`Web download modal not found for capture ${captureUuid}`); - return; - } - - const confirmBtn = document.getElementById( - `confirmWebDownloadBtn-${captureUuid}`, - ); - - if (!confirmBtn) { - console.warn( - `Web download confirm button not found for capture ${captureUuid}`, - ); - return; - } - - const newConfirmBtn = confirmBtn.cloneNode(true); - confirmBtn.parentNode.replaceChild(newConfirmBtn, confirmBtn); - - const originalRowButtonContent = button?.innerHTML; - - newConfirmBtn.onclick = async () => { - window.DOMUtils.closeModal(modalId); - - if (button) { - await window.DOMUtils.renderLoading(button, "Processing...", { - format: "spinner", - size: "sm", - }); - button.disabled = true; - } - - try { - const response = await window.APIClient.post( - `/users/download-item/capture/${captureUuid}/`, - {}, - ); - - if (response.success === true) { - if (button) { - await window.DOMUtils.renderContent(button, { - icon: "check-circle", - color: "success", - text: "Download Requested", - }); - } - this.showToast( - response.message || - "Download request submitted successfully! You will receive an email when ready.", - "success", - ); - } else { - if (button) { - await window.DOMUtils.renderContent(button, { - icon: "exclamation-triangle", - color: "danger", - text: "Request Failed", - }); - } - this.showToast( - response.message || "Download request failed. 
Please try again.", - "danger", - ); - } - } catch (error) { - console.error("Download error:", error); - if (button) { - await window.DOMUtils.renderContent(button, { - icon: "exclamation-triangle", - color: "danger", - text: "Request Failed", - }); - } - this.showToast( - error.message || "An error occurred while processing your request.", - "danger", - ); - } finally { - if (button && originalRowButtonContent !== undefined) { - setTimeout(() => { - button.innerHTML = originalRowButtonContent; - button.disabled = false; - }, 3000); - } - } - }; - - window.DOMUtils.openModal(modalId); - } - - /** - * Initialize web download modal buttons - */ - initializeWebDownloadButtons() { - // Find all web download buttons (by data attribute or class) - const webDownloadButtons = document.querySelectorAll( - '[data-action="web-download"], .web-download-btn', - ); - - for (const button of webDownloadButtons) { - // Prevent duplicate event listener attachment - if (button.dataset.downloadSetup === "true") { - continue; - } - button.dataset.downloadSetup = "true"; - - button.addEventListener("click", (e) => { - e.preventDefault(); - e.stopPropagation(); - - const datasetUuid = button.getAttribute("data-dataset-uuid"); - - if (!datasetUuid) { - console.warn("Web download button missing dataset-uuid attribute"); - return; - } - - this.openWebDownloadModal(datasetUuid); - }); - } - } - /** * Initialize SDK download modal buttons */ @@ -341,96 +534,6 @@ class DownloadActionManager { } } - /** - * Open web download modal for a specific dataset (labels from web_download_modal.html) - * @param {string} datasetUuid - Dataset UUID - */ - openWebDownloadModal(datasetUuid) { - const modalId = `webDownloadModal-${datasetUuid}`; - const modal = document.getElementById(modalId); - if (!modal) { - console.warn(`Web download modal not found for dataset ${datasetUuid}`); - return; - } - - const confirmBtn = document.getElementById( - `confirmWebDownloadBtn-${datasetUuid}`, - ); - - if 
(!confirmBtn) { - console.warn(`Confirm button not found for dataset ${datasetUuid}`); - return; - } - - // Remove any existing event listeners by cloning - const newConfirmBtn = confirmBtn.cloneNode(true); - confirmBtn.parentNode.replaceChild(newConfirmBtn, confirmBtn); - - // Attach download handler - newConfirmBtn.onclick = async () => { - // Close modal first - window.DOMUtils.closeModal(modalId); - - // Show loading state - const originalContent = newConfirmBtn.innerHTML; - await window.DOMUtils.renderLoading(newConfirmBtn, "Processing...", { - format: "spinner", - size: "sm", - }); - newConfirmBtn.disabled = true; - - try { - const response = await window.APIClient.post( - `/users/download-item/dataset/${datasetUuid}/`, - {}, - ); - - if (response.success === true) { - await window.DOMUtils.renderContent(newConfirmBtn, { - icon: "check-circle", - color: "success", - text: "Download Requested", - }); - this.showToast( - response.message || - "Download request submitted successfully! You will receive an email when ready.", - "success", - ); - } else { - await window.DOMUtils.renderContent(newConfirmBtn, { - icon: "exclamation-triangle", - color: "danger", - text: "Request Failed", - }); - this.showToast( - response.message || "Download request failed. 
Please try again.", - "danger", - ); - } - } catch (error) { - console.error("Download error:", error); - await window.DOMUtils.renderContent(newConfirmBtn, { - icon: "exclamation-triangle", - color: "danger", - text: "Request Failed", - }); - this.showToast( - error.message || "An error occurred while processing your request.", - "danger", - ); - } finally { - // Reset button after 3 seconds - setTimeout(() => { - newConfirmBtn.innerHTML = originalContent; - newConfirmBtn.disabled = false; - }, 3000); - } - }; - - // Use centralized openModal method - window.DOMUtils.openModal(modalId); - } - /** * Open SDK download modal for a specific dataset * @param {string} datasetUuid - Dataset UUID @@ -476,65 +579,6 @@ class DownloadActionManager { } } - /** - * Initialize download buttons for dynamically loaded content - * @param {Element} container - Container element to search within - */ - initializeDownloadButtonsForContainer(container) { - // Initialize dataset download buttons in the container - const datasetDownloadButtons = container.querySelectorAll( - ".download-dataset-btn", - ); - for (const button of datasetDownloadButtons) { - if (!button.dataset.downloadSetup) { - button.dataset.downloadSetup = "true"; - button.addEventListener("click", (e) => { - e.preventDefault(); - e.stopPropagation(); - - const datasetUuid = button.getAttribute("data-dataset-uuid"); - const datasetName = button.getAttribute("data-dataset-name"); - - if (!this.permissions.canDownload()) { - this.showToast( - "You don't have permission to download this dataset", - "warning", - ); - return; - } - - this.handleDatasetDownload(datasetUuid, datasetName, button); - }); - } - } - - // Initialize capture download buttons in the container - const captureDownloadButtons = container.querySelectorAll( - ".download-capture-btn", - ); - for (const button of captureDownloadButtons) { - if (!button.dataset.downloadSetup) { - button.dataset.downloadSetup = "true"; - button.addEventListener("click", (e) 
=> { - e.preventDefault(); - e.stopPropagation(); - - const captureUuid = button.getAttribute("data-capture-uuid"); - - if (!this.permissions.canDownload()) { - this.showToast( - "You don't have permission to download this capture", - "warning", - ); - return; - } - - this.handleCaptureDownload(captureUuid, button); - }); - } - } - } - /** * Check if user can download specific item * @param {Object} item - Item object @@ -568,11 +612,10 @@ class DownloadActionManager { cleanup() { // Remove event listeners and clean up any resources const downloadButtons = document.querySelectorAll( - ".download-dataset-btn, .download-capture-btn", + ".web-download-btn", ); for (const button of downloadButtons) { - button.removeEventListener("click", this.handleDatasetDownload); - button.removeEventListener("click", this.handleCaptureDownload); + button.removeEventListener("click", this.initializeWebDownloadButtons); } } } diff --git a/gateway/sds_gateway/static/js/actions/__tests__/DownloadActionManager.test.js b/gateway/sds_gateway/static/js/actions/__tests__/DownloadActionManager.test.js index 8db67530c..7f3369262 100644 --- a/gateway/sds_gateway/static/js/actions/__tests__/DownloadActionManager.test.js +++ b/gateway/sds_gateway/static/js/actions/__tests__/DownloadActionManager.test.js @@ -55,17 +55,11 @@ describe("DownloadActionManager", () => { // Mock document methods document.querySelector = jest.fn(() => mockButton); document.querySelectorAll = jest.fn((selector) => { - if (selector === ".download-dataset-btn") return [mockButton]; - if (selector === ".download-capture-btn") return [mockButton]; - if (selector === ".download-dataset-btn, .download-capture-btn") - return [mockButton]; + if (selector === ".web-download-btn") return [mockButton]; return []; }); document.getElementById = jest.fn((id) => { - if (id === "downloadModal") return mockModal; - if (id === "downloadDatasetName") return { textContent: "" }; - if (id === "confirmDownloadBtn") return mockButton; if 
(id.startsWith("webDownloadModal-")) return mockModal; if (id.startsWith("webDownloadModalLabel-")) return { innerHTML: "" }; if (id.startsWith("webDownloadDatasetName-")) return { textContent: "" }; @@ -163,25 +157,22 @@ describe("DownloadActionManager", () => { addEventListener: jest.fn(), dataset: { downloadSetup: "false" }, getAttribute: jest.fn((attr) => { - if (attr === "data-dataset-uuid") return "test-dataset-uuid"; - if (attr === "data-dataset-name") return "Test Dataset"; + if (attr === "data-item-uuid") return "test-item-uuid"; + if (attr === "data-item-type") return "dataset"; return null; }), }; document.querySelectorAll = jest.fn((selector) => { - if (selector === ".download-dataset-btn") return [mockButton]; - if (selector === ".download-capture-btn") return [mockButton]; - if (selector === ".download-dataset-btn, .download-capture-btn") - return [mockButton]; + if (selector === ".web-download-btn") return [mockButton]; return []; }); document.getElementById = jest.fn((id) => { - if (id === "downloadDatasetName") { + if (id.startsWith("webDownloadDatasetName-")) { return { textContent: "" }; } - if (id === "confirmDownloadBtn") { + if (id.startsWith("confirmWebDownloadBtn-")) { return { cloneNode: jest.fn(() => { clonedConfirmBtn = { @@ -199,16 +190,14 @@ describe("DownloadActionManager", () => { downloadManager = new DownloadActionManager({ permissions: { canDownload: () => true }, }); - downloadManager.openCustomModal = jest.fn(); - downloadManager.closeCustomModal = jest.fn(); downloadManager.showToast = jest.fn(); }); - test("should initialize dataset download buttons", () => { - downloadManager.initializeDatasetDownloadButtons(); + test("should initialize web download buttons", () => { + downloadManager.initializeWebDownloadButtons(); expect(document.querySelectorAll).toHaveBeenCalledWith( - ".download-dataset-btn", + ".web-download-btn", ); expect(mockButton.addEventListener).toHaveBeenCalledWith( "click", @@ -222,9 +211,9 @@ 
describe("DownloadActionManager", () => { message: "Download request submitted", }); - await downloadManager.handleDatasetDownload( - "test-uuid", - "Test Dataset", + await downloadManager.initializeWebDownloadModal( + "test-item-uuid", + "dataset", mockButton, ); @@ -236,8 +225,10 @@ describe("DownloadActionManager", () => { await new Promise((resolve) => setTimeout(resolve, 0)); expect(mockAPIClient.post).toHaveBeenCalledWith( - "/users/download-item/dataset/test-uuid/", + "/users/download-item/dataset/test-item-uuid/", {}, + null, + false, ); expect(window.DOMUtils.renderContent).toHaveBeenCalledWith( mockButton, @@ -266,7 +257,7 @@ describe("DownloadActionManager", () => { // Clear previous calls mockButtonWithSetup.addEventListener.mockClear(); - downloadManager.initializeDatasetDownloadButtons(); + downloadManager.initializeWebDownloadButtons(); expect(mockButtonWithSetup.addEventListener).not.toHaveBeenCalled(); }); @@ -297,35 +288,60 @@ describe("DownloadActionManager", () => { describe("Capture Download Functionality", () => { beforeEach(() => { - if (!window.DOMUtils) window.DOMUtils = {}; - window.DOMUtils.showAlert = jest.fn(); + window.DOMUtils = { + ...global.window.DOMUtils, + openModal: jest.fn(), + closeModal: jest.fn(), + renderLoading: jest.fn().mockResolvedValue(true), + renderContent: jest.fn().mockResolvedValue(true), + showAlert: jest.fn(), + }; downloadManager = new DownloadActionManager({ permissions: mockPermissions, }); }); - test("should initialize capture download buttons", () => { - downloadManager.initializeCaptureDownloadButtons(); + test("should configure temporal slider when opening web download for capture", async () => { + const spy = jest + .spyOn(downloadManager, "setTemporalSliderAttrs") + .mockImplementation(() => {}); - expect(document.querySelectorAll).toHaveBeenCalledWith( - ".download-capture-btn", - ); - expect(mockButton.addEventListener).toHaveBeenCalledWith( - "click", - expect.any(Function), - ); - }); + 
document.getElementById = jest.fn((id) => { + if (id.startsWith("confirmWebDownloadBtn-")) { + return { + cloneNode: jest.fn(() => ({ + parentNode: { replaceChild: jest.fn() }, + onclick: null, + })), + parentNode: { replaceChild: jest.fn() }, + }; + } + return null; + }); - test("should handle capture download click with permissions", async () => { - const captureUuid = "test-capture-uuid"; + const captureBtn = { + innerHTML: "Download", + disabled: false, + dataset: {}, + getAttribute: jest.fn((attr) => { + if (attr === "data-item-uuid") return "test-capture-uuid"; + if (attr === "data-item-type") return "capture"; + return null; + }), + }; - // Ensure window.DOMUtils is properly set up - window.DOMUtils = global.window.DOMUtils; + await downloadManager.initializeWebDownloadModal( + "test-capture-uuid", + "capture", + captureBtn, + ); - // Test that the method exists and can be called without throwing - await expect( - downloadManager.handleCaptureDownload(captureUuid, mockButton), - ).resolves.not.toThrow(); + expect(spy).toHaveBeenCalledWith( + "webDownloadModal-test-capture-uuid", + captureBtn, + "test-capture-uuid", + ); + spy.mockRestore(); }); test("should show permission error for capture download", async () => { @@ -352,6 +368,160 @@ describe("DownloadActionManager", () => { }); }); + describe("initializeCaptureDownloadSlider", () => { + const MODAL_ID = "webDownloadModal-test-uuid"; + let mockSliderEl; + let mockNoUiSliderCreate; + let mockSliderInstance; + + function stubEl() { + return { + textContent: "", + value: "", + dataset: {}, + classList: { add: jest.fn(), remove: jest.fn() }, + disabled: false, + addEventListener: jest.fn(), + }; + } + + /** Modal root with querySelector("#id") like the real DOM */ + function mockWebDownloadModal(elementMap) { + const map = elementMap || {}; + return { + dataset: {}, + querySelector: jest.fn((sel) => { + const id = sel.startsWith("#") ? 
sel.slice(1) : sel; + if (Object.prototype.hasOwnProperty.call(map, id)) { + return map[id]; + } + return stubEl(); + }), + }; + } + + beforeEach(() => { + downloadManager = new DownloadActionManager({ + permissions: mockPermissions, + }); + mockSliderInstance = { + on: jest.fn(), + destroy: jest.fn(), + set: jest.fn(), + }; + mockSliderEl = { + noUiSlider: null, + dataset: {}, + }; + mockNoUiSliderCreate = jest.fn(() => { + mockSliderEl.noUiSlider = mockSliderInstance; + }); + // Slider path touches formatFileSize on totalSizeLabel + global.window.DOMUtils = { + ...global.window.DOMUtils, + formatFileSize: jest.fn((n) => `${n} B`), + }; + }); + + test("should return early when modal root element is missing", () => { + document.getElementById = jest.fn(() => null); + global.noUiSlider = { create: mockNoUiSliderCreate }; + + downloadManager.initializeCaptureDownloadSlider( + MODAL_ID, + 10000, + 1000, + {}, + ); + expect(mockNoUiSliderCreate).not.toHaveBeenCalled(); + }); + + test("should return early when temporalFilterSlider element is missing", () => { + const modal = mockWebDownloadModal({ temporalFilterSlider: null }); + document.getElementById = jest.fn((id) => + id === MODAL_ID ? modal : null, + ); + global.noUiSlider = { create: mockNoUiSliderCreate }; + + downloadManager.initializeCaptureDownloadSlider( + MODAL_ID, + 10000, + 1000, + {}, + ); + expect(mockNoUiSliderCreate).not.toHaveBeenCalled(); + }); + + test("should return early when noUiSlider is undefined", () => { + const originalNoUiSlider = global.noUiSlider; + global.noUiSlider = undefined; + const modal = mockWebDownloadModal({ + temporalFilterSlider: mockSliderEl, + }); + document.getElementById = jest.fn((id) => + id === MODAL_ID ? 
modal : null, + ); + + expect(() => { + downloadManager.initializeCaptureDownloadSlider( + MODAL_ID, + 10000, + 1000, + {}, + ); + }).not.toThrow(); + + global.noUiSlider = originalNoUiSlider; + }); + + test("should create slider and set modal dataset and range hint when slider and noUiSlider exist", () => { + const rangeHintEl = { textContent: "" }; + const webDownloadModal = mockWebDownloadModal({ + temporalFilterSlider: mockSliderEl, + temporalRangeHint: rangeHintEl, + }); + document.getElementById = jest.fn((id) => + id === MODAL_ID ? webDownloadModal : null, + ); + global.noUiSlider = { create: mockNoUiSliderCreate }; + + downloadManager.initializeCaptureDownloadSlider(MODAL_ID, 5000, 500, { + dataFilesCount: 10, + totalFilesCount: 12, + totalSize: 1000000, + }); + + expect(mockNoUiSliderCreate).toHaveBeenCalledWith( + mockSliderEl, + expect.objectContaining({ + start: [0, 5000], + connect: true, + step: 500, + range: { min: 0, max: 5000 }, + }), + ); + expect(webDownloadModal.dataset.durationMs).toBe("5000"); + expect(webDownloadModal.dataset.fileCadenceMs).toBe("500"); + expect(rangeHintEl.textContent).toBe("0 – 5000 ms"); + }); + + test("should not create slider when durationMs is 0", () => { + const rangeHintEl = { textContent: "" }; + const modal = mockWebDownloadModal({ + temporalFilterSlider: mockSliderEl, + temporalRangeHint: rangeHintEl, + }); + document.getElementById = jest.fn((id) => + id === MODAL_ID ? 
modal : null, + ); + global.noUiSlider = { create: mockNoUiSliderCreate }; + + downloadManager.initializeCaptureDownloadSlider(MODAL_ID, 0, 1000, {}); + + expect(mockNoUiSliderCreate).not.toHaveBeenCalled(); + }); + }); + describe("Web Download Modal", () => { beforeEach(() => { downloadManager = new DownloadActionManager({ @@ -359,9 +529,11 @@ describe("DownloadActionManager", () => { }); }); - test("should have openWebDownloadModal method", () => { - expect(downloadManager.openWebDownloadModal).toBeDefined(); - expect(typeof downloadManager.openWebDownloadModal).toBe("function"); + test("should have initializeWebDownloadModal method", () => { + expect(downloadManager.initializeWebDownloadModal).toBeDefined(); + expect(typeof downloadManager.initializeWebDownloadModal).toBe( + "function", + ); }); test("should have openSDKDownloadModal method", () => { @@ -369,7 +541,7 @@ describe("DownloadActionManager", () => { expect(typeof downloadManager.openSDKDownloadModal).toBe("function"); }); - test("should use DOMUtils.openModal for opening modals", () => { + test("should use DOMUtils.openModal for opening modals", async () => { const modalId = "webDownloadModal-test-uuid"; const confirmBtn = { dataset: {}, @@ -392,7 +564,22 @@ describe("DownloadActionManager", () => { return null; }); - downloadManager.openWebDownloadModal("test-uuid"); + const btn = { + innerHTML: "Download", + disabled: false, + dataset: {}, + getAttribute: jest.fn((attr) => { + if (attr === "data-item-uuid") return "test-uuid"; + if (attr === "data-item-type") return "dataset"; + return null; + }), + }; + + await downloadManager.initializeWebDownloadModal( + "test-uuid", + "dataset", + btn, + ); expect(global.window.DOMUtils.openModal).toHaveBeenCalledWith(modalId); }); @@ -405,14 +592,11 @@ describe("DownloadActionManager", () => { }); }); - test("should have handleDatasetDownload method", () => { - expect(downloadManager.handleDatasetDownload).toBeDefined(); - expect(typeof 
downloadManager.handleDatasetDownload).toBe("function"); - }); - - test("should have handleCaptureDownload method", () => { - expect(downloadManager.handleCaptureDownload).toBeDefined(); - expect(typeof downloadManager.handleCaptureDownload).toBe("function"); + test("should have initializeWebDownloadModal method", () => { + expect(downloadManager.initializeWebDownloadModal).toBeDefined(); + expect(typeof downloadManager.initializeWebDownloadModal).toBe( + "function", + ); }); }); @@ -456,7 +640,7 @@ describe("DownloadActionManager", () => { document.querySelectorAll.mockReturnValue([]); expect(() => { - downloadManager.initializeDatasetDownloadButtons(); + downloadManager.initializeWebDownloadButtons(); }).not.toThrow(); }); diff --git a/gateway/sds_gateway/static/js/components.js b/gateway/sds_gateway/static/js/components.js index 2d802eb39..8bb020a5c 100644 --- a/gateway/sds_gateway/static/js/components.js +++ b/gateway/sds_gateway/static/js/components.js @@ -23,18 +23,6 @@ const ComponentUtils = { return div.innerHTML; }, - /** - * Formats file size in human readable format - * @param {number} bytes - File size in bytes - * @returns {string} Formatted file size - */ - formatFileSize(bytes) { - if (bytes === 0) return "0 Bytes"; - const k = 1024; - const sizes = ["Bytes", "KB", "MB", "GB", "TB"]; - const i = Math.floor(Math.log(bytes) / Math.log(k)); - return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`; - }, /** * Formats date for display with date and time on separate lines @@ -335,19 +323,6 @@ class CapturesTableManager extends TableManager { return; } - // Handle download capture button clicks from actions dropdown - if ( - e.target.matches(".download-capture-btn") || - e.target.closest(".download-capture-btn") - ) { - e.preventDefault(); - const button = e.target.matches(".download-capture-btn") - ? 
e.target - : e.target.closest(".download-capture-btn"); - this.handleDownloadCapture(button); - return; - } - // Handle capture link clicks if ( e.target.matches(".capture-link") || @@ -379,145 +354,6 @@ class CapturesTableManager extends TableManager { document.addEventListener("click", this.eventDelegationHandler); } - /** - * Handle download capture action - */ - handleDownloadCapture(button) { - const captureUuid = button.dataset.captureUuid; - const captureName = - button.dataset.captureName || button.dataset.captureUuid; - - if (!captureUuid) { - console.error("No capture UUID found for download"); - return; - } - - // Update modal content - document.getElementById("downloadCaptureName").textContent = captureName; - - // Show the modal - this.openCustomModal("downloadModal"); - - // Handle confirm download - document.getElementById("confirmDownloadBtn").onclick = () => { - // Close modal first - this.closeCustomModal("downloadModal"); - - // Show loading state - const originalContent = button.innerHTML; - button.innerHTML = ' Processing...'; - button.disabled = true; - - // Make API request using the unified download endpoint - fetch(`/users/download-item/capture/${captureUuid}/`, { - method: "POST", - headers: { - "Content-Type": "application/json", - "X-CSRFToken": this.getCSRFToken(), - }, - }) - .then((response) => response.json()) - .then((data) => { - if (data.success === true) { - button.innerHTML = - ' Download Requested'; - this.showDownloadSuccessMessage(data.message); - } else { - button.innerHTML = - ' Request Failed'; - this.showDownloadErrorMessage( - data.detail || - data.message || - "Download request failed. 
Please try again.", - ); - } - }) - .catch((error) => { - console.error("Download error:", error); - button.innerHTML = - ' Request Failed'; - this.showDownloadErrorMessage( - "An error occurred while processing your request.", - ); - }) - .finally(() => { - // Reset button after 3 seconds - setTimeout(() => { - button.innerHTML = originalContent; - button.disabled = false; - }, 3000); - }); - }; - } - - /** - * Show download success message - */ - showDownloadSuccessMessage(message) { - // Try to find an existing alert container or create one - let alertContainer = document.querySelector(".alert-container"); - if (!alertContainer) { - alertContainer = document.createElement("div"); - alertContainer.className = "alert-container"; - // Insert at the top of the main content area - const mainContent = - document.querySelector(".container-fluid") || document.body; - mainContent.insertBefore(alertContainer, mainContent.firstChild); - } - - const alertHtml = ` - - `; - - alertContainer.innerHTML = alertHtml; - - // Auto-dismiss after 5 seconds - setTimeout(() => { - const alert = alertContainer.querySelector(".alert"); - if (alert) { - alert.remove(); - } - }, 5000); - } - - /** - * Show download error message - */ - showDownloadErrorMessage(message) { - // Try to find an existing alert container or create one - let alertContainer = document.querySelector(".alert-container"); - if (!alertContainer) { - alertContainer = document.createElement("div"); - alertContainer.className = "alert-container"; - // Insert at the top of the main content area - const mainContent = - document.querySelector(".container-fluid") || document.body; - mainContent.insertBefore(alertContainer, mainContent.firstChild); - } - - const alertHtml = ` - - `; - - alertContainer.innerHTML = alertHtml; - - // Auto-dismiss after 8 seconds (longer for error messages) - setTimeout(() => { - const alert = alertContainer.querySelector(".alert"); - if (alert) { - alert.remove(); - } - }, 8000); - } - 
renderRow(capture, index) { // Sanitize all data before rendering const safeData = { @@ -1674,7 +1510,7 @@ class ModalManager {

Total Size: - ${ComponentUtils.formatFileSize(totalSize)} + ${window.DOMUtils.formatFileSize(totalSize)}

@@ -1703,7 +1539,7 @@ class ModalManager { // Primary file information - most useful for users if (file.size) { metadata.push( - `Size: ${ComponentUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`, + `Size: ${window.DOMUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`, ); } diff --git a/gateway/sds_gateway/static/js/core/DOMUtils.js b/gateway/sds_gateway/static/js/core/DOMUtils.js index 902196232..3309538f5 100644 --- a/gateway/sds_gateway/static/js/core/DOMUtils.js +++ b/gateway/sds_gateway/static/js/core/DOMUtils.js @@ -18,11 +18,17 @@ class DOMUtils { * @returns {string} Formatted file size */ formatFileSize(bytes) { - if (bytes === 0) return "0 Bytes"; - const k = 1024; - const sizes = ["Bytes", "KB", "MB", "GB", "TB"]; - const i = Math.floor(Math.log(bytes) / Math.log(k)); - return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`; + const n = Number(bytes); + if (!Number.isFinite(n) || n < 0) return "0 bytes"; + if (n === 0) return "0 bytes"; + const units = ["bytes", "KB", "MB", "GB"]; + let i = 0; + let v = n; + while (v >= 1024 && i < units.length - 1) { + v /= 1024; + i++; + } + return (i === 0 ? v : v.toFixed(2)) + " " + units[i]; } /** diff --git a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js index cc6e03c0b..e3fa07f1d 100644 --- a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js +++ b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js @@ -82,6 +82,9 @@ class PageLifecycleManager { case "capture-list": this.initializeCaptureListPage(); break; + case "published-datasets-list": + this.initializePublishedDatasetsListPage(); + break; default: console.warn(`Unknown page type: ${this.pageType}`); } @@ -165,6 +168,32 @@ class PageLifecycleManager { this.initializeCaptureModals(); } + /** + * Published datasets search page: pagination + dataset modals (same modal wiring as dataset list, no sort UI). 
+ */ + initializePublishedDatasetsListPage() { + this.initializePagination(); + this.initializeDatasetModals(); + } + + + /** + * Single DownloadActionManager for document-wide .web-download-btn / SDK buttons (not per modal). + */ + ensureDownloadActionManager() { + if ( + this.downloadActionManager || + !this.permissions || + !window.DownloadActionManager + ) { + return; + } + this.downloadActionManager = new window.DownloadActionManager({ + permissions: this.permissions, + }); + this.managers.push(this.downloadActionManager); + } + /** * Initialize search handlers */ @@ -269,6 +298,10 @@ class PageLifecycleManager { * Initialize dataset modals */ initializeDatasetModals() { + // TODO: Refactor this to align all modal initialization + // with a single manager instance per modal type. + // Plan to do this on a future PR. + // Pre-initialize all modals on the page with proper config to prevent Bootstrap auto-initialization errors const allModals = document.querySelectorAll(".modal"); for (const modal of allModals) { @@ -299,6 +332,7 @@ class PageLifecycleManager { for (const modal of datasetModals) { const itemUuid = modal.getAttribute("data-item-uuid"); + const itemType = modal.getAttribute("data-item-type"); if (!itemUuid || !this.permissions) { console.warn( @@ -309,9 +343,9 @@ class PageLifecycleManager { if (window.ShareActionManager) { const shareManager = new window.ShareActionManager({ - itemUuid: itemUuid, - itemType: "dataset", permissions: this.permissions, + itemUuid: itemUuid, + itemType: itemType, }); this.managers.push(shareManager); @@ -321,28 +355,18 @@ class PageLifecycleManager { if (window.VersioningActionManager && !modal.versioningActionManager) { const versioningManager = new window.VersioningActionManager({ - datasetUuid: itemUuid, permissions: this.permissions, + datasetUuid: itemUuid, }); this.managers.push(versioningManager); modal.versioningActionManager = versioningManager; } - if (window.DownloadActionManager) { - const downloadManager 
= new window.DownloadActionManager({ - permissions: this.permissions, - }); - this.managers.push(downloadManager); - - // Store reference on modal - modal.downloadActionManager = downloadManager; - } - if (window.DetailsActionManager) { const detailsManager = new window.DetailsActionManager({ permissions: this.permissions, itemUuid: itemUuid, - itemType: "dataset", + itemType: itemType, }); this.managers.push(detailsManager); @@ -350,18 +374,25 @@ class PageLifecycleManager { modal.detailsActionManager = detailsManager; } } + + this.ensureDownloadActionManager(); } /** * Initialize capture modals */ initializeCaptureModals() { + // TODO: Refactor this to align all modal initialization + // with a single manager instance per modal type. + // Plan to do this on a future PR. + const captureModals = document.querySelectorAll( ".modal[data-item-type='capture']", ); for (const modal of captureModals) { const itemUuid = modal.getAttribute("data-item-uuid"); + const itemType = modal.getAttribute("data-item-type"); if (!itemUuid || !this.permissions) { console.warn( @@ -372,28 +403,18 @@ class PageLifecycleManager { if (window.ShareActionManager) { const shareManager = new window.ShareActionManager({ - itemUuid: itemUuid, - itemType: "capture", permissions: this.permissions, + itemUuid: itemUuid, + itemType: itemType, }); this.managers.push(shareManager); // Store reference on modal modal.shareActionManager = shareManager; } - - if (window.DownloadActionManager) { - const downloadManager = new window.DownloadActionManager({ - itemUuid: itemUuid, - itemType: "capture", - permissions: this.permissions, - }); - this.managers.push(downloadManager); - - // Store reference on modal - modal.downloadActionManager = downloadManager; - } } + + this.ensureDownloadActionManager(); } /** diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js index aba3069c9..6c660c27d 100644 --- a/gateway/sds_gateway/static/js/file-list.js +++ 
b/gateway/sds_gateway/static/js/file-list.js @@ -1,3 +1,9 @@ +/** + * TODO: This file has a lot of redundancy with manager files + * And needs to be deprecated. and have its functionality migrated + * to the new JS structure. + */ + /* File List Page JavaScript - Refactored to use Components */ /** @@ -711,6 +717,14 @@ class FileListCapturesTableManager extends CapturesTableManager { centerFrequencyGhz: ComponentUtils.escapeHtml( capture.center_frequency_ghz || "", ), + lengthOfCaptureMs: capture.length_of_capture_ms ?? 0, + fileCadenceMs: capture.file_cadence_ms ?? 1000, + perDataFileSize: capture.per_data_file_size ?? 0, + totalSize: capture.total_file_size ?? 0, + dataFilesCount: capture.data_files_count ?? 0, + dataFilesTotalSize: capture.data_files_total_size ?? 0, + totalFilesCount: capture.files.length ?? 0, + captureStartEpochSec: capture.capture_start_epoch_sec ?? 0, }; let typeDisplay = safeData.captureTypeDisplay || safeData.captureType; @@ -835,7 +849,15 @@ class FileListCapturesTableManager extends CapturesTableManager { diff --git a/gateway/sds_gateway/templates/base.html b/gateway/sds_gateway/templates/base.html index 2e14c130a..b121a037d 100644 --- a/gateway/sds_gateway/templates/base.html +++ b/gateway/sds_gateway/templates/base.html @@ -19,6 +19,10 @@ href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css" /> + + {% block css %} @@ -42,6 +46,7 @@ + {% endblock javascript %} {# djlint:off H021 #} @@ -226,6 +231,7 @@ {# Removed JS that was hiding/showing the body #} {% endblock inline_javascript %} +