Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion src/data/archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ def copy_to_AUVTCD(self, nc_file_base: Path, freq: str = FREQ) -> None: # noqa:
def copy_to_M3(self, resampled_nc_file: str) -> None:
pass

def copy_to_LRAUV(self, log_file: str, freq: str = FREQ) -> None: # noqa: C901, PLR0912
def copy_to_LRAUV(self, log_file: str, freq: str = FREQ) -> None: # noqa: C901, PLR0912, PLR0915
"Copy the intermediate and resampled netCDF file(s) to the archive LRAUV location"
src_dir = Path(BASE_LRAUV_PATH, Path(log_file).parent)
dst_dir = Path(LRAUV_VOL, Path(log_file).parent)
Expand Down Expand Up @@ -264,6 +264,22 @@ def copy_to_LRAUV(self, log_file: str, freq: str = FREQ) -> None: # noqa: C901,
"%-36s exists, but is not being archived because --clobber is not specified.",
src_file.name,
)

# Copy ODV/text product files created by create_products.py (e.g., *_Sipper.txt)
for src_file in src_dir.glob(f"{Path(log_file).stem}_*.txt"):
dst_file = Path(dst_dir, src_file.name)
if self.clobber:
if dst_file.exists():
self.logger.info("Removing %s", dst_file)
dst_file.unlink()
if src_file.exists():
shutil.copyfile(src_file, dst_file)
self.logger.info("copyfile %s %s done.", src_file, dst_dir)
elif src_file.exists():
self.logger.info(
"%-36s exists, but is not being archived because --clobber is not specified.",
src_file.name,
)
# Copy the processing.log file last so that we get everything
src_file = Path(src_dir, f"{Path(log_file).stem}_{LOG_NAME}")
dst_file = Path(dst_dir, src_file.name)
Expand Down
33 changes: 33 additions & 0 deletions src/data/test_process_lrauv.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
# noqa: INP001

from pathlib import Path

import numpy as np
import pandas as pd
import pytest
Expand Down Expand Up @@ -138,6 +140,37 @@ def test_lrauv_group_file_structure(complete_lrauv_processing):
ds.close()


def test_lrauv_archive_copies_sipper_text_product(tmp_path, monkeypatch):
    """Ensure LRAUV archiving copies generated _Sipper.txt product files."""
    import archive as archive_mod
    from archive import Archiver

    # Redirect the module-level source/destination roots into the pytest tmp dir.
    source_base = tmp_path / "lrauv_source"
    archive_base = tmp_path / "lrauv_archive"
    monkeypatch.setattr(archive_mod, "BASE_LRAUV_PATH", source_base)
    monkeypatch.setattr(archive_mod, "LRAUV_VOL", str(archive_base))

    log_file = (
        "daphne/missionlogs/2026/20260316_20260318/20260317T191958/202603171919_202603181628.nc4"
    )
    log_path = Path(log_file)
    mission_dir = log_path.parent
    for base in (source_base, archive_base):
        (base / mission_dir).mkdir(parents=True, exist_ok=True)

    # Drop a fake ODV/text product into the source tree, as create_products.py would.
    sipper_name = f"{log_path.stem}_1S_Sipper.txt"
    expected_text = "header\nrow\n"
    (source_base / mission_dir / sipper_name).write_text(expected_text, encoding="utf-8")

    archiver = Archiver(add_handlers=False, clobber=True, verbose=1)
    archiver.copy_to_LRAUV(log_file, freq="1S")

    # The product file must arrive in the archive tree with identical contents.
    copied_file = archive_base / mission_dir / sipper_name
    assert copied_file.exists()  # noqa: S101
    assert copied_file.read_text(encoding="utf-8") == expected_text  # noqa: S101


@pytest.mark.skip(reason="Full integration test - requires all processing modules")
def test_lrauv_full_pipeline(complete_lrauv_processing):
"""Test full LRAUV processing pipeline from logs to resampled data."""
Expand Down
Loading