diff --git a/datasets/bathy/docker/oceaneyes/runner.py b/datasets/bathy/docker/oceaneyes/runner.py index 9819dddb..00c5b76a 100644 --- a/datasets/bathy/docker/oceaneyes/runner.py +++ b/datasets/bathy/docker/oceaneyes/runner.py @@ -40,11 +40,11 @@ import time import json -#from cshelph import c_shelph as CSHELPH -#from medianfilter import medianmodel as MEDIANFILTER -#from bathypathfinder.BathyPathFinder import BathyPathSearch -#from pointnet.pointnet2 import PointNet2 -#from openoceans.openoceans import OpenOceans +from cshelph import c_shelph as CSHELPH +from medianfilter import medianmodel as MEDIANFILTER +from bathypathfinder.BathyPathFinder import BathyPathSearch +from pointnet.pointnet2 import PointNet2 +from openoceans.openoceans import OpenOceans import warnings warnings.simplefilter(action='ignore', category=FutureWarning) @@ -264,11 +264,11 @@ def runClassifier(classifier, classifier_func, num_processes=6): return duration # call runners -#profile["cshelph"] = runClassifier("cshelph", cshelph) -#profile["medianfilter"] = runClassifier("medianfilter", medianfilter) -#profile["bathypathfinder"] = runClassifier("bathypathfinder", bathypathfinder) -#profile["pointnet"] = runClassifier("pointnet", pointnet, num_processes=1) -#profile["ensemble"] = runClassifier("ensemble", ensemble) +profile["cshelph"] = runClassifier("cshelph", cshelph) +profile["medianfilter"] = runClassifier("medianfilter", medianfilter) +profile["bathypathfinder"] = runClassifier("bathypathfinder", bathypathfinder) +profile["pointnet"] = runClassifier("pointnet", pointnet, num_processes=1) +profile["ensemble"] = runClassifier("ensemble", ensemble) # ##################### # DataFrame & MetaData @@ -311,17 +311,17 @@ def runClassifier(classifier, classifier_func, num_processes=6): "bathy_min_depth": bathy_df.depth.min(), "bathy_max_depth": bathy_df.depth.max(), "bathy_std_depth": bathy_df.depth.std(), - "subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean(), - 
"subaqueous_min_uncertainty": bathy_df.subaqueous_sigma_tvu.min(), - "subaqueous_max_uncertainty": bathy_df.subaqueous_sigma_tvu.max(), - "subaqueous_std_uncertainty": bathy_df.subaqueous_sigma_tvu.std() + "subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean().item(), + "subaqueous_min_uncertainty": bathy_df.subaqueous_sigma_tvu.min().item(), + "subaqueous_max_uncertainty": bathy_df.subaqueous_sigma_tvu.max().item(), + "subaqueous_std_uncertainty": bathy_df.subaqueous_sigma_tvu.std().item() } # read versions -#with open("cshelph/cshelph_version.txt") as file: -# cshelph_version = file.read() -#with open("medianfilter/medianfilter_version.txt") as file: -# medianfilter_version = file.read() +with open("cshelph/cshelph_version.txt") as file: + cshelph_version = file.read() +with open("medianfilter/medianfilter_version.txt") as file: + medianfilter_version = file.read() # update profile profile["total_duration"] = time.time() - settings["latch"] @@ -332,8 +332,8 @@ def runClassifier(classifier, classifier_func, num_processes=6): "sliderule": json.dumps(rqst_parms), "profile": json.dumps(profile), "stats": json.dumps(stats), -# "cshelph": cshelph_version, -# "medianfilter": medianfilter_version + "cshelph": cshelph_version, + "medianfilter": medianfilter_version } # ##################### @@ -367,21 +367,18 @@ def runClassifier(classifier, classifier_func, num_processes=6): "begin_time": extent_begin_time, "end_time": extent_end_time }) - print(metadata["extent"]) # write ISO XML file - atl24_filename = "ATL24_TEST.h5" #settings["atl03_filename"].replace("ATL03", "ATL24") now = datetime.datetime.now() - generation_time = f'{now.year}-{now.month:02}-{now.day:02}T{now.hour:02}:{now.minute:02}:{now.second}.000000Z' with open("atl24_iso_xml_template.txt", 'r') as template_file: template = template_file.read() - template = template.replace("$FILENAME", atl24_filename) - template = template.replace("$GENERATION_TIME", generation_time) + template = 
template.replace("$FILENAME", settings["atl24_filename"]) + template = template.replace("$GENERATION_TIME", f'{now.year}-{now.month:02}-{now.day:02}T{now.hour:02}:{now.minute:02}:{now.second:02}.000000Z') template = template.replace("$EXTENT_POLYGON", extent_polygon) template = template.replace("$EXTENT_BEGIN_TIME", extent_begin_time) template = template.replace("$EXTENT_END_TIME", extent_end_time) template = template.replace("$SLIDERULE_VERSION", rqst_parms["sliderule_version"]) - with open(atl24_filename + ".iso.xml", "w") as iso_xml_file: + with open(settings["iso_xml_filename"], "w") as iso_xml_file: iso_xml_file.write(template) # helper function that adds a variable diff --git a/datasets/bathy/endpoints/atl24g.lua b/datasets/bathy/endpoints/atl24g.lua index 4837ea60..e89d3d41 100644 --- a/datasets/bathy/endpoints/atl24g.lua +++ b/datasets/bathy/endpoints/atl24g.lua @@ -19,7 +19,7 @@ local start_time = time.gps() -- used for timeout handling -- function: cleanup ------------------------------------------------------- local function cleanup(_crenv, _transaction_id) --- runner.cleanup(_crenv) -- container runtime environment + runner.cleanup(_crenv) -- container runtime environment core.orchunlock({_transaction_id}) -- unlock transaction end @@ -300,7 +300,8 @@ outputs["profile"] = profile outputs["format"] = parms["output"]["format"] outputs["filename"] = crenv.container_sandbox_mount.."/"..tmp_filename outputs["ensemble"] = parms["ensemble"] or {ensemble_model_filename=string.format("%s/%s", cre.HOST_DIRECTORY, bathy.ENSEMBLE_MODEL)} -outputs["atl03_filename"] = parms["resource"] +outputs["iso_xml_filename"] = crenv.container_sandbox_mount.."/"..tmp_filename..".iso.xml" +outputs["atl24_filename"] = string.gsub(parms["resource"], "ATL03", "ATL24") outputs["latch"] = latch ------------------------------------------------------- @@ -316,10 +317,22 @@ local container = runner.execute(crenv, container_parms, { ["settings.json"] = o runner.wait(container, timeout)
------------------------------------------------------- --- send final output to user +-- send final granule output to user ------------------------------------------------------- arrow.send2user(crenv.host_sandbox_directory.."/"..tmp_filename, parms, rspq) +------------------------------------------------------- +-- send ISO XML file to user +------------------------------------------------------- +if parms["output"]["format"] == "h5" then + local xml_parms = core.parms({ + output = { + asset=rqst["parms"]["output"]["asset"], -- use original request asset + path=rqst["parms"]["output"]["path"]..".iso.xml" -- modify the original requested path + } + }) + arrow.send2user(crenv.host_sandbox_directory.."/"..tmp_filename..".iso.xml", xml_parms, rspq) +end ------------------------------------------------------- -- exit ------------------------------------------------------- diff --git a/packages/arrow/ArrowCommon.cpp b/packages/arrow/ArrowCommon.cpp index e1f3a074..e1a0aaa1 100644 --- a/packages/arrow/ArrowCommon.cpp +++ b/packages/arrow/ArrowCommon.cpp @@ -279,7 +279,7 @@ bool send2Client (const char* fileName, const char* outPath, const ArrowFields* fseek(fp, 0L, SEEK_SET); /* Log Status */ - mlog(INFO, "Writing file %s of size %ld", fileName, file_size); + mlog(INFO, "Sending file %s of size %ld to %s", fileName, file_size, outPath); do {