
Commit

iso xml output working for atl24
jpswinski committed Oct 16, 2024
1 parent ff68c9d commit ab6dd3f
Showing 3 changed files with 40 additions and 30 deletions.
49 changes: 23 additions & 26 deletions datasets/bathy/docker/oceaneyes/runner.py
@@ -40,11 +40,11 @@
import time
import json

-#from cshelph import c_shelph as CSHELPH
-#from medianfilter import medianmodel as MEDIANFILTER
-#from bathypathfinder.BathyPathFinder import BathyPathSearch
-#from pointnet.pointnet2 import PointNet2
-#from openoceans.openoceans import OpenOceans
+from cshelph import c_shelph as CSHELPH
+from medianfilter import medianmodel as MEDIANFILTER
+from bathypathfinder.BathyPathFinder import BathyPathSearch
+from pointnet.pointnet2 import PointNet2
+from openoceans.openoceans import OpenOceans

import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
@@ -264,11 +264,11 @@ def runClassifier(classifier, classifier_func, num_processes=6):
return duration

# call runners
-#profile["cshelph"] = runClassifier("cshelph", cshelph)
-#profile["medianfilter"] = runClassifier("medianfilter", medianfilter)
-#profile["bathypathfinder"] = runClassifier("bathypathfinder", bathypathfinder)
-#profile["pointnet"] = runClassifier("pointnet", pointnet, num_processes=1)
-#profile["ensemble"] = runClassifier("ensemble", ensemble)
+profile["cshelph"] = runClassifier("cshelph", cshelph)
+profile["medianfilter"] = runClassifier("medianfilter", medianfilter)
+profile["bathypathfinder"] = runClassifier("bathypathfinder", bathypathfinder)
+profile["pointnet"] = runClassifier("pointnet", pointnet, num_processes=1)
+profile["ensemble"] = runClassifier("ensemble", ensemble)

# #####################
# DataFrame & MetaData
@@ -311,17 +311,17 @@ def runClassifier(classifier, classifier_func, num_processes=6):
"bathy_min_depth": bathy_df.depth.min(),
"bathy_max_depth": bathy_df.depth.max(),
"bathy_std_depth": bathy_df.depth.std(),
"subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean(),
"subaqueous_min_uncertainty": bathy_df.subaqueous_sigma_tvu.min(),
"subaqueous_max_uncertainty": bathy_df.subaqueous_sigma_tvu.max(),
"subaqueous_std_uncertainty": bathy_df.subaqueous_sigma_tvu.std()
"subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean().item(),
"subaqueous_min_uncertainty": bathy_df.subaqueous_sigma_tvu.min().item(),
"subaqueous_max_uncertainty": bathy_df.subaqueous_sigma_tvu.max().item(),
"subaqueous_std_uncertainty": bathy_df.subaqueous_sigma_tvu.std().item()
}

# read versions
-#with open("cshelph/cshelph_version.txt") as file:
-#    cshelph_version = file.read()
-#with open("medianfilter/medianfilter_version.txt") as file:
-#    medianfilter_version = file.read()
+with open("cshelph/cshelph_version.txt") as file:
+    cshelph_version = file.read()
+with open("medianfilter/medianfilter_version.txt") as file:
+    medianfilter_version = file.read()

# update profile
profile["total_duration"] = time.time() - settings["latch"]
@@ -332,8 +332,8 @@ def runClassifier(classifier, classifier_func, num_processes=6):
"sliderule": json.dumps(rqst_parms),
"profile": json.dumps(profile),
"stats": json.dumps(stats),
# "cshelph": cshelph_version,
# "medianfilter": medianfilter_version
"cshelph": cshelph_version,
"medianfilter": medianfilter_version
}

# #####################
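A note on the .item() calls added to the subaqueous uncertainty stats above: if the subaqueous_sigma_tvu column is stored as 32-bit floats, pandas reductions return NumPy float32 scalars, which the standard json module refuses to serialize when json.dumps(stats) runs. A minimal sketch of the failure and the fix follows; the DataFrame contents and the float32 dtype are assumptions made for illustration, only the column name comes from the diff.

import json

import numpy as np
import pandas as pd

# Assumption for illustration: the uncertainty column is stored as 32-bit floats.
bathy_df = pd.DataFrame({"subaqueous_sigma_tvu": np.array([0.12, 0.34, 0.56], dtype=np.float32)})

# A NumPy float32 scalar is not serializable by the standard json encoder...
try:
    json.dumps({"subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean()})
except TypeError as err:
    print(err)  # Object of type float32 is not JSON serializable

# ...while .item() converts it to a built-in Python float, which json.dumps accepts.
print(json.dumps({"subaqueous_mean_uncertainty": bathy_df.subaqueous_sigma_tvu.mean().item()}))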
@@ -367,21 +367,18 @@ def runClassifier(classifier, classifier_func, num_processes=6):
"begin_time": extent_begin_time,
"end_time": extent_end_time
})
-print(metadata["extent"])

# write ISO XML file
-atl24_filename = "ATL24_TEST.h5" #settings["atl03_filename"].replace("ATL03", "ATL24")
now = datetime.datetime.now()
-generation_time = f'{now.year}-{now.month:02}-{now.day:02}T{now.hour:02}:{now.minute:02}:{now.second}.000000Z'
with open("atl24_iso_xml_template.txt", 'r') as template_file:
template = template_file.read()
template = template.replace("$FILENAME", atl24_filename)
template = template.replace("$GENERATION_TIME", generation_time)
template = template.replace("$FILENAME", settings["atl24_filename"])
template = template.replace("$GENERATION_TIME", f'{now.year}-{now.month:02}-{now.day:02}T{now.hour:02}:{now.minute:02}:{now.second}.000000Z')
template = template.replace("$EXTENT_POLYGON", extent_polygon)
template = template.replace("$EXTENT_BEGIN_TIME", extent_begin_time)
template = template.replace("$EXTENT_END_TIME", extent_end_time)
template = template.replace("$SLIDERULE_VERSION", rqst_parms["sliderule_version"])
-with open(atl24_filename + ".iso.xml", "w") as iso_xml_file:
+with open(settings["iso_xml_filename"], "w") as iso_xml_file:
iso_xml_file.write(template)

# helper function that adds a variable
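For context on the ISO XML step above: the runner fills the template by plain string substitution of $NAME placeholders and writes the result to the path supplied by the Lua endpoint (the outputs table passed in as settings.json). A self-contained sketch of that pattern follows; the two-line template and the file names are invented for illustration and are not the contents of atl24_iso_xml_template.txt.

import datetime

# Invented stand-ins; the real values come from settings.json and atl24_iso_xml_template.txt.
settings = {"atl24_filename": "ATL24_example.h5", "iso_xml_filename": "ATL24_example.h5.iso.xml"}
template = "<fileIdentifier>$FILENAME</fileIdentifier>\n<dateStamp>$GENERATION_TIME</dateStamp>\n"

# Timestamp built field by field as in the diff; now.strftime("%Y-%m-%dT%H:%M:%S.000000Z") would be equivalent.
now = datetime.datetime.now()
generation_time = f'{now.year}-{now.month:02}-{now.day:02}T{now.hour:02}:{now.minute:02}:{now.second:02}.000000Z'

# Substitute the placeholders and write the sidecar metadata file.
template = template.replace("$FILENAME", settings["atl24_filename"])
template = template.replace("$GENERATION_TIME", generation_time)
with open(settings["iso_xml_filename"], "w") as iso_xml_file:
    iso_xml_file.write(template)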
19 changes: 16 additions & 3 deletions datasets/bathy/endpoints/atl24g.lua
@@ -19,7 +19,7 @@ local start_time = time.gps() -- used for timeout handling
-- function: cleanup
-------------------------------------------------------
local function cleanup(_crenv, _transaction_id)
--- runner.cleanup(_crenv) -- container runtime environment
+runner.cleanup(_crenv) -- container runtime environment
core.orchunlock({_transaction_id}) -- unlock transaction
end

@@ -300,7 +300,8 @@ outputs["profile"] = profile
outputs["format"] = parms["output"]["format"]
outputs["filename"] = crenv.container_sandbox_mount.."/"..tmp_filename
outputs["ensemble"] = parms["ensemble"] or {ensemble_model_filename=string.format("%s/%s", cre.HOST_DIRECTORY, bathy.ENSEMBLE_MODEL)}
outputs["atl03_filename"] = parms["resource"]
outputs["iso_xml_filename"] = crenv.container_sandbox_mount.."/"..tmp_filename..".iso.xml"
outputs["atl24_filename"] = string.gsub(parms["resource"], "ATL03", "ATL24")
outputs["latch"] = latch

-------------------------------------------------------
@@ -316,10 +317,22 @@ local container = runner.execute(crenv, container_parms, { ["settings.json"] = o
runner.wait(container, timeout)

-------------------------------------------------------
--- send final output to user
+-- send final granule output to user
-------------------------------------------------------
arrow.send2user(crenv.host_sandbox_directory.."/"..tmp_filename, parms, rspq)

+-------------------------------------------------------
+-- send ISO XML file to user
+-------------------------------------------------------
+if parms["output"]["format"] == "h5" then
+    local xml_parms = core.parms({
+        output = {
+            asset=rqst["parms"]["output"]["asset"], -- use original request asset
+            path=rqst["parms"]["output"]["path"]..".iso.xml" -- modify the original requested path
+        }
+    })
+    arrow.send2user(crenv.host_sandbox_directory.."/"..tmp_filename..".iso.xml", xml_parms, rspq)
+end
-------------------------------------------------------
-- exit
-------------------------------------------------------
2 changes: 1 addition & 1 deletion packages/arrow/ArrowCommon.cpp
@@ -279,7 +279,7 @@ bool send2Client (const char* fileName, const char* outPath, const ArrowFields*
fseek(fp, 0L, SEEK_SET);

/* Log Status */
mlog(INFO, "Writing file %s of size %ld", fileName, file_size);
mlog(INFO, "Sending file %s of size %ld to %s", fileName, file_size, outPath);

do
{
