From f04d9f099c97799c4b68c7c295cc3f6fdfb1efd9 Mon Sep 17 00:00:00 2001
From: Steven Y Lu
Date: Thu, 15 Dec 2022 17:39:14 -0800
Subject: [PATCH 01/14] add multithreading capability

---
 build_mosaic_glt.jl | 34 +++++++++++++++++++++++-----------
 1 file changed, 23 insertions(+), 11 deletions(-)

diff --git a/build_mosaic_glt.jl b/build_mosaic_glt.jl
index d691c8b..a3e84d7 100644
--- a/build_mosaic_glt.jl
+++ b/build_mosaic_glt.jl
@@ -102,11 +102,12 @@ function main()
     max_offset_distance = sqrt(sum(args.target_resolution.^2))*3
     pixel_buffer_window = 1
 
-    total_found = 0
+    lk = ReentrantLock()
     for (file_idx, igm_file) in enumerate(igm_files)
         @info "$igm_file"
         dataset = ArchGDAL.read(igm_file)
         igm = PermutedDimsArray(ArchGDAL.read(dataset), (2,1,3))
+
         if minimum(igm[..,1]) > grid[1,end-1,1] || maximum(igm[..,1]) < grid[1,1,1] ||
            minimum(igm[..,2]) > grid[1,1,2] || maximum(igm[..,2]) < grid[end-1,1,2]
             #println(minimum(igm[..,1]), " > ", grid[1,end-1,1], " ", maximum(igm[..,1]), " < ", grid[1,1,1])
@@ -119,8 +120,8 @@ function main()
             criteria_dataset = ArchGDAL.read(criteria_files[file_idx])
             criteria = PermutedDimsArray(ArchGDAL.read(criteria_dataset, args.criteria_band), (2,1))
         end
-        for _y=1:size(igm)[1]
-            for _x=1:size(igm)[2]
+        Threads.@threads for _y=1:size(igm)[1]
+            Threads.@threads for _x=1:size(igm)[2]
                 pt = igm[_y,_x,1:2]
                 closest_t = Array{Int64}([round((pt[2] - grid[1,1,2]) / args.target_resolution[2]),
                                           round((pt[1] - grid[1,1,1]) / args.target_resolution[1]) ]) .+ 1
@@ -131,7 +132,6 @@ function main()
                         closest[1] = closest_t[1] + xbuffer
                         closest[2] = closest_t[2] + ybuffer
 
-
                         if closest[1] < 1 || closest[2] < 1 || closest[1] > size(grid)[1] || closest[2] > size(grid)[2]
                             continue
                         end
@@ -146,16 +146,28 @@ function main()
                                 current_crit = criteria[_y, _x]
                             end
 
-                            if current_crit < best[closest[1], closest[2], 4]
-                                best[closest[1], closest[2], 1:3] = [_x, _y, file_idx]
-                                best[closest[1], closest[2], 4] = current_crit
+                            lock(lk)
+                            try
+                                if current_crit < best[closest[1], closest[2], 4]
+                                    best[closest[1], closest[2], 1:3] = [_x, _y, file_idx]
+                                    best[closest[1], closest[2], 4] = current_crit
+                                end
+                            finally
+                                unlock(lk)
                             end
+
                         elseif args.criteria_mode == "max"
                             current_crit = criteria[_y, _x]
-                            if current_crit > best[closest[1], closest[2], 4]
-                                best[closest[1], closest[2], 1:3] = [_x, _y, file_idx]
-                                best[closest[1], closest[2], 4] = current_crit
+                            lock(lk)
+                            try
+                                if current_crit > best[closest[1], closest[2], 4]
+                                    best[closest[1], closest[2], 1:3] = [_x, _y, file_idx]
+                                    best[closest[1], closest[2], 4] = current_crit
+                                end
+                            finally
+                                unlock(lk)
                             end
+
                         end
                     end
                 end
@@ -165,7 +177,7 @@ function main()
         end
     end
 
-    println(total_found, " ", sum(best[..,1] .!= -9999), " ", size(best)[1]*size(best)[2])
+    println(sum(best[..,1] .!= -9999), " ", size(best)[1]*size(best)[2])
     if args.mosaic == 1
         output = Array{Int32}(permutedims(best[..,1:3], (2,1,3)))
     else
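[PATCH 01/14] makes the inner GLT loops threaded and serializes every read-modify-write of the shared `best` array behind a single ReentrantLock, so two iterations cannot interleave between the comparison and the assignment. A minimal sketch of the same guarded-minimum pattern, written here in Python with hypothetical arrays (an illustration of the locking idea, not code from this repository):

    import threading
    import numpy as np

    best_crit = np.full((4, 4), np.inf)    # hypothetical shared criteria grid
    best_src = np.full((4, 4, 3), -9999)   # hypothetical (x, y, file_idx) records
    lock = threading.Lock()

    def consider(cell, candidate, crit):
        # The lock spans both the comparison and the write, so no other
        # thread can change the cell between the two operations.
        with lock:
            if crit < best_crit[cell]:
                best_crit[cell] = crit
                best_src[cell] = candidate

    consider((1, 2), (10, 20, 1), 0.5)  # e.g., raw pixel (10, 20) from file 1

A single coarse lock keeps the update correct at the cost of contention; per-cell locks or per-thread partial results would reduce that contention under the same invariant.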
From fbb34582d5e405b77de4e49a8b1f5eb6ee44b49f Mon Sep 17 00:00:00 2001
From: brodrick
Date: Sun, 16 Jul 2023 14:55:11 -0700
Subject: [PATCH 02/14] add serial apply_glt

---
 apply_glt_serial.py | 118 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 118 insertions(+)
 create mode 100644 apply_glt_serial.py

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
new file mode 100644
index 0000000..b1d5aa3
--- /dev/null
+++ b/apply_glt_serial.py
@@ -0,0 +1,118 @@
+import argparse
+import numpy as np
+import pandas as pd
+from osgeo import gdal
+from spectral.io import envi
+import logging
+from typing import List
+import time
+import os
+import multiprocessing
+import emit_utils.file_checks
+
+from emit_utils.file_checks import envi_header
+
+def _write_bil_chunk(dat, outfile, line, shape, dtype = 'float32'):
+    """
+    Write a chunk of data to a binary, BIL formatted data cube.
+    Args:
+        dat: data to write
+        outfile: output file to write to
+        line: line of the output file to write to
+        shape: shape of the output file
+        dtype: output data type
+
+    Returns:
+        None
+    """
+    outfile = open(outfile, 'rb+')
+    outfile.seek(line * shape[1] * shape[2] * np.dtype(dtype).itemsize)
+    outfile.write(dat.astype(dtype).tobytes())
+    outfile.close()
+
+
+
+def single_image_ortho(img_dat, glt, glt_nodata_value=-9999):
+    """Orthorectify a single image
+    Args:
+        img_dat (array like): raw input image
+        glt (array like): glt - 2 band 1-based indexing for output file(x, y)
+        glt_nodata_value (int, optional): Value from glt to ignore. Defaults to 0.
+    Returns:
+        array like: orthorectified version of img_dat
+    """
+    outdat = np.zeros((glt.shape[0], glt.shape[1], img_dat.shape[-1])) - 9999
+    valid_glt = np.all(glt != glt_nodata_value, axis=-1)
+    glt[valid_glt] -= 1 # account for 1-based indexing
+    outdat[valid_glt, :] = img_dat[glt[valid_glt, 1], glt[valid_glt, 0], :]
+    return outdat, valid_glt
+
+
+def main(input_args=None):
+    parser = argparse.ArgumentParser(description="Robust MF")
+    parser.add_argument('glt_file', type=str, metavar='GLT', help='path to glt image')
+    parser.add_argument('raw_file', type=str, metavar='RAW', help='path to raw image')
+    parser.add_argument('out_file', type=str, metavar='OUTPUT', help='path to output image')
+    parser.add_argument('--mosaic', action='store_true')
+    parser.add_argument('-b', type=int, nargs='+',default=-1)
+    args = parser.parse_args(input_args)
+
+
+    glt_dataset = envi.open(envi_header(args.glt_file))
+    glt = glt_dataset.open_memmap(writeable=False, interleave='bip').copy()
+    del glt_dataset
+    glt_dataset = gdal.Open(args.glt_file)
+
+    if args.mosaic:
+        rawspace_files = np.squeeze(np.array(pd.read_csv(args.rawspace_file, header=None)))
+        # TODO: make this check more elegant, should run, catch all files present exception, and proceed
+        if args.run_with_missing_files is False:
+            emit_utils.file_checks.check_raster_files(rawspace_files, map_space=False)
+        # TODO: check that all rawspace files have same number of bands
+    else:
+        emit_utils.file_checks.check_raster_files([args.rawspace_file], map_space=False)
+        rawspace_files = [args.rawspace_file]
+
+    ort_img = None
+    for rawfile in rawspace_files:
+        img_ds = envi.open(envi_header(rawfile))
+        if args.b[0] == -1:
+            inds = np.arange(int(img_ds.metadata['bands']))
+        else:
+            inds = np.array(args.b)
+        img_dat = img_ds.open_memmap(writeable=False, interleave='bip')[...,inds].copy()
+
+        if ort_img is None:
+            ort_img, _ = single_image_ortho(img_dat, glt)
+        else:
+            ort_img_update, valid_glt = single_image_ortho(img_dat, glt)
+            ort_img[valid_glt, :] = ort_img_update[valid_glt, :]
+
+    band_names = None
+    if 'band names' in envi.open(envi_header(args.raw_file)).metadata.keys():
+        band_names = np.array(envi.open(envi_header(args.raw_file)).metadata['band names'],dtype=str)[inds].tolist()
+
+    # Build output dataset
+    driver = gdal.GetDriverByName('ENVI')
+    driver.Register()
+
+    #TODO: careful about output datatypes / format
+    outDataset = driver.Create(args.out_file, glt.shape[1], glt.shape[0],
+                               ort_img.shape[-1], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+    outDataset.SetProjection(glt_dataset.GetProjection())
+    outDataset.SetGeoTransform(glt_dataset.GetGeoTransform())
+    for _b in range(1, ort_img.shape[-1]+1):
+        outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+        if band_names is not None:
+            outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
+    del outDataset
+
+    _write_bil_chunk(ort_img.transpose((0,2,1)), args.out_file, 0, (glt.shape[0], ort_img.shape[-1], glt.shape[1]))
+
+
+
+
+if __name__ == '__main__':
+    main()
+
+

From cfd0a54b42055f9ce5f3d1116abd4697b2403565 Mon Sep 17 00:00:00 2001
From: brodrick
Date: Sun, 16 Jul 2023 14:56:05 -0700
Subject: [PATCH 03/14] remove unneeded imports

---
 apply_glt_serial.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
index b1d5aa3..4df0c66 100644
--- a/apply_glt_serial.py
+++ b/apply_glt_serial.py
@@ -3,11 +3,7 @@
 import pandas as pd
 from osgeo import gdal
 from spectral.io import envi
-import logging
 from typing import List
-import time
-import os
-import multiprocessing
 import emit_utils.file_checks
 
 from emit_utils.file_checks import envi_header
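The core of apply_glt_serial.py is `single_image_ortho`, which treats the GLT as a lookup table: each valid map-space pixel stores 1-based (sample, line) coordinates into the raw image, so after subtracting 1 the whole orthorectified cube can be gathered with one vectorized fancy-index. A toy demonstration of that indexing with hypothetical 2x2 arrays (not data from the repository):

    import numpy as np

    # 2-band GLT: band 0 = raw sample (x), band 1 = raw line (y), 1-based;
    # (0, 0) marks nodata in this toy example.
    glt = np.array([[[1, 1], [2, 1]],
                    [[1, 2], [0, 0]]])
    raw = np.arange(8).reshape(2, 2, 2)    # hypothetical raw image, 2 bands

    valid = np.all(glt != 0, axis=-1)
    glt = glt - 1                          # to 0-based; only valid cells are used
    out = np.full((2, 2, 2), -9999)
    out[valid, :] = raw[glt[valid, 1], glt[valid, 0], :]
    print(out[0, 1])                       # raw pixel at line 0, sample 1 -> [2 3]

Because the gather is a single NumPy indexing expression, the serial script avoids the per-line overhead of the parallel apply_glt.py at the cost of holding the full output cube in memory.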
From 13d0eeda2c9dbad5261c4092e183647cd3457409 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Mon, 17 Jul 2023 09:04:16 -0700
Subject: [PATCH 04/14] add support for counting number of revisits

---
 apply_glt.py        |  6 ++++--
 build_mosaic_glt.jl | 52 +++++++++++++++++++++++++++++++++------------
 2 files changed, 43 insertions(+), 15 deletions(-)

diff --git a/apply_glt.py b/apply_glt.py
index 24d0a2d..a96eead 100644
--- a/apply_glt.py
+++ b/apply_glt.py
@@ -66,7 +66,9 @@ def main():
 
     if args.mosaic:
-        rawspace_files = np.squeeze(np.array(pd.read_csv(args.rawspace_file, header=None)))
+        rawspace_files = open(args.rawspace_file,'r').readlines()
+        rawspace_files = [x.strip() for x in rawspace_files]
+
         # TODO: make this check more elegant, should run, catch all files present exception, and proceed
         if args.run_with_missing_files is False:
             emit_utils.file_checks.check_raster_files(rawspace_files, map_space=False)
@@ -211,7 +213,7 @@ def apply_mosaic_glt_line(glt_filename: str, output_filename: str, rawspace_file
 
     #glt_line = glt_dataset.ReadAsArray(0, line_index, glt_dataset.RasterXSize, 1)
     #glt_line = glt[0][:,line_index:line_index+1, :]
-    glt_line = np.squeeze(glt[line_index,...]).copy().astype(int)
+    glt_line = np.squeeze(glt[line_index,...]).copy().astype(int)[...,:3]
 
     valid_glt = np.all(glt_line != GLT_NODATA_VALUE, axis=-1)
     glt_line[valid_glt,1] = np.abs(glt_line[valid_glt,1])

diff --git a/build_mosaic_glt.jl b/build_mosaic_glt.jl
index a3e84d7..6047afa 100644
--- a/build_mosaic_glt.jl
+++ b/build_mosaic_glt.jl
@@ -17,6 +17,8 @@ function main()
     add_argument!(parser, "--criteria_mode", type = String, default = "distance", help = "Band-ordering criteria mode. Options are min or max (require criteria file), or distance (uses closest point)")
     add_argument!(parser, "--criteria_band", type = Int64, default = 1, help = "band of criteria file to use")
     add_argument!(parser, "--criteria_file_list", type = String, help = "file(s) to be used for criteria")
+    add_argument!(parser, "--mask_file_list", type = String, default = nothing, help = "file(s) to be used for mask")
+    add_argument!(parser, "--mask_band", type = Int64, default = 8, help = "band of mask file to use")
     add_argument!(parser, "--target_extent_ul_lr", type = Float64, nargs=4, help = "extent to build the mosaic of")
     add_argument!(parser, "--mosaic", type = Int32, default=1, help = "treat as a mosaic")
     add_argument!(parser, "--output_epsg", type = Int32, default=4326, help = "epsg to write to destination")
@@ -54,6 +56,14 @@ function main()
         # TODO: add check to make sure criteria file dimensions match igm file dimensions
     end
 
+    if !isnothing(args.mask_file_list)
+        if args.mosaic == 1
+            mask_files = readdlm(args.mask_file_list, String)
+        else
+            mask_files = [args.mask_file_list]
+        end
+    end
+
     if length(args.target_extent_ul_lr) > 0
         ullr = args.target_extent_ul_lr
         min_x = ullr[1]
@@ -78,7 +88,7 @@ function main()
     @info "Output Image Size (x,y): $x_size_px, $y_size_px.  Creating output dataset."
 
     if args.mosaic == 1
-        output_bands = 3
+        output_bands = 4
     else
         output_bands = 2
     end
@@ -93,11 +103,12 @@ function main()
     grid[..,2] = fill(1,y_size_px,x_size_px) .* LinRange(max_y + args.target_resolution[2]/2,max_y + args.target_resolution[2] * (1/2 + y_size_px - 1), y_size_px)[:,[CartesianIndex()]]
 
     @info "Create GLT."
-    best = fill(1e12, y_size_px, x_size_px, 4)
+    best = fill(1e12, y_size_px, x_size_px, 5)
     if args.criteria_mode == "max"
         best = best .* -1
     end
     best[..,1:3] .= -9999
+    best[..,5] .= 0
 
     max_offset_distance = sqrt(sum(args.target_resolution.^2))*3
     pixel_buffer_window = 1
@@ -105,23 +116,37 @@ function main()
     lk = ReentrantLock()
     for (file_idx, igm_file) in enumerate(igm_files)
         @info "$igm_file"
-        dataset = ArchGDAL.read(igm_file)
+        dataset = ArchGDAL.read(igm_file,alloweddrivers =["ENVI"])
         igm = PermutedDimsArray(ArchGDAL.read(dataset), (2,1,3))
 
-        if minimum(igm[..,1]) > grid[1,end-1,1] || maximum(igm[..,1]) < grid[1,1,1] ||
-           minimum(igm[..,2]) > grid[1,1,2] || maximum(igm[..,2]) < grid[end-1,1,2]
-            #println(minimum(igm[..,1]), " > ", grid[1,end-1,1], " ", maximum(igm[..,1]), " < ", grid[1,1,1])
-            #println(minimum(igm[..,2]), " > ", grid[1,1,2], " ", maximum(igm[..,2]), " < ", grid[end-1,1,2])
-            continue
-        else
-            println("Entering")
-        end
+        #if minimum(igm[..,1]) > grid[1,end-1,1] || maximum(igm[..,1]) < grid[1,1,1] ||
+        #   minimum(igm[..,2]) > grid[1,1,2] || maximum(igm[..,2]) < grid[end-1,1,2]
+        #    #println(minimum(igm[..,1]), " > ", grid[1,end-1,1], " ", maximum(igm[..,1]), " < ", grid[1,1,1])
+        #    #println(minimum(igm[..,2]), " > ", grid[1,1,2], " ", maximum(igm[..,2]), " < ", grid[end-1,1,2])
+        #    continue
+        #else
+        #    println("Entering")
+        #end
 
         if args.criteria_mode != "distance"
-            criteria_dataset = ArchGDAL.read(criteria_files[file_idx])
+            cffi = criteria_files[file_idx]
+            @debug "$cffi"
+            criteria_dataset = ArchGDAL.read(criteria_files[file_idx],alloweddrivers =["ENVI"])
             criteria = PermutedDimsArray(ArchGDAL.read(criteria_dataset, args.criteria_band), (2,1))
         end
+        if !isnothing(args.mask_file_list)
+            mffi = mask_files[file_idx]
+            @debug "$mffi"
+            mask_dataset = ArchGDAL.read(mask_files[file_idx],alloweddrivers =["ENVI"])
+            mask = PermutedDimsArray(ArchGDAL.read(mask_dataset, args.mask_band), (2,1))
+        end
 
         Threads.@threads for _y=1:size(igm)[1]
             Threads.@threads for _x=1:size(igm)[2]
+                if !isnothing(args.mask_file_list)
+                    if mask[_y, _x] > 0
+                        continue
+                    end
+                end
                 pt = igm[_y,_x,1:2]
                 closest_t = Array{Int64}([round((pt[2] - grid[1,1,2]) / args.target_resolution[2]),
                                           round((pt[1] - grid[1,1,1]) / args.target_resolution[1]) ]) .+ 1
@@ -139,6 +164,7 @@ function main()
 
                     if dist < max_offset_distance
+                        best[closest[1], closest[2], 5] += 1
                         if args.criteria_mode in ["distance", "min"]
                             if args.criteria_mode == "distance"
                                 current_crit = dist
@@ -179,7 +205,7 @@ function main()
 
     println(sum(best[..,1] .!= -9999), " ", size(best)[1]*size(best)[2])
     if args.mosaic == 1
-        output = Array{Int32}(permutedims(best[..,1:3], (2,1,3)))
+        output = Array{Int32}(permutedims(best[..,[1,2,3,5]], (2,1,3)))
     else
         output = Array{Int32}(permutedims(best[..,1:2], (2,1,3)))
     end

From 3d8f33495ff90f25732076025b551fc62acd3d64 Mon Sep 17 00:00:00 2001
From: brodrick
Date: Mon, 17 Jul 2023 09:05:06 -0700
Subject: [PATCH 05/14] remove unneeded imports

---
 apply_glt_serial.py | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
index 4df0c66..3726945 100644
--- a/apply_glt_serial.py
+++ b/apply_glt_serial.py
@@ -1,9 +1,15 @@
+"""
+Apply a (possibly multi-file) per-pixel spatial reference, in serial (rayless).
+
+Author: Philip G. Brodrick, philip.brodrick@jpl.nasa.gov
+"""
+
+
 import argparse
 import numpy as np
 import pandas as pd
 from osgeo import gdal
 from spectral.io import envi
-from typing import List
 import emit_utils.file_checks
 
 from emit_utils.file_checks import envi_header
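With [PATCH 04/14], a mosaic GLT grows from 3 to 4 output bands: the extra band (`best[.., 5]`, written out as the fourth band) is incremented once for every raw pixel that lands within `max_offset_distance` of a grid cell, giving a per-cell observation count that serves as a proxy for the number of revisits. Reading that band back out might look like the sketch below (the file name is hypothetical):

    from osgeo import gdal
    import numpy as np

    ds = gdal.Open('mosaic_glt')                  # hypothetical GLT path
    counts = ds.GetRasterBand(4).ReadAsArray()    # 4th band = observation count
    print('cells observed more than once:', int(np.sum(counts > 1)))

Note that because several pixels of one scene can fall in the same cell (especially with the pixel buffer window), the count is an upper bound on the number of distinct scenes touching that cell.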
From 00a32ed5cc2e20b1db42de12fe40d0f05a0d3e6d Mon Sep 17 00:00:00 2001
From: brodrick
Date: Mon, 17 Jul 2023 09:23:05 -0700
Subject: [PATCH 06/14] add support for missing files, do actual mosaic check
 in ortho call

---
 apply_glt_serial.py | 37 +++++++++++++++++++++++--------------
 1 file changed, 23 insertions(+), 14 deletions(-)

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
index 3726945..660914a 100644
--- a/apply_glt_serial.py
+++ b/apply_glt_serial.py
@@ -8,6 +8,7 @@
 import argparse
 import numpy as np
 import pandas as pd
+import os
 from osgeo import gdal
 from spectral.io import envi
 import emit_utils.file_checks
@@ -34,17 +35,23 @@ def _write_bil_chunk(dat, outfile, line, shape, dtype = 'float32'):
 
 
 
-def single_image_ortho(img_dat, glt, glt_nodata_value=-9999):
+def single_image_ortho(img_dat, glt, img_ind=None, glt_nodata_value=-9999):
     """Orthorectify a single image
     Args:
         img_dat (array like): raw input image
         glt (array like): glt - 2 band 1-based indexing for output file(x, y)
+        img_ind (int): index of image in glt (if mosaic - otherwise ignored)
         glt_nodata_value (int, optional): Value from glt to ignore. Defaults to 0.
     Returns:
         array like: orthorectified version of img_dat
     """
     outdat = np.zeros((glt.shape[0], glt.shape[1], img_dat.shape[-1])) - 9999
     valid_glt = np.all(glt != glt_nodata_value, axis=-1)
+
+    # Only grab data from the correct image, if this is a mosaic
+    if glt.shape[2] >= 3:
+        valid_glt[glt[:,:,2] != img_ind] = False
+
     glt[valid_glt] -= 1 # account for 1-based indexing
     outdat[valid_glt, :] = img_dat[glt[valid_glt, 1], glt[valid_glt, 0], :]
     return outdat, valid_glt
@@ -56,6 +63,7 @@ def main(input_args=None):
     parser.add_argument('raw_file', type=str, metavar='RAW', help='path to raw image')
     parser.add_argument('out_file', type=str, metavar='OUTPUT', help='path to output image')
     parser.add_argument('--mosaic', action='store_true')
+    parser.add_argument('--run_with_missing_files', action='store_true')
     parser.add_argument('-b', type=int, nargs='+',default=-1)
     args = parser.parse_args(input_args)
 
@@ -76,19 +84,20 @@ def main(input_args=None):
         rawspace_files = [args.rawspace_file]
 
     ort_img = None
-    for rawfile in rawspace_files:
-        img_ds = envi.open(envi_header(rawfile))
-        if args.b[0] == -1:
-            inds = np.arange(int(img_ds.metadata['bands']))
-        else:
-            inds = np.array(args.b)
-        img_dat = img_ds.open_memmap(writeable=False, interleave='bip')[...,inds].copy()
-
-        if ort_img is None:
-            ort_img, _ = single_image_ortho(img_dat, glt)
-        else:
-            ort_img_update, valid_glt = single_image_ortho(img_dat, glt)
-            ort_img[valid_glt, :] = ort_img_update[valid_glt, :]
+    for _rf, rawfile in enumerate(rawspace_files):
+        if os.path.isfile(envi_header(rawfile)) and os.path.isfile(rawfile):
+            img_ds = envi.open(envi_header(rawfile))
+            if args.b[0] == -1:
+                inds = np.arange(int(img_ds.metadata['bands']))
+            else:
+                inds = np.array(args.b)
+            img_dat = img_ds.open_memmap(writeable=False, interleave='bip')[...,inds].copy()
+
+            if ort_img is None:
+                ort_img, _ = single_image_ortho(img_dat, glt, img_ind=_rf)
+            else:
+                ort_img_update, valid_glt = single_image_ortho(img_dat, glt, img_ind=_rf)
+                ort_img[valid_glt, :] = ort_img_update[valid_glt, :]
 
     band_names = None
     if 'band names' in envi.open(envi_header(args.raw_file)).metadata.keys():

From b12a28b592671ee507ffd1c1877d511c9a20a0e7 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Mon, 17 Jul 2023 10:40:54 -0700
Subject: [PATCH 07/14] bugfixes - tested and working now

---
 apply_glt_serial.py | 28 +++++++++++++++++++---------
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
index 660914a..b422f84 100644
--- a/apply_glt_serial.py
+++ b/apply_glt_serial.py
@@ -52,6 +52,9 @@ def single_image_ortho(img_dat, glt, img_ind=None, glt_nodata_value=-9999):
     if glt.shape[2] >= 3:
         valid_glt[glt[:,:,2] != img_ind] = False
 
+    if np.sum(valid_glt) == 0:
+        return outdat, valid_glt
+
     glt[valid_glt] -= 1 # account for 1-based indexing
     outdat[valid_glt, :] = img_dat[glt[valid_glt, 1], glt[valid_glt, 0], :]
     return outdat, valid_glt
@@ -64,28 +67,35 @@ def main(input_args=None):
     parser.add_argument('out_file', type=str, metavar='OUTPUT', help='path to output image')
     parser.add_argument('--mosaic', action='store_true')
     parser.add_argument('--run_with_missing_files', action='store_true')
-    parser.add_argument('-b', type=int, nargs='+',default=-1)
+    parser.add_argument('-b', type=int, nargs='+',default=[-1])
     args = parser.parse_args(input_args)
 
 
     glt_dataset = envi.open(envi_header(args.glt_file))
-    glt = glt_dataset.open_memmap(writeable=False, interleave='bip').copy()
+    glt = glt_dataset.open_memmap(writeable=False, interleave='bip').copy().astype(int)
     del glt_dataset
     glt_dataset = gdal.Open(args.glt_file)
 
     if args.mosaic:
-        rawspace_files = np.squeeze(np.array(pd.read_csv(args.rawspace_file, header=None)))
+        rawspace_files = np.squeeze(np.array(pd.read_csv(args.raw_file, header=None)))
         # TODO: make this check more elegant, should run, catch all files present exception, and proceed
         if args.run_with_missing_files is False:
             emit_utils.file_checks.check_raster_files(rawspace_files, map_space=False)
         # TODO: check that all rawspace files have same number of bands
     else:
-        emit_utils.file_checks.check_raster_files([args.rawspace_file], map_space=False)
-        rawspace_files = [args.rawspace_file]
+        emit_utils.file_checks.check_raster_files([args.raw_file], map_space=False)
+        rawspace_files = [args.raw_file]
 
     ort_img = None
     for _rf, rawfile in enumerate(rawspace_files):
+        print(f'{_rf+1}/{len(rawspace_files)}')
         if os.path.isfile(envi_header(rawfile)) and os.path.isfile(rawfile):
+
+            # Don't load image data unless we have to
+            if args.mosaic:
+                if np.sum(glt[:,:,2] == _rf+1) == 0:
+                    continue
+
             img_ds = envi.open(envi_header(rawfile))
             if args.b[0] == -1:
                 inds = np.arange(int(img_ds.metadata['bands']))
             else:
                 inds = np.array(args.b)
             img_dat = img_ds.open_memmap(writeable=False, interleave='bip')[...,inds].copy()
 
             if ort_img is None:
-                ort_img, _ = single_image_ortho(img_dat, glt, img_ind=_rf)
+                ort_img, _ = single_image_ortho(img_dat, glt, img_ind=_rf+1)
             else:
-                ort_img_update, valid_glt = single_image_ortho(img_dat, glt, img_ind=_rf)
+                ort_img_update, valid_glt = single_image_ortho(img_dat, glt, img_ind=_rf+1)
                 ort_img[valid_glt, :] = ort_img_update[valid_glt, :]
 
     band_names = None
-    if 'band names' in envi.open(envi_header(args.raw_file)).metadata.keys():
-        band_names = np.array(envi.open(envi_header(args.raw_file)).metadata['band names'],dtype=str)[inds].tolist()
+    if 'band names' in envi.open(envi_header(rawspace_files[0])).metadata.keys():
+        band_names = np.array(envi.open(envi_header(rawspace_files[0])).metadata['band names'],dtype=str)[inds].tolist()
 
     # Build output dataset
     driver = gdal.GetDriverByName('ENVI')
     driver.Register()
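Patches 06 and 07 together make the mosaic path of apply_glt_serial.py consistent: the third GLT band holds a 1-based index into the raw-file list, `single_image_ortho` only gathers pixels whose index matches the current file, and the driver loop skips loading any scene that contributes no pixels. A toy version of that selection logic with hypothetical arrays:

    import numpy as np

    glt = np.zeros((3, 3, 3), dtype=int)
    glt[..., 2] = np.array([[1, 1, 2],
                            [1, 2, 2],
                            [0, 0, 2]])   # 0 = nodata, else 1-based raw-file index

    for file_idx in (1, 2):
        n_px = np.sum(glt[:, :, 2] == file_idx)
        if n_px == 0:
            continue                      # mirrors the "don't load unless we have to" check
        print(f'file {file_idx}: {n_px} mosaic pixels')

The shift from a 0-based `_rf` to the 1-based `_rf+1` in patch 07 is what keeps the loop index aligned with the `file_idx` values written by build_mosaic_glt.jl.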
From 0fce63c3bcbaa1400960c7fc3d6f514ad855450d Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Sun, 26 Nov 2023 00:29:07 -0800
Subject: [PATCH 08/14] tweaks for band name inclusion, max length (generally
 not wanted) for glt generation

---
 apply_glt_serial.py | 18 ++++++++++--------
 build_mosaic_glt.jl | 17 ++++++++++++++++-
 2 files changed, 26 insertions(+), 9 deletions(-)

diff --git a/apply_glt_serial.py b/apply_glt_serial.py
index b422f84..73cd378 100644
--- a/apply_glt_serial.py
+++ b/apply_glt_serial.py
@@ -35,7 +35,7 @@ def _write_bil_chunk(dat, outfile, line, shape, dtype = 'float32'):
 
 
 
-def single_image_ortho(img_dat, glt, img_ind=None, glt_nodata_value=-9999):
+def single_image_ortho(img_dat, glt, img_ind=None, glt_nodata_value=0):
     """Orthorectify a single image
     Args:
         img_dat (array like): raw input image
@@ -66,6 +66,7 @@ def main(input_args=None):
     parser.add_argument('raw_file', type=str, metavar='RAW', help='path to raw image')
     parser.add_argument('out_file', type=str, metavar='OUTPUT', help='path to output image')
     parser.add_argument('--mosaic', action='store_true')
+    parser.add_argument('--glt_nodata', type=float, default=0)
     parser.add_argument('--run_with_missing_files', action='store_true')
     parser.add_argument('-b', type=int, nargs='+',default=[-1])
     args = parser.parse_args(input_args)
@@ -77,7 +78,7 @@ def main(input_args=None):
     glt_dataset = gdal.Open(args.glt_file)
 
     if args.mosaic:
-        rawspace_files = np.squeeze(np.array(pd.read_csv(args.raw_file, header=None)))
+        rawspace_files = [x.strip() for x in open(args.raw_file).readlines()]
         # TODO: make this check more elegant, should run, catch all files present exception, and proceed
         if args.run_with_missing_files is False:
             emit_utils.file_checks.check_raster_files(rawspace_files, map_space=False)
@@ -87,6 +88,7 @@ def main(input_args=None):
         rawspace_files = [args.raw_file]
 
     ort_img = None
+    band_names = None
     for _rf, rawfile in enumerate(rawspace_files):
         print(f'{_rf+1}/{len(rawspace_files)}')
         if os.path.isfile(envi_header(rawfile)) and os.path.isfile(rawfile):
@@ -97,22 +99,22 @@ def main(input_args=None):
                     continue
 
             img_ds = envi.open(envi_header(rawfile))
+            inds = None
             if args.b[0] == -1:
                 inds = np.arange(int(img_ds.metadata['bands']))
             else:
                 inds = np.array(args.b)
             img_dat = img_ds.open_memmap(writeable=False, interleave='bip')[...,inds].copy()
 
+            if band_names is None and 'band names' in envi.open(envi_header(rawfile)).metadata.keys():
+                band_names = np.array(envi.open(envi_header(rawfile)).metadata['band names'],dtype=str)[inds].tolist()
+
             if ort_img is None:
-                ort_img, _ = single_image_ortho(img_dat, glt, img_ind=_rf+1)
+                ort_img, _ = single_image_ortho(img_dat, glt, img_ind=_rf+1, glt_nodata_value=args.glt_nodata)
             else:
-                ort_img_update, valid_glt = single_image_ortho(img_dat, glt, img_ind=_rf+1)
+                ort_img_update, valid_glt = single_image_ortho(img_dat, glt, img_ind=_rf+1, glt_nodata_value=args.glt_nodata)
                 ort_img[valid_glt, :] = ort_img_update[valid_glt, :]
 
-    band_names = None
-    if 'band names' in envi.open(envi_header(rawspace_files[0])).metadata.keys():
-        band_names = np.array(envi.open(envi_header(rawspace_files[0])).metadata['band names'],dtype=str)[inds].tolist()
-
     # Build output dataset
     driver = gdal.GetDriverByName('ENVI')
     driver.Register()

diff --git a/build_mosaic_glt.jl b/build_mosaic_glt.jl
index 6047afa..f380fa1 100644
--- a/build_mosaic_glt.jl
+++ b/build_mosaic_glt.jl
@@ -21,6 +21,7 @@ function main()
     add_argument!(parser, "--mask_band", type = Int64, default = 8, help = "band of mask file to use")
     add_argument!(parser, "--target_extent_ul_lr", type = Float64, nargs=4, help = "extent to build the mosaic of")
     add_argument!(parser, "--mosaic", type = Int32, default=1, help = "treat as a mosaic")
+    add_argument!(parser, "--maxlen_file", type = String, default=nothing, help = "max length reference file")
     add_argument!(parser, "--output_epsg", type = Int32, default=4326, help = "epsg to write to destination")
     add_argument!(parser, "--log_file", type = String, default = nothing, help = "log file to write to")
     args = parse_args(parser)
@@ -64,6 +65,14 @@ function main()
         end
     end
 
+    if !isnothing(args.maxlen_file)
+        if args.mosaic == 1
+            maxlen_files = readdlm(args.maxlen_file, String)
+        else
+            maxlen_files = [args.maxlen_file]
+        end
+    end
+
     if length(args.target_extent_ul_lr) > 0
         ullr = args.target_extent_ul_lr
         min_x = ullr[1]
@@ -139,8 +148,14 @@ function main()
             mask_dataset = ArchGDAL.read(mask_files[file_idx],alloweddrivers =["ENVI"])
             mask = PermutedDimsArray(ArchGDAL.read(mask_dataset, args.mask_band), (2,1))
         end
+        if !isnothing(args.maxlen_file)
+            maxlen_ds = ArchGDAL.readraster(maxlen_files[file_idx],alloweddrivers=["ENVI"])
+            maxlines = size(maxlen_ds)[2]
+        else
+            maxlines = size(igm)[1]
+        end
 
-        Threads.@threads for _y=1:size(igm)[1]
+        Threads.@threads for _y=1:min(size(igm)[1],maxlines)
             Threads.@threads for _x=1:size(igm)[2]
                 if !isnothing(args.mask_file_list)
                     if mask[_y, _x] > 0
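Both output paths write through `_write_bil_chunk`, which leans on the BIL (band-interleaved-by-line) layout: all bands of one image line are contiguous on disk, so line n of a cube with shape (lines, bands, samples) begins at byte offset n * bands * samples * itemsize, exactly the seek the function performs. A worked check of that offset under assumed dimensions (sizes hypothetical):

    import numpy as np

    n_lines, n_bands, n_samples = 1000, 3, 1242    # hypothetical cube shape
    itemsize = np.dtype('float32').itemsize        # 4 bytes

    line = 10
    offset = line * n_bands * n_samples * itemsize # matches the seek in _write_bil_chunk
    print(offset)                                  # 149040

Because each line is written independently at a computed offset, chunks can be flushed in any order into a pre-sized file, which is what makes the serial writer simple.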
From 5a264025dd9dfb55534834decfc83a23449c2941 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Sun, 26 Nov 2023 00:29:39 -0800
Subject: [PATCH 09/14] add simple vegetation adjustment script for
 aggregation

---
 veg_correction.py | 179 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 179 insertions(+)
 create mode 100644 veg_correction.py

diff --git a/veg_correction.py b/veg_correction.py
new file mode 100644
index 0000000..9368d23
--- /dev/null
+++ b/veg_correction.py
@@ -0,0 +1,179 @@
+"""
+Apply a (possibly multi-file) per-pixel spatial reference, in serial (rayless).
+
+Author: Philip G. Brodrick, philip.brodrick@jpl.nasa.gov
+"""
+
+
+import argparse
+import numpy as np
+import pandas as pd
+import os
+from osgeo import gdal
+from spectral.io import envi
+import emit_utils.file_checks
+
+from emit_utils.file_checks import envi_header
+
+def _write_bil_chunk(dat, outfile, line, shape, dtype = 'float32'):
+    """
+    Write a chunk of data to a binary, BIL formatted data cube.
+    Args:
+        dat: data to write
+        outfile: output file to write to
+        line: line of the output file to write to
+        shape: shape of the output file
+        dtype: output data type
+
+    Returns:
+        None
+    """
+    outfile = open(outfile, 'rb+')
+    outfile.seek(line * shape[1] * shape[2] * np.dtype(dtype).itemsize)
+    outfile.write(dat.astype(dtype).tobytes())
+    outfile.close()
+
+
+
+def main(input_args=None):
+    parser = argparse.ArgumentParser(description="Robust MF")
+    parser.add_argument('abun_file', type=str)
+    parser.add_argument('cover_file', type=str)
+    parser.add_argument('out_file', type=str)
+    parser.add_argument('--soil_thresh', type=float, default=0.001)
+    parser.add_argument('--coarsened_file', type=str, default=None)
+    parser.add_argument('--resolution', type=float, default=None)
+    parser.add_argument('--data_threshold', type=float, default=None)
+    parser.add_argument('--abun_uncert_file', type=str, default=None)
+    parser.add_argument('--cover_uncert_file', type=str, default=None)
+    args = parser.parse_args(input_args)
+
+
+    abun_ds = envi.open(envi_header(args.abun_file))
+    band_names = abun_ds.metadata['band names']
+    abun_gdal = gdal.Open(args.abun_file)
+    cover_ds = envi.open(envi_header(args.cover_file))
+
+    abun = abun_ds.open_memmap(interleave='bip').copy()
+    cover = cover_ds.open_memmap(interleave='bip')[...,2].copy()
+    abun = abun / cover[:,:,np.newaxis]
+    masked_out = np.any(np.isnan(abun), axis=-1)
+    masked_out[np.any(np.isfinite(abun) == False,axis=-1)] = True
+    masked_out[cover < args.soil_thresh] = True
+    masked_out[np.any(abun == -9999,axis=-1)] = True
+
+    abun[masked_out,:] = -9999
+    cover[masked_out] = -9999
+    #abun[np.isnan(abun)] = -9999
+    #abun[np.isfinite(abun) == False] = -9999
+    #abun[cover < args.soil_thresh,:] = -9999
+
+    do_uncert = False
+    if args.abun_uncert_file is not None and args.cover_uncert_file is not None and args.coarsened_file is not None and args.resolution is not None:
+        abununcert_ds = envi.open(envi_header(args.abun_uncert_file))
+        coveruncert_ds = envi.open(envi_header(args.cover_uncert_file))
+        abununcert = abununcert_ds.open_memmap(interleave='bip').copy()
+        coveruncert = coveruncert_ds.open_memmap(interleave='bip')[...,2].copy()
+
+        abununcert[masked_out,:] = -9999
+        coveruncert[masked_out] = -9999
+        do_uncert = True
+
+
+
+    # Build output dataset
+    driver = gdal.GetDriverByName('ENVI')
+    driver.Register()
+
+    #TODO: careful about output datatypes / format
+    outDataset = driver.Create(args.out_file, abun.shape[1], abun.shape[0],
+                               abun.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+    outDataset.SetProjection(abun_gdal.GetProjection())
+    outDataset.SetGeoTransform(abun_gdal.GetGeoTransform())
+    for _b in range(1, abun.shape[2]+1):
+        outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+        if band_names is not None:
+            outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
+    del outDataset
+
+    _write_bil_chunk(abun.transpose((0,2,1)), args.out_file, 0, (abun.shape[0], abun.shape[2], abun.shape[1]))
+
+
+    if args.coarsened_file is not None and args.resolution is not None:
+
+        trans = abun_gdal.GetGeoTransform()
+        num_px = int(round(args.resolution / trans[1]))
+
+        abun[abun == -9999] = np.nan
+
+        numy = int(round(abun.shape[0] / num_px))
+        numx = int(round(abun.shape[1] / num_px))
+        asa = np.zeros((numy, numx,abun.shape[2])) - 9999
+        asa_unc = None
+        if do_uncert:
+            asa_unc = np.zeros((numy, numx,abun.shape[2])) - 9999
+
+        for _y in range(0,numy):
+            for _x in range(0,numx):
+                valid_px = np.sum(masked_out[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px] == False)
+                if args.data_threshold is not None:
+                    complete_frac = valid_px / float(num_px**2)
+                    print(complete_frac)
+                    if complete_frac < args.data_threshold:
+                        continue
+                asa[_y,_x,:] = np.nanmean(abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:],axis=(0,1))
+
+                if do_uncert:
+                    valid_unc = abununcert[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:]
+                    valid_subset = masked_out[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px] == False
+
+
+                    inner_term = np.power(abununcert[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:][valid_subset,:] / \
+                                          abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:][valid_subset,:],2) +\
+                                 np.power(coveruncert[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px][valid_subset] / \
+                                          cover[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px][valid_subset],2)[:,np.newaxis]
+                    inner_term[abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:][valid_subset,:] == 0] = np.nan
+
+
+                    asa_unc[_y,_x,:] = np.sqrt(np.power(asa[_y,_x,:] / valid_px,2) * np.nansum(inner_term,axis=0))
+
+        outDataset = driver.Create(args.coarsened_file, asa.shape[1], asa.shape[0],
+                                   asa.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+        outDataset.SetProjection(abun_gdal.GetProjection())
+        outtrans = list(abun_gdal.GetGeoTransform())
+        outtrans[1] = args.resolution
+        outtrans[5] = -1*args.resolution
+        outDataset.SetGeoTransform(outtrans)
+        for _b in range(1, asa.shape[2]+1):
+            outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+            if band_names is not None:
+                outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
+        del outDataset
+        _write_bil_chunk(asa.transpose((0,2,1)), args.coarsened_file, 0, (asa.shape[0], asa.shape[2], asa.shape[1]))
+
+        # Now uncertainty
+        if do_uncert:
+            outDataset = driver.Create(args.coarsened_file + '_uncert', asa.shape[1], asa.shape[0],
+                                       asa.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+            outDataset.SetProjection(abun_gdal.GetProjection())
+            outtrans = list(abun_gdal.GetGeoTransform())
+            outtrans[1] = args.resolution
+            outtrans[5] = -1*args.resolution
+            outDataset.SetGeoTransform(outtrans)
+            for _b in range(1, asa_unc.shape[2]+1):
+                outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+                if band_names is not None:
+                    outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
+            del outDataset
+            _write_bil_chunk(asa_unc.transpose((0,2,1)), args.coarsened_file + '_uncert', 0, (asa_unc.shape[0], asa_unc.shape[2], asa_unc.shape[1]))
+
+
+
+
+
+if __name__ == '__main__':
+    main()
+
+
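Reading the aggregation loop in veg_correction.py, the coarsened uncertainty appears to implement first-order relative-error propagation for the mean of the cover-normalized abundances; the formula below is an interpretation of the code, not a statement from the source. For a coarse cell with N valid fine pixels, per-pixel abundance a_{i,b} in band b with uncertainty \sigma_{a_{i,b}}, and soil cover c_i with uncertainty \sigma_{c_i}:

\[
\sigma_{\bar{A}_b} \;=\; \sqrt{\left(\frac{\bar{A}_b}{N}\right)^{2}\,\sum_{i=1}^{N}\left[\left(\frac{\sigma_{a_{i,b}}}{a_{i,b}}\right)^{2}+\left(\frac{\sigma_{c_i}}{c_i}\right)^{2}\right]}
\]

where \bar{A}_b is the coarse-cell mean written to `asa`, the bracketed sum is the code's `inner_term`, and terms with a_{i,b} = 0 are set to NaN and dropped from the sum to avoid division by zero.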
From dc1cb3457213c1081b16c8064bf3655d9edfe0d5 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Wed, 14 Feb 2024 07:00:04 -0800
Subject: [PATCH 10/14] update to veg adjustment coarsened writes

---
 veg_correction.py | 33 ++++++++++++++++++++-------------
 1 file changed, 20 insertions(+), 13 deletions(-)

diff --git a/veg_correction.py b/veg_correction.py
index 9368d23..3716f01 100644
--- a/veg_correction.py
+++ b/veg_correction.py
@@ -39,13 +39,16 @@ def main(input_args=None):
     parser = argparse.ArgumentParser(description="Robust MF")
     parser.add_argument('abun_file', type=str)
     parser.add_argument('cover_file', type=str)
-    parser.add_argument('out_file', type=str)
+    parser.add_argument('--out_file', type=str, default=None)
     parser.add_argument('--soil_thresh', type=float, default=0.001)
     parser.add_argument('--coarsened_file', type=str, default=None)
     parser.add_argument('--resolution', type=float, default=None)
     parser.add_argument('--data_threshold', type=float, default=None)
     parser.add_argument('--abun_uncert_file', type=str, default=None)
     parser.add_argument('--cover_uncert_file', type=str, default=None)
+    parser.add_argument('--valid_fraction_file', type=str, default=None)
+    parser.add_argument('--mask_file', type=str, default=None)
+    parser.add_argument('--thresh_only', action='store_true')
     args = parser.parse_args(input_args)
 
@@ -56,11 +59,15 @@ def main(input_args=None):
     abun = abun_ds.open_memmap(interleave='bip').copy()
     cover = cover_ds.open_memmap(interleave='bip')[...,2].copy()
-    abun = abun / cover[:,:,np.newaxis]
+    if args.thresh_only is False:
+        abun = abun / cover[:,:,np.newaxis]
     masked_out = np.any(np.isnan(abun), axis=-1)
     masked_out[np.any(np.isfinite(abun) == False,axis=-1)] = True
     masked_out[cover < args.soil_thresh] = True
     masked_out[np.any(abun == -9999,axis=-1)] = True
+    if args.mask_file is not None:
+        ext_mask = gdal.Open(args.mask_file).ReadAsArray()
+        masked_out[ext_mask == 1] = True
 
     abun[masked_out,:] = -9999
     cover[masked_out] = -9999
@@ -86,17 +93,18 @@ def main(input_args=None):
     driver.Register()
 
     #TODO: careful about output datatypes / format
-    outDataset = driver.Create(args.out_file, abun.shape[1], abun.shape[0],
-                               abun.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
-    outDataset.SetProjection(abun_gdal.GetProjection())
-    outDataset.SetGeoTransform(abun_gdal.GetGeoTransform())
-    for _b in range(1, abun.shape[2]+1):
-        outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
-        if band_names is not None:
-            outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
-    del outDataset
+    if args.out_file is not None:
+        outDataset = driver.Create(args.out_file, abun.shape[1], abun.shape[0],
+                                   abun.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+        outDataset.SetProjection(abun_gdal.GetProjection())
+        outDataset.SetGeoTransform(abun_gdal.GetGeoTransform())
+        for _b in range(1, abun.shape[2]+1):
+            outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+            if band_names is not None:
+                outDataset.GetRasterBand(_b).SetDescription(band_names[_b-1])
+        del outDataset
 
-    _write_bil_chunk(abun.transpose((0,2,1)), args.out_file, 0, (abun.shape[0], abun.shape[2], abun.shape[1]))
+        _write_bil_chunk(abun.transpose((0,2,1)), args.out_file, 0, (abun.shape[0], abun.shape[2], abun.shape[1]))
 
@@ -118,7 +126,6 @@ def main(input_args=None):
                 valid_px = np.sum(masked_out[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px] == False)
                 if args.data_threshold is not None:
                     complete_frac = valid_px / float(num_px**2)
-                    print(complete_frac)
                     if complete_frac < args.data_threshold:
                         continue
                 asa[_y,_x,:] = np.nanmean(abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:],axis=(0,1))

From 9caa93ac0f36ef88b95ea293c20290c5df775da4 Mon Sep 17 00:00:00 2001
From: brodrick
Date: Wed, 14 Feb 2024 07:54:20 -0800
Subject: [PATCH 11/14] add count fraction option

---
 veg_correction.py | 33 +++++++++++++++++++++++++++++++--
 1 file changed, 31 insertions(+), 2 deletions(-)

diff --git a/veg_correction.py b/veg_correction.py
index 3716f01..8ed1c6d 100644
--- a/veg_correction.py
+++ b/veg_correction.py
@@ -42,6 +42,7 @@ def main(input_args=None):
     parser.add_argument('--out_file', type=str, default=None)
     parser.add_argument('--soil_thresh', type=float, default=0.001)
     parser.add_argument('--coarsened_file', type=str, default=None)
+    parser.add_argument('--mask_fraction_file', type=str, default=None)
     parser.add_argument('--resolution', type=float, default=None)
     parser.add_argument('--data_threshold', type=float, default=None)
     parser.add_argument('--abun_uncert_file', type=str, default=None)
@@ -59,14 +60,22 @@ def main(input_args=None):
     abun = abun_ds.open_memmap(interleave='bip').copy()
     cover = cover_ds.open_memmap(interleave='bip')[...,2].copy()
 
+    counts = {}
+
     if args.thresh_only is False:
         abun = abun / cover[:,:,np.newaxis]
+
+    counts['no_abun'] = np.any(np.logical_or.reduce((np.isnan(abun) , np.isfinite(abun) == False, abun == -9999)),axis=-1)
     masked_out = np.any(np.isnan(abun), axis=-1)
     masked_out[np.any(np.isfinite(abun) == False,axis=-1)] = True
-    masked_out[cover < args.soil_thresh] = True
     masked_out[np.any(abun == -9999,axis=-1)] = True
+
+    counts['soil_cutoff'] = np.logical_and(masked_out == False, cover < args.soil_thresh)
+    masked_out[cover < args.soil_thresh] = True
+
     if args.mask_file is not None:
         ext_mask = gdal.Open(args.mask_file).ReadAsArray()
+        counts['external_mask'] = np.logical_and(masked_out == False, ext_mask == 1)
         masked_out[ext_mask == 1] = True
 
     abun[masked_out,:] = -9999
@@ -87,7 +96,6 @@ def main(input_args=None):
         do_uncert = True
 
-
     # Build output dataset
     driver = gdal.GetDriverByName('ENVI')
     driver.Register()
@@ -117,6 +125,7 @@ def main(input_args=None):
         numy = int(round(abun.shape[0] / num_px))
         numx = int(round(abun.shape[1] / num_px))
         asa = np.zeros((numy, numx,abun.shape[2])) - 9999
+        agg_count = np.zeros((numy, numx,len(counts.keys()))) - 9999
         asa_unc = None
         if do_uncert:
             asa_unc = np.zeros((numy, numx,abun.shape[2])) - 9999
@@ -129,6 +138,8 @@ def main(input_args=None):
                     if complete_frac < args.data_threshold:
                         continue
                 asa[_y,_x,:] = np.nanmean(abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:],axis=(0,1))
+                for _key, key in enumerate(counts.keys()):
+                    agg_count[_y,_x,_key] = np.sum(counts[key][_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px]) / np.product(abun[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,0].shape)
 
                 if do_uncert:
                     valid_unc = abununcert[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px,:]
                     valid_subset = masked_out[_y*num_px:(_y+1)*num_px,_x*num_px:(_x+1)*num_px] == False
@@ -144,6 +155,7 @@ def main(input_args=None):
                     asa_unc[_y,_x,:] = np.sqrt(np.power(asa[_y,_x,:] / valid_px,2) * np.nansum(inner_term,axis=0))
 
+        # Spectral Abundance
         outDataset = driver.Create(args.coarsened_file, asa.shape[1], asa.shape[0],
                                    asa.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
         outDataset.SetProjection(abun_gdal.GetProjection())
@@ -158,6 +170,23 @@ def main(input_args=None):
         del outDataset
         _write_bil_chunk(asa.transpose((0,2,1)), args.coarsened_file, 0, (asa.shape[0], asa.shape[2], asa.shape[1]))
 
+
+        # Count fractions
+        if args.mask_fraction_file is not None:
+            outDataset = driver.Create(args.mask_fraction_file, agg_count.shape[1], agg_count.shape[0],
+                                       agg_count.shape[2], gdal.GDT_Float32, options=['INTERLEAVE=BIL'])
+            outDataset.SetProjection(abun_gdal.GetProjection())
+            outtrans = list(abun_gdal.GetGeoTransform())
+            outtrans[1] = args.resolution
+            outtrans[5] = -1*args.resolution
+            outDataset.SetGeoTransform(outtrans)
+            for _b in range(1, agg_count.shape[2]+1):
+                outDataset.GetRasterBand(_b).SetNoDataValue(-9999)
+                if band_names is not None:
+                    outDataset.GetRasterBand(_b).SetDescription(list(counts.keys())[_b-1])
+            del outDataset
+            _write_bil_chunk(agg_count.transpose((0,2,1)), args.mask_fraction_file, 0, (agg_count.shape[0], agg_count.shape[2], agg_count.shape[1]))
+
         # Now uncertainty
         if do_uncert:
            outDataset = driver.Create(args.coarsened_file + '_uncert', asa.shape[1], asa.shape[0],
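[PATCH 11/14] threads a dictionary of boolean "reason" masks (no_abun, soil_cutoff, external_mask) through the coarsening loop, so each coarse cell records what fraction of its fine pixels was excluded, and why; each mask is defined against pixels not already excluded by an earlier reason, keeping the fractions disjoint. The per-block arithmetic reduces to a mean of booleans, as in this toy example with hypothetical masks:

    import numpy as np

    counts = {
        'no_abun': np.array([[True, False], [False, False]]),
        'soil_cutoff': np.array([[False, False], [True, True]]),
    }
    # Fraction of this 2x2 block excluded for each reason:
    fractions = {k: float(v.sum()) / v.size for k, v in counts.items()}
    print(fractions)   # {'no_abun': 0.25, 'soil_cutoff': 0.5}

Writing these fractions as bands of a separate raster (the --mask_fraction_file option) makes it easy to audit, cell by cell, why a coarsened abundance value is missing or based on few pixels.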
From a259dd20f69236fc5cc2ee38dfb24e2596c20d66 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Mon, 17 Jun 2024 14:39:34 -0700
Subject: [PATCH 12/14] add citations

---
 CITATION.cff | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100644 CITATION.cff

diff --git a/CITATION.cff b/CITATION.cff
new file mode 100644
index 0000000..9e41bbd
--- /dev/null
+++ b/CITATION.cff
@@ -0,0 +1,19 @@
+cff-version: 1.2.0
+title: isofit
+message: >-
+  This codebase provides an interface to executing L3 EMIT
+  SDS code.
+type: software
+authors:
+  - given-names: Philip G Brodrick
+    affiliation: >-
+      Jet Propulsion Laboratory, California Institute of
+      Technology
+    orcid: 'https://orcid.org/0000-0001-9497-7661'
+  - given-names: Winston Olson-Duvall
+    affiliation: >-
+      Jet Propulsion Laboratory, California Institute of
+      Technology
+    orcid: 'https://orcid.org/0000-0002-4210-0283'
+
+license: Apache-2.0

From d03e195118574cbe0d87a99c9ebfc8f775c73342 Mon Sep 17 00:00:00 2001
From: Phil Brodrick
Date: Mon, 17 Jun 2024 14:40:34 -0700
Subject: [PATCH 13/14] update name

---
 CITATION.cff | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CITATION.cff b/CITATION.cff
index 9e41bbd..3f44008 100644
--- a/CITATION.cff
+++ b/CITATION.cff
@@ -1,5 +1,5 @@
 cff-version: 1.2.0
-title: isofit
+title: emit-sds-l3
 message: >-
   This codebase provides an interface to executing L3 EMIT
   SDS code.
 type: software

From 4c9eaf417be28c63c9440413fb51aec23a23bd80 Mon Sep 17 00:00:00 2001
From: Winston Olson-Duvall
Date: Wed, 3 Jul 2024 10:53:37 -0700
Subject: [PATCH 14/14] Update change log

---
 CHANGELOG.md | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1efc730..d7c6f27 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,10 +4,22 @@ All notable changes to this project will be documented in this file. Dates are d
 
 Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
 
+#### [v1.2.0](https://github.com/emit-sds/emit-sds-l3/compare/v1.1.0...v1.2.0)
+
+> 3 July 2024
+
+- add citations [`#4`](https://github.com/emit-sds/emit-sds-l3/pull/4)
+- Serial glt [`#3`](https://github.com/emit-sds/emit-sds-l3/pull/3)
+- add multithreading capability [`#2`](https://github.com/emit-sds/emit-sds-l3/pull/2)
+- add simple vegetation adjustment script for aggregation [`5a26402`](https://github.com/emit-sds/emit-sds-l3/commit/5a264025dd9dfb55534834decfc83a23449c2941)
+- add serial apply_glt [`fbb3458`](https://github.com/emit-sds/emit-sds-l3/commit/fbb34582d5e405b77de4e49a8b1f5eb6ee44b49f)
+- add support for counting number of revisits [`13d0eed`](https://github.com/emit-sds/emit-sds-l3/commit/13d0eeda2c9dbad5261c4092e183647cd3457409)
+
 #### [v1.1.0](https://github.com/emit-sds/emit-sds-l3/compare/v1.0.0...v1.1.0)
 
 > 6 June 2022
 
+- Merge develop to main for v1.1.0 [`#1`](https://github.com/emit-sds/emit-sds-l3/pull/1)
 - removing unmixing.....see emit-sds/SpectralUnmixing [`9d5fa7c`](https://github.com/emit-sds/emit-sds-l3/commit/9d5fa7c48ea669f0eeab3c95fc107d61a5d6f313)
 - updated aggregator demo for dynamic load, uncertainty calcs, and netcdf conversion [`cc5241c`](https://github.com/emit-sds/emit-sds-l3/commit/cc5241c4fbf89bba6b8cd75a20d87233322a5e66)
 - add license [`247f9b3`](https://github.com/emit-sds/emit-sds-l3/commit/247f9b32c4490ea40cb4c99f30f6569985310dde)