From c0361da0d6c84fdc4ef67ac07fbe80bb7ff8c593 Mon Sep 17 00:00:00 2001
From: Jacob Brady
Date: Thu, 25 Jan 2024 22:43:52 -0500
Subject: [PATCH 01/37] added ability to set a peak intensity threshold for
 fitting initial peak parameters

---
 docs/quickstart.md  |  2 +-
 peakipy/cli/fit.py  | 65 ++++++++++++++++++++++++++++++++++++++++-----
 peakipy/cli/main.py |  5 ++++
 peakipy/core.py     | 51 ++++++++++++++++++++++-------------
 pyproject.toml      |  2 +-
 5 files changed, 99 insertions(+), 26 deletions(-)

diff --git a/docs/quickstart.md b/docs/quickstart.md
index a3a44dd3..972f372c 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -95,7 +95,7 @@ Requirements

 === "Latest"

-    The latest version (1.1.2) of `peakipy` requires Python 3.10 or above (see `pyproject.toml` for details).
+    The latest version (1.1.3) of `peakipy` requires Python 3.10 or above (see `pyproject.toml` for details).

 === "0.2.0"

diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py
index 1d0c229b..36baa26b 100644
--- a/peakipy/cli/fit.py
+++ b/peakipy/cli/fit.py
@@ -2,6 +2,7 @@
 """Fit and deconvolute NMR peaks: Functions used for running peakipy fit
 """
 from pathlib import Path
+from typing import Optional, List
 from multiprocessing import cpu_count, Pool

 import numpy as np
@@ -88,12 +89,55 @@ def split_peaklist(peaklist, n_cpu, tmp_path=tmp_path):
 class FitPeaksInput:
     """input data for the fit_peaks function"""

-    def __init__(self, args: dict, data: np.array, config: dict, plane_numbers: list):
-
+    def __init__(
+        self,
+        args: dict,
+        data: np.array,
+        config: dict,
+        plane_numbers: list,
+        planes_for_initial_fit: Optional[List[int]] = None,
+        use_only_planes_above_threshold: Optional[float] = None,
+    ):
         self._data = data
         self._args = args
         self._config = config
         self._plane_numbers = plane_numbers
+        self._planes_for_initial_fit = planes_for_initial_fit
+        self._use_only_planes_above_threshold = use_only_planes_above_threshold
+
+    def check_integer_list(self):
+        # planes_for_initial_fit should be a list-like of integer plane
+        # indices that actually exist in the data
+        if not hasattr(self._planes_for_initial_fit, "append"):
+            return False
+        if not all(isinstance(i, int) for i in self._planes_for_initial_fit):
+            return False
+        return all(
+            0 <= i < self._data.shape[0] for i in self._planes_for_initial_fit
+        )
+
+    def sum_planes_for_initial_fit(self):
+        if (
+            self._planes_for_initial_fit is None
+            and self._use_only_planes_above_threshold is None
+        ):
+            return self._data.sum(axis=0)
+
+        elif self.check_integer_list():
+            return self._data[self._planes_for_initial_fit].sum(axis=0)
+
+        elif isinstance(self._use_only_planes_above_threshold, float):
+            # very crude at the moment: keep planes whose maximum intensity
+            # exceeds the threshold, then sum them
+            return self._data[
+                self._data.max(axis=1).max(axis=1)
+                > self._use_only_planes_above_threshold
+            ].sum(axis=0)
+        else:
+            return self._data.sum(axis=0)

     @property
     def data(self):
@@ -111,12 +155,15 @@ def config(self):
     def plane_numbers(self):
         return self._plane_numbers

+    @property
+    def summed_planes_for_initial_fit(self):
+        return self.sum_planes_for_initial_fit()
+

 class FitPeaksResult:
     """Result of fitting a set of peaks"""

     def __init__(self, df: pd.DataFrame, log: str):
-
         self._df = df
         self._log = log

@@ -151,6 +198,7 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult:
     # console.print(to_fix, style="red bold")
     noise = fit_input.args.get("noise")
     verb = fit_input.args.get("verb")
+    initial_fit_threshold = fit_input.args.get("initial_fit_threshold")
     lineshape = fit_input.args.get("lineshape")
    xy_bounds = fit_input.args.get("xy_bounds")
     vclist = fit_input.args.get("vclist")
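The three plane-selection modes that FitPeaksInput gains above reduce to a few NumPy operations. A minimal standalone sketch for reference (the array shape and variable names are illustrative, not code from the patch):

    import numpy as np

    data = np.random.randn(10, 256, 512)  # pseudo-3D cube: (planes, F1, F2)

    # default: no plane list and no threshold -> sum every plane
    summed_all = data.sum(axis=0)

    # planes_for_initial_fit: sum only an explicit, in-range list of planes
    planes = [0, 1, 2]
    summed_subset = data[planes].sum(axis=0)

    # use_only_planes_above_threshold: keep planes whose maximum intensity
    # exceeds the threshold, then sum those
    threshold = 5.0
    summed_above = data[data.max(axis=1).max(axis=1) > threshold].sum(axis=0)
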
@@ -225,7 +273,7 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult:
         # fits sum of all planes first
         fit_result = fit_first_plane(
             group,
-            summed_planes,
+            fit_input.data,
             # norm(summed_planes),
             uc_dics,
             lineshape=lineshape,
@@ -233,6 +281,7 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult:
             verbose=verb,
             noise=noise,
             fit_method=fit_input.config.get("fit_method", "leastsq"),
+            threshold=initial_fit_threshold,
         )
         fit_result.plot(
             plot_path=fit_input.args.get("plot"),
@@ -304,8 +353,12 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult:
             # deal with lineshape specific parameters
             match lineshape:
                 case lineshape.PV_PV:
-                    frac_x, frac_err_x, name = get_params(first.params, "fraction_x")
-                    frac_y, frac_err_y, name = get_params(first.params, "fraction_y")
+                    frac_x, frac_err_x, name = get_params(
+                        first.params, "fraction_x"
+                    )
+                    frac_y, frac_err_y, name = get_params(
+                        first.params, "fraction_y"
+                    )
                     fractions_x.extend(frac_x)
                     fractions_y.extend(frac_y)
                 case lineshape.V:
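Inside fit_first_plane (changed in peakipy/core.py below), the threshold gates which planes contribute to the summed data used for the initial lineshape fit. The same mechanics as a standalone sketch (illustrative shapes and names, not code from the patch):

    import numpy as np

    data = np.random.randn(4, 128, 256)    # (planes, F1, F2)
    mask = np.zeros(data.shape[1:], dtype=bool)  # 2D cluster mask
    mask[40:60, 100:140] = True

    # slice every plane with the same 2D mask -> (planes, n_masked_points)
    peak_slices = np.array([plane[mask] for plane in data])
    # drop planes whose masked maximum is at or below the threshold
    threshold = 2.0
    peak_slices = peak_slices[peak_slices.max(axis=1) > threshold]
    # sum the surviving planes into the 1D data used for the initial fit
    peak_slices = peak_slices.sum(axis=0)
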
diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py
index 9842105d..09d0dbaf 100644
--- a/peakipy/cli/main.py
+++ b/peakipy/cli/main.py
@@ -373,6 +373,7 @@ def fit(
     vclist: Optional[Path] = None,
     plane: Optional[List[int]] = None,
     exclude_plane: Optional[List[int]] = None,
+    initial_fit_threshold: Optional[float] = None,
     mp: bool = True,
     plot: Optional[Path] = None,
     show: bool = False,
@@ -409,6 +410,9 @@ def fit(
     exclude_plane : Optional[List[int]]
         Specific plane(s) to fit [default: None]
         eg. [1,4,5] will exclude planes 1, 4 and 5
+    initial_fit_threshold : Optional[float]
+        Threshold used to select planes for fitting of initial lineshape parameters. Only planes with
+        intensities above this threshold will be included in the initial fit of summed planes.
     mp : bool
         Use multiprocessing [default: True]
     plot : Optional[Path]
@@ -472,6 +476,7 @@ def fit(
     args["verb"] = verb
     args["show"] = show
     args["mp"] = mp
+    args["initial_fit_threshold"] = initial_fit_threshold

     # read vclist
     if vclist is None:
diff --git a/peakipy/core.py b/peakipy/core.py
index c48fd044..5a8b2dce 100644
--- a/peakipy/core.py
+++ b/peakipy/core.py
@@ -24,7 +24,7 @@ import json
 from datetime import datetime
 from pathlib import Path
-from typing import List
+from typing import List, Optional
 from enum import Enum

 import numpy as np
@@ -496,7 +496,6 @@ def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV):
     param_dict = {}

     for index, peak in peaks.iterrows():
-
         str_form = lambda x: "%s%s" % (to_prefix(peak.ASS), x)
         # using exact value of points (i.e decimal)
         param_dict[str_form("center_x")] = peak.X_AXISf
@@ -668,10 +667,10 @@ def update_params(
                 params[k].min = v - y_bound
                 params[k].max = v + y_bound
                 # pass
-                #print(
+                # print(
                 #     "setting limit of %s, min = %.3e, max = %.3e"
                 #     % (k, params[k].min, params[k].max)
-                #)
+                # )
         elif "sigma" in k:
             params[k].min = 0.0
             params[k].max = 1e4
@@ -746,6 +745,23 @@ def df_to_rich_table(df, title: str, columns: List[str], styles: str):
     return table


+def make_mask_from_peak_cluster(group, data):
+    # OR together the elliptical masks of every peak in the cluster;
+    # NB: the last peak in the cluster is returned alongside the mask
+    mask = np.zeros(data.shape, dtype=bool)
+    for index, peak in group.iterrows():
+        mask += make_mask(
+            data, peak.X_AXISf, peak.Y_AXISf, peak.X_RADIUS, peak.Y_RADIUS
+        )
+    return mask, peak
+
+
+def select_planes_above_threshold_from_masked_data(data, threshold=None):
+    # keep only planes whose maximum masked intensity exceeds the threshold
+    if threshold is not None:
+        data = data[data.max(axis=1) > threshold]
+    return data
+
+
 def fit_first_plane(
     group,
     data,
@@ -756,6 +772,7 @@
     log=None,
     noise=1.0,
     fit_method="leastsq",
+    threshold: Optional[float] = None,
 ):
     """Deconvolute group of peaks

@@ -796,8 +813,6 @@
     :rtype: FitResult
     """
-
-    shape = data.shape
-    mask = np.zeros(shape, dtype=bool)
     match lineshape:
         case lineshape.PV | lineshape.G | lineshape.L:
             lineshape_function = pvoigt2d
@@ -821,10 +836,8 @@
     cen_x = [p_guess[k].value for k in p_guess if "center_x" in k]
     cen_y = [p_guess[k].value for k in p_guess if "center_y" in k]

-    for index, peak in group.iterrows():
-        mask += make_mask(
-            data, peak.X_AXISf, peak.Y_AXISf, peak.X_RADIUS, peak.Y_RADIUS
-        )
+    first_plane_data = data[0]
+    mask, peak = make_mask_from_peak_cluster(group, first_plane_data)

     x_radius = group.X_RADIUS.max()
     y_radius = group.Y_RADIUS.max()
@@ -839,6 +852,7 @@
     )

     #  deal with peaks on the edge of spectrum
+    shape = data.shape

     if min_y < 0:
         min_y = 0
@@ -851,7 +865,14 @@
     if max_x > shape[-1]:
         max_x = shape[-1]

-    peak_slices = data.copy()[mask]
+    # slice every plane with the cluster mask, drop planes at or below the
+    # intensity threshold, then sum the survivors for the initial fit
+    peak_slices = np.array([d[mask] for d in data])
+    peak_slices = select_planes_above_threshold_from_masked_data(peak_slices, threshold)
+    peak_slices = peak_slices.sum(axis=0)
+
     #  must be a better way to make the meshgrid
     x = np.arange(shape[-1])
     y = np.arange(shape[-2])
@@ -871,7 +892,7 @@
     z_sim = mod.eval(XY=XY, params=out.params)
     z_sim[~mask] = np.nan

-    z_plot = data.copy()
+    z_plot = first_plane_data.copy()
     z_plot[~mask] = np.nan

     #  also if peak position changed significantly from start then add warning
@@ -1435,7 +1456,6 @@ def __init__(
         posF2="Position F1",
         verbose=False,
     ):
-
         dic, data = ng.pipe.read(data_path)
Pseudo3D.__init__(self, dic, data, dims) self.fmt = fmt @@ -1649,7 +1669,6 @@ def add_fix_bound_columns(self): pass def _read_analysis(self): - df = pd.read_csv(self.peaklist_path, delimiter="\t") new_columns = [self.analysis_to_pipe_dic.get(i, i) for i in df.columns] pipe_columns = dict(zip(df.columns, new_columns)) @@ -1658,7 +1677,6 @@ def _read_analysis(self): return df def _read_assign(self): - df = pd.read_csv(self.peaklist_path, delimiter="\t") new_columns = [self.assign_to_pipe_dic.get(i, i) for i in df.columns] pipe_columns = dict(zip(df.columns, new_columns)) @@ -1667,7 +1685,6 @@ def _read_assign(self): return df def _read_sparky(self): - df = pd.read_csv( self.peaklist_path, skiprows=1, @@ -1988,7 +2005,6 @@ class LoadData(Peaklist): """ def read_peaklist(self): - if self.peaklist_path.suffix == ".csv": self.df = pd.read_csv(self.peaklist_path) # , comment="#") @@ -2003,7 +2019,6 @@ def read_peaklist(self): return self.df def check_data_frame(self): - # make diameter columns if "X_DIAMETER_PPM" in self.df.columns: pass diff --git a/pyproject.toml b/pyproject.toml index 18a39c91..938d56ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "peakipy" -version = "1.1.2" +version = "1.1.3" description = "Deconvolute overlapping NMR peaks" authors = ["Jacob Brady "] From 2fb83f0766b0d242a3389021b360392baabfe39e Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 28 Jan 2024 23:28:12 -0500 Subject: [PATCH 02/37] now using bokeh contours --- peakipy/cli/edit.py | 148 ++++++++++++++++++++++++-------------------- 1 file changed, 80 insertions(+), 68 deletions(-) diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index ccb9e56a..70b0f0bd 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -13,6 +13,7 @@ import nmrglue as ng import numpy as np import matplotlib.pyplot as plt +import pandas as pd from matplotlib.cm import magma, autumn, viridis from skimage.filters import threshold_otsu from rich import print @@ -37,7 +38,8 @@ Div, ) from bokeh.plotting import figure -from bokeh.palettes import PuBuGn9, Category20 +from bokeh.plotting.contour import contour_data +from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256 from peakipy.core import LoadData, read_config, StrucEl @@ -46,17 +48,14 @@ class BokehScript: - def __init__( - self, peaklist_path: Path, data_path: Path - ): - + def __init__(self, peaklist_path: Path, data_path: Path): self._path = peaklist_path self._data_path = data_path args, config = read_config({}) - #self.args = args - #self.config = config - self._dims = config.get("dims", [0,1,2]) - self.thres = config.get("thres",1e6) + # self.args = args + # self.config = config + self._dims = config.get("dims", [0, 1, 2]) + self.thres = config.get("thres", 1e6) self._peakipy_data = LoadData( self._path, self._data_path, dims=self._dims, verbose=True ) @@ -192,21 +191,27 @@ def setup_plot(self): self.peakipy_data.f1_ppm_0, self.peakipy_data.f1_ppm_1, ) - self.spec_source = get_contour_data( - self.peakipy_data.data[0], cl, extent=self.extent, cmap=viridis - ) - #  negative contours - self.spec_source_neg = get_contour_data( - self.peakipy_data.data[0] * -1.0, cl, extent=self.extent, cmap=autumn + + self.x_ppm_mesh, self.y_ppm_mesh = np.meshgrid( + self.peakipy_data.f2_ppm_scale, self.peakipy_data.f1_ppm_scale ) - self.p.multi_line( - xs="xs", ys="ys", line_color="line_color", source=self.spec_source + self.positive_contour_renderer = self.p.contour( + self.x_ppm_mesh, + self.y_ppm_mesh, + 
self.peakipy_data.data[0], + cl, + fill_color=Viridis256, + line_color="black", ) - self.p.multi_line( - xs="xs", ys="ys", line_color="line_color", source=self.spec_source_neg + self.negative_contour_renderer = self.p.contour( + self.x_ppm_mesh, + self.y_ppm_mesh, + self.peakipy_data.data[0] * -1.0, + cl, + fill_color=Reds256, + line_color="black", ) - # contour_num = Slider(title="contour number", value=20, start=1, end=50,step=1) - # contour_start = Slider(title="contour start", value=100000, start=1000, end=10000000,step=1000) + self.contour_start = TextInput( value="%.2e" % self.thres, title="Contour level:", width=100 ) @@ -223,7 +228,7 @@ def setup_plot(self): height="Y_DIAMETER_PPM", source=self.source, fill_color="color", - fill_alpha=0.1, + fill_alpha=0.25, line_dash="dotted", line_color="red", ) @@ -425,7 +430,8 @@ def setup_plot(self): row( column(column(self.ls_div), column(self.radio_button_group)), column(column(self.select_plane), column(self.checkbox_group)), - ), max_width=400, + ), + max_width=400, ) # reclustering tab @@ -448,12 +454,22 @@ def setup_plot(self): fitting_layout = fitting_controls log_layout = self.fit_reports_div recluster_layout = column( - row(self.clust_div,), - row(column( - self.contour_start, self.struct_el, self.struct_el_size, self.recluster - )), max_width=400, + row( + self.clust_div, + ), + row( + column( + self.contour_start, + self.struct_el, + self.struct_el_size, + self.recluster, + ) + ), + max_width=400, + ) + save_layout = column( + self.savefilename, self.button, self.exit_button, max_width=400 ) - save_layout = column(self.savefilename, self.button, self.exit_button, max_width=400) fitting_tab = TabPanel(child=fitting_layout, title="Peak fitting") log_tab = TabPanel(child=log_layout, title="Log") @@ -487,7 +503,6 @@ def recluster_peaks(self, event): return self.peakipy_data.df def update_memcnt(self): - for ind, group in self.peakipy_data.df.groupby("CLUSTID"): self.peakipy_data.df.loc[group.index, "MEMCNT"] = len(group) @@ -504,7 +519,6 @@ def update_memcnt(self): return self.peakipy_data.df def fit_selected(self, event): - selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] @@ -550,7 +564,6 @@ def fit_selected(self, event): os.system(plot_command) def save_peaks(self, event): - if self.savefilename.value: to_save = Path(self.savefilename.value) else: @@ -630,11 +643,14 @@ def peak_pick_callback(self, event): "include": "yes", "color": "black", } - self.peakipy_data.df = self.peakipy_data.df.append(new_peak, ignore_index=True) + new_peak = {k: [v] for k, v in new_peak.items()} + new_peak = pd.DataFrame(new_peak) + self.peakipy_data.df = pd.concat( + [self.peakipy_data.df, new_peak], ignore_index=True + ) self.update_memcnt() def slider_callback_x(self, attrname, old, new): - selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] self.peakipy_data.df.loc[selectionIndex, "X_RADIUS"] = ( @@ -657,7 +673,6 @@ def slider_callback_x(self, attrname, old, new): self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) def slider_callback_y(self, attrname, old, new): - selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS"] = ( @@ -713,7 +728,6 @@ def slider_callback_y(self, attrname, old, new): # self.slider_callback(attrname, old, new, dim="Y") def update_contour(self, attrname, old, new): - new_cs = eval(self.contour_start.value) cl = new_cs * 
self.contour_factor ** np.arange(self.contour_num) if len(cl) > 1 and np.min(np.diff(cl)) <= 0.0: @@ -723,61 +737,59 @@ def update_contour(self, attrname, old, new): pos_neg = self.pos_neg_contour_dic[self.pos_neg_contour_radiobutton.active] if pos_neg == "pos/neg": - self.spec_source.data = dict( - get_contour_data( + self.positive_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, self.peakipy_data.data[plane_index], cl, - extent=self.extent, - cmap=viridis, - ).data + ) ) - self.spec_source_neg.data = dict( - get_contour_data( + self.negative_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, self.peakipy_data.data[plane_index] * -1.0, cl, - extent=self.extent, - cmap=autumn, - ).data + ) ) elif pos_neg == "pos": - self.spec_source.data = dict( - get_contour_data( + self.positive_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, self.peakipy_data.data[plane_index], cl, - extent=self.extent, - cmap=viridis, - ).data + ) ) - self.spec_source_neg.data = dict( - get_contour_data( - self.peakipy_data.data[plane_index] * 0.0, + self.negative_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, + self.peakipy_data.data[plane_index] * 0, cl, - extent=self.extent, - cmap=autumn, - ).data + ) ) elif pos_neg == "neg": - self.spec_source.data = dict( - get_contour_data( + self.positive_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, self.peakipy_data.data[plane_index] * 0.0, cl, - extent=self.extent, - cmap=viridis, - ).data + ) ) - self.spec_source_neg.data = dict( - get_contour_data( + self.negative_contour_renderer.set_data( + contour_data( + self.x_ppm_mesh, + self.y_ppm_mesh, self.peakipy_data.data[plane_index] * -1.0, cl, - extent=self.extent, - cmap=autumn, - ).data + ) ) - # print("Value of checkbox",checkbox_group.active) - def exit_edit_peaks(self, event): sys.exit() From 2a6cb33c9c9727fa71852bfccd6b6cf884809176 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 28 Jan 2024 23:29:06 -0500 Subject: [PATCH 03/37] removed bug in which pdf.savefig() call breaks interactive 3D plot --- peakipy/cli/main.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 09d0dbaf..590f7efc 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -37,7 +37,7 @@ import matplotlib import matplotlib.pyplot as plt -from mpl_toolkits.mplot3d import Axes3D +from mpl_toolkits.mplot3d import axes3d from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages from matplotlib.widgets import Button @@ -1044,8 +1044,8 @@ def check( plt = matplotlib.pyplot fig = plt.figure(figsize=(10, 6)) - ax = fig.add_subplot(111, projection="3d") - # slice out plot area + ax = fig.add_subplot(projection="3d") + ## slice out plot area x_plot = pseudo3D.uc_f2.ppm(X[min_y:max_y, min_x:max_x]) y_plot = pseudo3D.uc_f1.ppm(Y[min_y:max_y, min_x:max_x]) masked_data = masked_data[min_y:max_y, min_x:max_x] @@ -1066,7 +1066,8 @@ def check( ) ) plt.close() - # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") + + #print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") elif masked_data.shape[0] == 0 or masked_data.shape[1] == 0: print( f"[red]Nothing to plot for cluster {int(plane.clustid)}[/red]" @@ -1125,7 +1126,6 @@ def check( ) for c, z_single in zip(single_colors, sim_data_singles) ] - ax.plot_wireframe( x_plot, y_plot, @@ -1184,10 +1184,8 @@ def 
check( ) ax.legend() - pdf.savefig() if show: - def exit_program(event): exit() @@ -1204,6 +1202,9 @@ def next_plot(event): plt.show(windowTitle="", size=(1000, 500)) else: plt.show() + else: + pdf.savefig() + plt.close() From e27cb4b26db46289df3247d792b72c19bc5b237d Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 28 Jan 2024 23:29:38 -0500 Subject: [PATCH 04/37] updates --- poetry.lock | 1366 +++++++++++++++++++++++------------------------- pyproject.toml | 4 +- 2 files changed, 661 insertions(+), 709 deletions(-) diff --git a/poetry.lock b/poetry.lock index c50b296a..98af3b22 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,15 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. - -[[package]] -name = "appnope" -version = "0.1.3" -description = "Disable App Nap on macOS >= 10.9" -optional = false -python-versions = "*" -files = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "asteval" @@ -30,61 +19,51 @@ test = ["coverage", "pytest", "pytest-cov"] [[package]] name = "asttokens" -version = "2.4.0" +version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" files = [ - {file = "asttokens-2.4.0-py2.py3-none-any.whl", hash = "sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"}, - {file = "asttokens-2.4.0.tar.gz", hash = "sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e"}, + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, ] [package.dependencies] six = ">=1.12.0" [package.extras] -test = ["astroid", "pytest"] - -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -optional = false -python-versions = "*" -files = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "black" -version = "23.9.1" +version = "23.12.1" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.8" files = [ - {file = "black-23.9.1-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:d6bc09188020c9ac2555a498949401ab35bb6bf76d4e0f8ee251694664df6301"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:13ef033794029b85dfea8032c9d3b92b42b526f1ff4bf13b2182ce4e917f5100"}, - {file = "black-23.9.1-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:75a2dc41b183d4872d3a500d2b9c9016e67ed95738a3624f4751a0cb4818fe71"}, - {file = "black-23.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13a2e4a93bb8ca74a749b6974925c27219bb3df4d42fc45e948a5d9feb5122b7"}, - {file = "black-23.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:adc3e4442eef57f99b5590b245a328aad19c99552e0bdc7f0b04db6656debd80"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:8431445bf62d2a914b541da7ab3e2b4f3bc052d2ccbf157ebad18ea126efb91f"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:8fc1ddcf83f996247505db6b715294eba56ea9372e107fd54963c7553f2b6dfe"}, - {file = "black-23.9.1-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:7d30ec46de88091e4316b17ae58bbbfc12b2de05e069030f6b747dfc649ad186"}, - {file = "black-23.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031e8c69f3d3b09e1aa471a926a1eeb0b9071f80b17689a655f7885ac9325a6f"}, - {file = "black-23.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:538efb451cd50f43aba394e9ec7ad55a37598faae3348d723b59ea8e91616300"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:638619a559280de0c2aa4d76f504891c9860bb8fa214267358f0a20f27c12948"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:a732b82747235e0542c03bf352c126052c0fbc458d8a239a94701175b17d4855"}, - {file = "black-23.9.1-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:cf3a4d00e4cdb6734b64bf23cd4341421e8953615cba6b3670453737a72ec204"}, - {file = "black-23.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf99f3de8b3273a8317681d8194ea222f10e0133a24a7548c73ce44ea1679377"}, - {file = "black-23.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:14f04c990259576acd093871e7e9b14918eb28f1866f91968ff5524293f9c573"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:c619f063c2d68f19b2d7270f4cf3192cb81c9ec5bc5ba02df91471d0b88c4c5c"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:6a3b50e4b93f43b34a9d3ef00d9b6728b4a722c997c99ab09102fd5efdb88325"}, - {file = "black-23.9.1-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c46767e8df1b7beefb0899c4a95fb43058fa8500b6db144f4ff3ca38eb2f6393"}, - {file = "black-23.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50254ebfa56aa46a9fdd5d651f9637485068a1adf42270148cd101cdf56e0ad9"}, - {file = "black-23.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:403397c033adbc45c2bd41747da1f7fc7eaa44efbee256b53842470d4ac5a70f"}, - {file = "black-23.9.1-py3-none-any.whl", hash = "sha256:6ccd59584cc834b6d127628713e4b6b968e5f79572da66284532525a042549f9"}, - {file = "black-23.9.1.tar.gz", hash = "sha256:24b6b3ff5c6d9ea08a8888f6977eae858e1f340d7260cf56d70a49823236b62d"}, + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = 
"black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, ] [package.dependencies] @@ -98,19 +77,19 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "bokeh" -version = "3.2.2" +version = "3.3.4" description = "Interactive plots and applications in the browser from Python" optional = false python-versions = ">=3.9" files = [ - {file = "bokeh-3.2.2-py3-none-any.whl", hash = 
"sha256:e31670a013e1ff15c3d4d04f587c8162c4cc9fe1af507fd741d295e6c4e1225b"}, - {file = "bokeh-3.2.2.tar.gz", hash = "sha256:b2959b8524d69ec4e7886bc36407445f0a92e1f19530d3bfc4045236a1b7a6ff"}, + {file = "bokeh-3.3.4-py3-none-any.whl", hash = "sha256:ad7b6f89d0a7c2be01eff1db0ca24e2755ac41de14539db919a62e791809c309"}, + {file = "bokeh-3.3.4.tar.gz", hash = "sha256:73b7982dc2b8df15bf660cdddc8d3825e829195c438015a5d09824f1a7028368"}, ] [package.dependencies] @@ -126,112 +105,112 @@ xyzservices = ">=2021.09.1" [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = 
"sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -275,87 +254,76 @@ test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] name = "contourpy" -version = "1.1.1" +version = "1.2.0" description = "Python library for calculating contours of 2D quadrilateral grids" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "contourpy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:46e24f5412c948d81736509377e255f6040e94216bf1a9b5ea1eaa9d29f6ec1b"}, - {file = "contourpy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e48694d6a9c5a26ee85b10130c77a011a4fedf50a7279fa0bdaf44bafb4299d"}, - {file = 
"contourpy-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66045af6cf00e19d02191ab578a50cb93b2028c3eefed999793698e9ea768ae"}, - {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ebf42695f75ee1a952f98ce9775c873e4971732a87334b099dde90b6af6a916"}, - {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6aec19457617ef468ff091669cca01fa7ea557b12b59a7908b9474bb9674cf0"}, - {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:462c59914dc6d81e0b11f37e560b8a7c2dbab6aca4f38be31519d442d6cde1a1"}, - {file = "contourpy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6d0a8efc258659edc5299f9ef32d8d81de8b53b45d67bf4bfa3067f31366764d"}, - {file = "contourpy-1.1.1-cp310-cp310-win32.whl", hash = "sha256:d6ab42f223e58b7dac1bb0af32194a7b9311065583cc75ff59dcf301afd8a431"}, - {file = "contourpy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:549174b0713d49871c6dee90a4b499d3f12f5e5f69641cd23c50a4542e2ca1eb"}, - {file = "contourpy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:407d864db716a067cc696d61fa1ef6637fedf03606e8417fe2aeed20a061e6b2"}, - {file = "contourpy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe80c017973e6a4c367e037cb31601044dd55e6bfacd57370674867d15a899b"}, - {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e30aaf2b8a2bac57eb7e1650df1b3a4130e8d0c66fc2f861039d507a11760e1b"}, - {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3de23ca4f381c3770dee6d10ead6fff524d540c0f662e763ad1530bde5112532"}, - {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:566f0e41df06dfef2431defcfaa155f0acfa1ca4acbf8fd80895b1e7e2ada40e"}, - {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04c2f0adaf255bf756cf08ebef1be132d3c7a06fe6f9877d55640c5e60c72c5"}, - {file = "contourpy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0c188ae66b772d9d61d43c6030500344c13e3f73a00d1dc241da896f379bb62"}, - {file = "contourpy-1.1.1-cp311-cp311-win32.whl", hash = "sha256:0683e1ae20dc038075d92e0e0148f09ffcefab120e57f6b4c9c0f477ec171f33"}, - {file = "contourpy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:8636cd2fc5da0fb102a2504fa2c4bea3cbc149533b345d72cdf0e7a924decc45"}, - {file = "contourpy-1.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:560f1d68a33e89c62da5da4077ba98137a5e4d3a271b29f2f195d0fba2adcb6a"}, - {file = "contourpy-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24216552104ae8f3b34120ef84825400b16eb6133af2e27a190fdc13529f023e"}, - {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56de98a2fb23025882a18b60c7f0ea2d2d70bbbcfcf878f9067234b1c4818442"}, - {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07d6f11dfaf80a84c97f1a5ba50d129d9303c5b4206f776e94037332e298dda8"}, - {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1eaac5257a8f8a047248d60e8f9315c6cff58f7803971170d952555ef6344a7"}, - {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19557fa407e70f20bfaba7d55b4d97b14f9480856c4fb65812e8a05fe1c6f9bf"}, - {file = "contourpy-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:081f3c0880712e40effc5f4c3b08feca6d064cb8cfbb372ca548105b86fd6c3d"}, - {file = "contourpy-1.1.1-cp312-cp312-win32.whl", hash = "sha256:059c3d2a94b930f4dafe8105bcdc1b21de99b30b51b5bce74c753686de858cb6"}, - {file = "contourpy-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:f44d78b61740e4e8c71db1cf1fd56d9050a4747681c59ec1094750a658ceb970"}, - {file = "contourpy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70e5a10f8093d228bb2b552beeb318b8928b8a94763ef03b858ef3612b29395d"}, - {file = "contourpy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8394e652925a18ef0091115e3cc191fef350ab6dc3cc417f06da66bf98071ae9"}, - {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bd5680f844c3ff0008523a71949a3ff5e4953eb7701b28760805bc9bcff217"}, - {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66544f853bfa85c0d07a68f6c648b2ec81dafd30f272565c37ab47a33b220684"}, - {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0c02b75acfea5cab07585d25069207e478d12309557f90a61b5a3b4f77f46ce"}, - {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41339b24471c58dc1499e56783fedc1afa4bb018bcd035cfb0ee2ad2a7501ef8"}, - {file = "contourpy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f29fb0b3f1217dfe9362ec55440d0743fe868497359f2cf93293f4b2701b8251"}, - {file = "contourpy-1.1.1-cp38-cp38-win32.whl", hash = "sha256:f9dc7f933975367251c1b34da882c4f0e0b2e24bb35dc906d2f598a40b72bfc7"}, - {file = "contourpy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:498e53573e8b94b1caeb9e62d7c2d053c263ebb6aa259c81050766beb50ff8d9"}, - {file = "contourpy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ba42e3810999a0ddd0439e6e5dbf6d034055cdc72b7c5c839f37a7c274cb4eba"}, - {file = "contourpy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c06e4c6e234fcc65435223c7b2a90f286b7f1b2733058bdf1345d218cc59e34"}, - {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6fab080484e419528e98624fb5c4282148b847e3602dc8dbe0cb0669469887"}, - {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93df44ab351119d14cd1e6b52a5063d3336f0754b72736cc63db59307dabb718"}, - {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eafbef886566dc1047d7b3d4b14db0d5b7deb99638d8e1be4e23a7c7ac59ff0f"}, - {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efe0fab26d598e1ec07d72cf03eaeeba8e42b4ecf6b9ccb5a356fde60ff08b85"}, - {file = "contourpy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f08e469821a5e4751c97fcd34bcb586bc243c39c2e39321822060ba902eac49e"}, - {file = "contourpy-1.1.1-cp39-cp39-win32.whl", hash = "sha256:bfc8a5e9238232a45ebc5cb3bfee71f1167064c8d382cadd6076f0d51cff1da0"}, - {file = "contourpy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c84fdf3da00c2827d634de4fcf17e3e067490c4aea82833625c4c8e6cdea0887"}, - {file = "contourpy-1.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:229a25f68046c5cf8067d6d6351c8b99e40da11b04d8416bf8d2b1d75922521e"}, - {file = "contourpy-1.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10dab5ea1bd4401c9483450b5b0ba5416be799bbd50fc7a6cc5e2a15e03e8a3"}, - {file = "contourpy-1.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:4f9147051cb8fdb29a51dc2482d792b3b23e50f8f57e3720ca2e3d438b7adf23"}, - {file = "contourpy-1.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a75cc163a5f4531a256f2c523bd80db509a49fc23721b36dd1ef2f60ff41c3cb"}, - {file = "contourpy-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b53d5769aa1f2d4ea407c65f2d1d08002952fac1d9e9d307aa2e1023554a163"}, - {file = "contourpy-1.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11b836b7dbfb74e049c302bbf74b4b8f6cb9d0b6ca1bf86cfa8ba144aedadd9c"}, - {file = "contourpy-1.1.1.tar.gz", hash = "sha256:96ba37c2e24b7212a77da85004c38e7c4d155d3e72a45eeaf22c1f03f607e8ab"}, + {file = "contourpy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0274c1cb63625972c0c007ab14dd9ba9e199c36ae1a231ce45d725cbcbfd10a8"}, + {file = "contourpy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ab459a1cbbf18e8698399c595a01f6dcc5c138220ca3ea9e7e6126232d102bb4"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdd887f17c2f4572ce548461e4f96396681212d858cae7bd52ba3310bc6f00f"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d16edfc3fc09968e09ddffada434b3bf989bf4911535e04eada58469873e28e"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c203f617abc0dde5792beb586f827021069fb6d403d7f4d5c2b543d87edceb9"}, + {file = "contourpy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b69303ceb2e4d4f146bf82fda78891ef7bcd80c41bf16bfca3d0d7eb545448aa"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:884c3f9d42d7218304bc74a8a7693d172685c84bd7ab2bab1ee567b769696df9"}, + {file = "contourpy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4a1b1208102be6e851f20066bf0e7a96b7d48a07c9b0cfe6d0d4545c2f6cadab"}, + {file = "contourpy-1.2.0-cp310-cp310-win32.whl", hash = "sha256:34b9071c040d6fe45d9826cbbe3727d20d83f1b6110d219b83eb0e2a01d79488"}, + {file = "contourpy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:bd2f1ae63998da104f16a8b788f685e55d65760cd1929518fd94cd682bf03e41"}, + {file = "contourpy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dd10c26b4eadae44783c45ad6655220426f971c61d9b239e6f7b16d5cdaaa727"}, + {file = "contourpy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c6b28956b7b232ae801406e529ad7b350d3f09a4fde958dfdf3c0520cdde0dd"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebeac59e9e1eb4b84940d076d9f9a6cec0064e241818bcb6e32124cc5c3e377a"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:139d8d2e1c1dd52d78682f505e980f592ba53c9f73bd6be102233e358b401063"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e9dc350fb4c58adc64df3e0703ab076f60aac06e67d48b3848c23647ae4310e"}, + {file = "contourpy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18fc2b4ed8e4a8fe849d18dce4bd3c7ea637758c6343a1f2bae1e9bd4c9f4686"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:16a7380e943a6d52472096cb7ad5264ecee36ed60888e2a3d3814991a0107286"}, + {file = "contourpy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8d8faf05be5ec8e02a4d86f616fc2a0322ff4a4ce26c0f09d9f7fb5330a35c95"}, + {file = "contourpy-1.2.0-cp311-cp311-win32.whl", hash = 
"sha256:67b7f17679fa62ec82b7e3e611c43a016b887bd64fb933b3ae8638583006c6d6"}, + {file = "contourpy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:99ad97258985328b4f207a5e777c1b44a83bfe7cf1f87b99f9c11d4ee477c4de"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:575bcaf957a25d1194903a10bc9f316c136c19f24e0985a2b9b5608bdf5dbfe0"}, + {file = "contourpy-1.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9e6c93b5b2dbcedad20a2f18ec22cae47da0d705d454308063421a3b290d9ea4"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:464b423bc2a009088f19bdf1f232299e8b6917963e2b7e1d277da5041f33a779"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ce4788b7d93e47f84edd3f1f95acdcd142ae60bc0e5493bfd120683d2d4316"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7d1f8871998cdff5d2ff6a087e5e1780139abe2838e85b0b46b7ae6cc25399"}, + {file = "contourpy-1.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e739530c662a8d6d42c37c2ed52a6f0932c2d4a3e8c1f90692ad0ce1274abe0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:247b9d16535acaa766d03037d8e8fb20866d054d3c7fbf6fd1f993f11fc60ca0"}, + {file = "contourpy-1.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:461e3ae84cd90b30f8d533f07d87c00379644205b1d33a5ea03381edc4b69431"}, + {file = "contourpy-1.2.0-cp312-cp312-win32.whl", hash = "sha256:1c2559d6cffc94890b0529ea7eeecc20d6fadc1539273aa27faf503eb4656d8f"}, + {file = "contourpy-1.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:491b1917afdd8638a05b611a56d46587d5a632cabead889a5440f7c638bc6ed9"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5fd1810973a375ca0e097dee059c407913ba35723b111df75671a1976efa04bc"}, + {file = "contourpy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:999c71939aad2780f003979b25ac5b8f2df651dac7b38fb8ce6c46ba5abe6ae9"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7caf9b241464c404613512d5594a6e2ff0cc9cb5615c9475cc1d9b514218ae8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:266270c6f6608340f6c9836a0fb9b367be61dde0c9a9a18d5ece97774105ff3e"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbd50d0a0539ae2e96e537553aff6d02c10ed165ef40c65b0e27e744a0f10af8"}, + {file = "contourpy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11f8d2554e52f459918f7b8e6aa20ec2a3bce35ce95c1f0ef4ba36fbda306df5"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ce96dd400486e80ac7d195b2d800b03e3e6a787e2a522bfb83755938465a819e"}, + {file = "contourpy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6d3364b999c62f539cd403f8123ae426da946e142312a514162adb2addd8d808"}, + {file = "contourpy-1.2.0-cp39-cp39-win32.whl", hash = "sha256:1c88dfb9e0c77612febebb6ac69d44a8d81e3dc60f993215425b62c1161353f4"}, + {file = "contourpy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:78e6ad33cf2e2e80c5dfaaa0beec3d61face0fb650557100ee36db808bfa6843"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be16975d94c320432657ad2402f6760990cb640c161ae6da1363051805fa8108"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b95a225d4948b26a28c08307a60ac00fb8671b14f2047fc5476613252a129776"}, + {file = "contourpy-1.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d7e03c0f9a4f90dc18d4e77e9ef4ec7b7bbb437f7f675be8e530d65ae6ef956"}, + {file = "contourpy-1.2.0.tar.gz", hash = "sha256:171f311cb758de7da13fc53af221ae47a5877be5a0843a9fe150818c51ed276a"}, ] [package.dependencies] -numpy = [ - {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""}, - {version = ">=1.26.0rc1,<2.0", markers = "python_version >= \"3.12\""}, -] +numpy = ">=1.20,<2.0" [package.extras] bokeh = ["bokeh", "selenium"] docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.4.1)", "types-Pillow"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pillow"] test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "wurlitzer"] +test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] [[package]] name = "cycler" -version = "0.12.0" +version = "0.12.1" description = "Composable style cycles" optional = false python-versions = ">=3.8" files = [ - {file = "cycler-0.12.0-py3-none-any.whl", hash = "sha256:7896994252d006771357777d0251f3e34d266f4fa5f2c572247a80ab01440947"}, - {file = "cycler-0.12.0.tar.gz", hash = "sha256:8cc3a7b4861f91b1095157f9916f748549a617046e67eb7619abed9b34d2c94a"}, + {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, + {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, ] [package.extras] @@ -375,13 +343,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.3" +version = "1.2.0" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, - {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, ] [package.extras] @@ -389,13 +357,13 @@ test = ["pytest (>=6)"] [[package]] name = "executing" -version = "2.0.0" +version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "executing-2.0.0-py2.py3-none-any.whl", hash = "sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657"}, - {file = "executing-2.0.0.tar.gz", hash = "sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"}, + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] @@ -403,59 +371,59 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "fonttools" -version = "4.43.0" +version = "4.47.2" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = 
"fonttools-4.43.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ab80e7d6bb01316d5fc8161a2660ca2e9e597d0880db4927bc866c76474472ef"}, - {file = "fonttools-4.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82d8e687a42799df5325e7ee12977b74738f34bf7fde1c296f8140efd699a213"}, - {file = "fonttools-4.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d08a694b280d615460563a6b4e2afb0b1b9df708c799ec212bf966652b94fc84"}, - {file = "fonttools-4.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d654d3e780e0ceabb1f4eff5a3c042c67d4428d0fe1ea3afd238a721cf171b3"}, - {file = "fonttools-4.43.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20fc43783c432862071fa76da6fa714902ae587bc68441e12ff4099b94b1fcef"}, - {file = "fonttools-4.43.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:33c40a657fb87ff83185828c0323032d63a4df1279d5c1c38e21f3ec56327803"}, - {file = "fonttools-4.43.0-cp310-cp310-win32.whl", hash = "sha256:b3813f57f85bbc0e4011a0e1e9211f9ee52f87f402e41dc05bc5135f03fa51c1"}, - {file = "fonttools-4.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:05056a8c9af048381fdb17e89b17d45f6c8394176d01e8c6fef5ac96ea950d38"}, - {file = "fonttools-4.43.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:da78f39b601ed0b4262929403186d65cf7a016f91ff349ab18fdc5a7080af465"}, - {file = "fonttools-4.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5056f69a18f3f28ab5283202d1efcfe011585d31de09d8560f91c6c88f041e92"}, - {file = "fonttools-4.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcc01cea0a121fb0c009993497bad93cae25e77db7dee5345fec9cce1aaa09cd"}, - {file = "fonttools-4.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee728d5af70f117581712966a21e2e07031e92c687ef1fdc457ac8d281016f64"}, - {file = "fonttools-4.43.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b5e760198f0b87e42478bb35a6eae385c636208f6f0d413e100b9c9c5efafb6a"}, - {file = "fonttools-4.43.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af38f5145258e9866da5881580507e6d17ff7756beef175d13213a43a84244e9"}, - {file = "fonttools-4.43.0-cp311-cp311-win32.whl", hash = "sha256:25620b738d4533cfc21fd2a4f4b667e481f7cb60e86b609799f7d98af657854e"}, - {file = "fonttools-4.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:635658464dccff6fa5c3b43fe8f818ae2c386ee6a9e1abc27359d1e255528186"}, - {file = "fonttools-4.43.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a682fb5cbf8837d1822b80acc0be5ff2ea0c49ca836e468a21ffd388ef280fd3"}, - {file = "fonttools-4.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3d7adfa342e6b3a2b36960981f23f480969f833d565a4eba259c2e6f59d2674f"}, - {file = "fonttools-4.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa67d1e720fdd902fde4a59d0880854ae9f19fc958f3e1538bceb36f7f4dc92"}, - {file = "fonttools-4.43.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e5113233a2df07af9dbf493468ce526784c3b179c0e8b9c7838ced37c98b69"}, - {file = "fonttools-4.43.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:57c22e5f9f53630d458830f710424dce4f43c5f0d95cb3368c0f5178541e4db7"}, - {file = "fonttools-4.43.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:206808f9717c9b19117f461246372a2c160fa12b9b8dbdfb904ab50ca235ba0a"}, - {file = "fonttools-4.43.0-cp312-cp312-win32.whl", hash = "sha256:f19c2b1c65d57cbea25cabb80941fea3fbf2625ff0cdcae8900b5fb1c145704f"}, - 
{file = "fonttools-4.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7c76f32051159f8284f1a5f5b605152b5a530736fb8b55b09957db38dcae5348"}, - {file = "fonttools-4.43.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e3f8acc6ef4a627394021246e099faee4b343afd3ffe2e517d8195b4ebf20289"}, - {file = "fonttools-4.43.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a68b71adc3b3a90346e4ac92f0a69ab9caeba391f3b04ab6f1e98f2c8ebe88e3"}, - {file = "fonttools-4.43.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ace0fd5afb79849f599f76af5c6aa5e865bd042c811e4e047bbaa7752cc26126"}, - {file = "fonttools-4.43.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9660e70a2430780e23830476332bc3391c3c8694769e2c0032a5038702a662"}, - {file = "fonttools-4.43.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:48078357984214ccd22d7fe0340cd6ff7286b2f74f173603a1a9a40b5dc25afe"}, - {file = "fonttools-4.43.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d27d960e10cf7617d70cf3104c32a69b008dde56f2d55a9bed4ba6e3df611544"}, - {file = "fonttools-4.43.0-cp38-cp38-win32.whl", hash = "sha256:a6a2e99bb9ea51e0974bbe71768df42c6dd189308c22f3f00560c3341b345646"}, - {file = "fonttools-4.43.0-cp38-cp38-win_amd64.whl", hash = "sha256:030355fbb0cea59cf75d076d04d3852900583d1258574ff2d7d719abf4513836"}, - {file = "fonttools-4.43.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:52e77f23a9c059f8be01a07300ba4c4d23dc271d33eed502aea5a01ab5d2f4c1"}, - {file = "fonttools-4.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6a530fa28c155538d32214eafa0964989098a662bd63e91e790e6a7a4e9c02da"}, - {file = "fonttools-4.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70f021a6b9eb10dfe7a411b78e63a503a06955dd6d2a4e130906d8760474f77c"}, - {file = "fonttools-4.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:812142a0e53cc853964d487e6b40963df62f522b1b571e19d1ff8467d7880ceb"}, - {file = "fonttools-4.43.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ace51902ab67ef5fe225e8b361039e996db153e467e24a28d35f74849b37b7ce"}, - {file = "fonttools-4.43.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8dfd8edfce34ad135bd69de20c77449c06e2c92b38f2a8358d0987737f82b49e"}, - {file = "fonttools-4.43.0-cp39-cp39-win32.whl", hash = "sha256:e5d53eddaf436fa131042f44a76ea1ead0a17c354ab9de0d80e818f0cb1629f1"}, - {file = "fonttools-4.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:93c5b6d77baf28f306bc13fa987b0b13edca6a39dc2324eaca299a74ccc6316f"}, - {file = "fonttools-4.43.0-py3-none-any.whl", hash = "sha256:e4bc589d8da09267c7c4ceaaaa4fc01a7908ac5b43b286ac9279afe76407c384"}, - {file = "fonttools-4.43.0.tar.gz", hash = "sha256:b62a53a4ca83c32c6b78cac64464f88d02929779373c716f738af6968c8c821e"}, + {file = "fonttools-4.47.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3b629108351d25512d4ea1a8393a2dba325b7b7d7308116b605ea3f8e1be88df"}, + {file = "fonttools-4.47.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c19044256c44fe299d9a73456aabee4b4d06c6b930287be93b533b4737d70aa1"}, + {file = "fonttools-4.47.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8be28c036b9f186e8c7eaf8a11b42373e7e4949f9e9f370202b9da4c4c3f56c"}, + {file = "fonttools-4.47.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f83a4daef6d2a202acb9bf572958f91cfde5b10c8ee7fb1d09a4c81e5d851fd8"}, + {file = "fonttools-4.47.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:4a5a5318ba5365d992666ac4fe35365f93004109d18858a3e18ae46f67907670"}, + {file = "fonttools-4.47.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8f57ecd742545362a0f7186774b2d1c53423ed9ece67689c93a1055b236f638c"}, + {file = "fonttools-4.47.2-cp310-cp310-win32.whl", hash = "sha256:a1c154bb85dc9a4cf145250c88d112d88eb414bad81d4cb524d06258dea1bdc0"}, + {file = "fonttools-4.47.2-cp310-cp310-win_amd64.whl", hash = "sha256:3e2b95dce2ead58fb12524d0ca7d63a63459dd489e7e5838c3cd53557f8933e1"}, + {file = "fonttools-4.47.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:29495d6d109cdbabe73cfb6f419ce67080c3ef9ea1e08d5750240fd4b0c4763b"}, + {file = "fonttools-4.47.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0a1d313a415eaaba2b35d6cd33536560deeebd2ed758b9bfb89ab5d97dc5deac"}, + {file = "fonttools-4.47.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90f898cdd67f52f18049250a6474185ef6544c91f27a7bee70d87d77a8daf89c"}, + {file = "fonttools-4.47.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3480eeb52770ff75140fe7d9a2ec33fb67b07efea0ab5129c7e0c6a639c40c70"}, + {file = "fonttools-4.47.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0255dbc128fee75fb9be364806b940ed450dd6838672a150d501ee86523ac61e"}, + {file = "fonttools-4.47.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f791446ff297fd5f1e2247c188de53c1bfb9dd7f0549eba55b73a3c2087a2703"}, + {file = "fonttools-4.47.2-cp311-cp311-win32.whl", hash = "sha256:740947906590a878a4bde7dd748e85fefa4d470a268b964748403b3ab2aeed6c"}, + {file = "fonttools-4.47.2-cp311-cp311-win_amd64.whl", hash = "sha256:63fbed184979f09a65aa9c88b395ca539c94287ba3a364517698462e13e457c9"}, + {file = "fonttools-4.47.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4ec558c543609e71b2275c4894e93493f65d2f41c15fe1d089080c1d0bb4d635"}, + {file = "fonttools-4.47.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e040f905d542362e07e72e03612a6270c33d38281fd573160e1003e43718d68d"}, + {file = "fonttools-4.47.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dd58cc03016b281bd2c74c84cdaa6bd3ce54c5a7f47478b7657b930ac3ed8eb"}, + {file = "fonttools-4.47.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32ab2e9702dff0dd4510c7bb958f265a8d3dd5c0e2547e7b5f7a3df4979abb07"}, + {file = "fonttools-4.47.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a808f3c1d1df1f5bf39be869b6e0c263570cdafb5bdb2df66087733f566ea71"}, + {file = "fonttools-4.47.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac71e2e201df041a2891067dc36256755b1229ae167edbdc419b16da78732c2f"}, + {file = "fonttools-4.47.2-cp312-cp312-win32.whl", hash = "sha256:69731e8bea0578b3c28fdb43dbf95b9386e2d49a399e9a4ad736b8e479b08085"}, + {file = "fonttools-4.47.2-cp312-cp312-win_amd64.whl", hash = "sha256:b3e1304e5f19ca861d86a72218ecce68f391646d85c851742d265787f55457a4"}, + {file = "fonttools-4.47.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:254d9a6f7be00212bf0c3159e0a420eb19c63793b2c05e049eb337f3023c5ecc"}, + {file = "fonttools-4.47.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eabae77a07c41ae0b35184894202305c3ad211a93b2eb53837c2a1143c8bc952"}, + {file = "fonttools-4.47.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86a5ab2873ed2575d0fcdf1828143cfc6b977ac448e3dc616bb1e3d20efbafa"}, + {file = "fonttools-4.47.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:13819db8445a0cec8c3ff5f243af6418ab19175072a9a92f6cc8ca7d1452754b"}, + {file = "fonttools-4.47.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4e743935139aa485fe3253fc33fe467eab6ea42583fa681223ea3f1a93dd01e6"}, + {file = "fonttools-4.47.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d49ce3ea7b7173faebc5664872243b40cf88814ca3eb135c4a3cdff66af71946"}, + {file = "fonttools-4.47.2-cp38-cp38-win32.whl", hash = "sha256:94208ea750e3f96e267f394d5588579bb64cc628e321dbb1d4243ffbc291b18b"}, + {file = "fonttools-4.47.2-cp38-cp38-win_amd64.whl", hash = "sha256:0f750037e02beb8b3569fbff701a572e62a685d2a0e840d75816592280e5feae"}, + {file = "fonttools-4.47.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d71606c9321f6701642bd4746f99b6089e53d7e9817fc6b964e90d9c5f0ecc6"}, + {file = "fonttools-4.47.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:86e0427864c6c91cf77f16d1fb9bf1bbf7453e824589e8fb8461b6ee1144f506"}, + {file = "fonttools-4.47.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a00bd0e68e88987dcc047ea31c26d40a3c61185153b03457956a87e39d43c37"}, + {file = "fonttools-4.47.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d77479fb885ef38a16a253a2f4096bc3d14e63a56d6246bfdb56365a12b20c"}, + {file = "fonttools-4.47.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5465df494f20a7d01712b072ae3ee9ad2887004701b95cb2cc6dcb9c2c97a899"}, + {file = "fonttools-4.47.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4c811d3c73b6abac275babb8aa439206288f56fdb2c6f8835e3d7b70de8937a7"}, + {file = "fonttools-4.47.2-cp39-cp39-win32.whl", hash = "sha256:5b60e3afa9635e3dfd3ace2757039593e3bd3cf128be0ddb7a1ff4ac45fa5a50"}, + {file = "fonttools-4.47.2-cp39-cp39-win_amd64.whl", hash = "sha256:7ee48bd9d6b7e8f66866c9090807e3a4a56cf43ffad48962725a190e0dd774c8"}, + {file = "fonttools-4.47.2-py3-none-any.whl", hash = "sha256:7eb7ad665258fba68fd22228a09f347469d95a97fb88198e133595947a20a184"}, + {file = "fonttools-4.47.2.tar.gz", hash = "sha256:7df26dd3650e98ca45f1e29883c96a0b9f5bb6af8d632a6a108bc744fa0bd9b3"}, ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.0.0)", "xattr", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "scipy"] +interpolatable = ["munkres", "pycairo", "scipy"] lxml = ["lxml (>=4.0,<5)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] @@ -463,7 +431,7 @@ repacker = ["uharfbuzz (>=0.23.0)"] symfont = ["sympy"] type1 = ["xattr"] ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.0.0)"] +unicode = ["unicodedata2 (>=15.1.0)"] woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] @@ -495,24 +463,24 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = 
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] name = "imageio" -version = "2.31.5" +version = "2.33.1" description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." optional = false python-versions = ">=3.8" files = [ - {file = "imageio-2.31.5-py3-none-any.whl", hash = "sha256:97f68e12ba676f2f4b541684ed81f7f3370dc347e8321bc68ee34d37b2dbac9f"}, - {file = "imageio-2.31.5.tar.gz", hash = "sha256:d8e53f9cd4054880276a3dac0a28c85ba7874084856a55a0294a8ae6ed7f3a8e"}, + {file = "imageio-2.33.1-py3-none-any.whl", hash = "sha256:c5094c48ccf6b2e6da8b4061cd95e1209380afafcbeae4a4e280938cce227e1d"}, + {file = "imageio-2.33.1.tar.gz", hash = "sha256:78722d40b137bd98f5ec7312119f8aea9ad2049f76f434748eb306b6937cc1ce"}, ] [package.dependencies] @@ -520,17 +488,18 @@ numpy = "*" pillow = ">=8.3.2" [package.extras] -all-plugins = ["astropy", "av", "imageio-ffmpeg", "psutil", "tifffile"] -all-plugins-pypy = ["av", "imageio-ffmpeg", "psutil", "tifffile"] +all-plugins = ["astropy", "av", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"] +all-plugins-pypy = ["av", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"] build = ["wheel"] dev = ["black", "flake8", "fsspec[github]", "pytest", "pytest-cov"] docs = ["numpydoc", "pydata-sphinx-theme", "sphinx (<6)"] ffmpeg = ["imageio-ffmpeg", "psutil"] fits = ["astropy"] -full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "itk", "numpydoc", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx (<6)", "tifffile", "wheel"] +full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "itk", "numpydoc", "pillow-heif", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx (<6)", "tifffile", "wheel"] gdal = ["gdal"] itk = ["itk"] linting = ["black", "flake8"] +pillow-heif = ["pillow-heif"] pyav = ["av"] test = ["fsspec[github]", "pytest", "pytest-cov"] tifffile = ["tifffile"] @@ -548,42 +517,39 @@ files = [ [[package]] name = "ipython" -version = "8.16.1" +version = "8.20.0" description = "IPython: Productive Interactive Computing" optional = false -python-versions = ">=3.9" +python-versions = ">=3.10" files = [ - {file = "ipython-8.16.1-py3-none-any.whl", hash = "sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e"}, - {file = "ipython-8.16.1.tar.gz", hash = "sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"}, + {file = "ipython-8.20.0-py3-none-any.whl", hash = "sha256:bc9716aad6f29f36c449e30821c9dd0c1c1a7b59ddcc26931685b87b4c569619"}, + {file = "ipython-8.20.0.tar.gz", hash = "sha256:2f21bd3fc1d51550c89ee3944ae04bbc7bc79e129ea0937da6e6c68bfdbf117a"}, ] [package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", 
"curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "jedi" @@ -606,13 +572,13 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.3" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, ] [package.dependencies] @@ -774,126 +740,135 @@ test = ["coverage", "flaky", "pytest", "pytest-cov"] [[package]] name = "markdown" -version = "3.4.4" +version = "3.5.2" description = "Python implementation of John Gruber's Markdown." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, - {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, + {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, + {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, ] [package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] +docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] [[package]] name = "markupsafe" -version = "2.1.3" +version = "2.1.4" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, - {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, - {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, - {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, - {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, - {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, - {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, + {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, + {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, + {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, + {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, + {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, + {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, + {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, ] [[package]] name = "matplotlib" -version = "3.8.0" +version = "3.8.2" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c4940bad88a932ddc69734274f6fb047207e008389489f2b6f77d9ca485f0e7a"}, - {file = "matplotlib-3.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a33bd3045c7452ca1fa65676d88ba940867880e13e2546abb143035fa9072a9d"}, - {file = "matplotlib-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea6886e93401c22e534bbfd39201ce8931b75502895cfb115cbdbbe2d31f287"}, - {file = "matplotlib-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d670b9348e712ec176de225d425f150dc8e37b13010d85233c539b547da0be39"}, - {file = "matplotlib-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b37b74f00c4cb6af908cb9a00779d97d294e89fd2145ad43f0cdc23f635760c"}, - {file = "matplotlib-3.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:0e723f5b96f3cd4aad99103dc93e9e3cdc4f18afdcc76951f4857b46f8e39d2d"}, - {file = "matplotlib-3.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5dc945a9cb2deb7d197ba23eb4c210e591d52d77bf0ba27c35fc82dec9fa78d4"}, - {file = "matplotlib-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8b5a1bf27d078453aa7b5b27f52580e16360d02df6d3dc9504f3d2ce11f6309"}, - {file = "matplotlib-3.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f25ffb6ad972cdffa7df8e5be4b1e3cadd2f8d43fc72085feb1518006178394"}, - {file = "matplotlib-3.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee482731c8c17d86d9ddb5194d38621f9b0f0d53c99006275a12523ab021732"}, - {file = "matplotlib-3.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36eafe2128772195b373e1242df28d1b7ec6c04c15b090b8d9e335d55a323900"}, - {file = "matplotlib-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:061ee58facb3580cd2d046a6d227fb77e9295599c5ec6ad069f06b5821ad1cfc"}, - {file = "matplotlib-3.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3cc3776836d0f4f22654a7f2d2ec2004618d5cf86b7185318381f73b80fd8a2d"}, - {file = "matplotlib-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c49a2bd6981264bddcb8c317b6bd25febcece9e2ebfcbc34e7f4c0c867c09dc"}, - {file = "matplotlib-3.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ed11654fc83cd6cfdf6170b453e437674a050a452133a064d47f2f1371f8d3"}, - {file = "matplotlib-3.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae97fdd6996b3a25da8ee43e3fc734fff502f396801063c6b76c20b56683196"}, - {file = "matplotlib-3.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:87df75f528020a6299f76a1d986c0ed4406e3b2bd44bc5e306e46bca7d45e53e"}, - {file = "matplotlib-3.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:90d74a95fe055f73a6cd737beecc1b81c26f2893b7a3751d52b53ff06ca53f36"}, - {file = "matplotlib-3.8.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:c3499c312f5def8f362a2bf761d04fa2d452b333f3a9a3f58805273719bf20d9"}, - {file = "matplotlib-3.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31e793c8bd4ea268cc5d3a695c27b30650ec35238626961d73085d5e94b6ab68"}, - {file = "matplotlib-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d5ee602ef517a89d1f2c508ca189cfc395dd0b4a08284fb1b97a78eec354644"}, - {file = "matplotlib-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de39dc61ca35342cf409e031f70f18219f2c48380d3886c1cf5ad9f17898e06"}, - {file = "matplotlib-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dd386c80a98b5f51571b9484bf6c6976de383cd2a8cd972b6a9562d85c6d2087"}, - {file = "matplotlib-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f691b4ef47c7384d0936b2e8ebdeb5d526c81d004ad9403dfb9d4c76b9979a93"}, - {file = "matplotlib-3.8.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b11f354aae62a2aa53ec5bb09946f5f06fc41793e351a04ff60223ea9162955"}, - {file = "matplotlib-3.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f54b9fb87ca5acbcdd0f286021bedc162e1425fa5555ebf3b3dfc167b955ad9"}, - {file = "matplotlib-3.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:60a6e04dfd77c0d3bcfee61c3cd335fff1b917c2f303b32524cd1235e194ef99"}, - {file = "matplotlib-3.8.0.tar.gz", hash = "sha256:df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69"}, + {file = "matplotlib-3.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:09796f89fb71a0c0e1e2f4bdaf63fb2cefc84446bb963ecdeb40dfee7dfa98c7"}, + {file = "matplotlib-3.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9c6976748a25e8b9be51ea028df49b8e561eed7809146da7a47dbecebab367"}, + {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78e4f2cedf303869b782071b55fdde5987fda3038e9d09e58c91cc261b5ad18"}, + {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e208f46cf6576a7624195aa047cb344a7f802e113bb1a06cfd4bee431de5e31"}, + {file = "matplotlib-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46a569130ff53798ea5f50afce7406e91fdc471ca1e0e26ba976a8c734c9427a"}, + {file = "matplotlib-3.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:830f00640c965c5b7f6bc32f0d4ce0c36dfe0379f7dd65b07a00c801713ec40a"}, + {file = "matplotlib-3.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d86593ccf546223eb75a39b44c32788e6f6440d13cfc4750c1c15d0fcb850b63"}, + {file = "matplotlib-3.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a5430836811b7652991939012f43d2808a2db9b64ee240387e8c43e2e5578c8"}, + {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9576723858a78751d5aacd2497b8aef29ffea6d1c95981505877f7ac28215c6"}, + {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba9cbd8ac6cf422f3102622b20f8552d601bf8837e49a3afed188d560152788"}, + {file = "matplotlib-3.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:03f9d160a29e0b65c0790bb07f4f45d6a181b1ac33eb1bb0dd225986450148f0"}, + {file = "matplotlib-3.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:3773002da767f0a9323ba1a9b9b5d00d6257dbd2a93107233167cfb581f64717"}, + {file = "matplotlib-3.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c318c1e95e2f5926fba326f68177dee364aa791d6df022ceb91b8221bd0a627"}, + {file = "matplotlib-3.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:091275d18d942cf1ee9609c830a1bc36610607d8223b1b981c37d5c9fc3e46a4"}, + {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b0f3b8ea0e99e233a4bcc44590f01604840d833c280ebb8fe5554fd3e6cfe8d"}, + {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b1704a530395aaf73912be741c04d181f82ca78084fbd80bc737be04848331"}, + {file = "matplotlib-3.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533b0e3b0c6768eef8cbe4b583731ce25a91ab54a22f830db2b031e83cca9213"}, + {file = "matplotlib-3.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:0f4fc5d72b75e2c18e55eb32292659cf731d9d5b312a6eb036506304f4675630"}, + {file = "matplotlib-3.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:deaed9ad4da0b1aea77fe0aa0cebb9ef611c70b3177be936a95e5d01fa05094f"}, + {file = "matplotlib-3.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:172f4d0fbac3383d39164c6caafd3255ce6fa58f08fc392513a0b1d3b89c4f89"}, + {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7d36c2209d9136cd8e02fab1c0ddc185ce79bc914c45054a9f514e44c787917"}, + {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5864bdd7da445e4e5e011b199bb67168cdad10b501750367c496420f2ad00843"}, + {file = "matplotlib-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8345b48e95cee45ff25192ed1f4857273117917a4dcd48e3905619bcd9c9b8"}, + {file = "matplotlib-3.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:7c48d9e221b637c017232e3760ed30b4e8d5dfd081daf327e829bf2a72c731b4"}, + {file = "matplotlib-3.8.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa11b3c6928a1e496c1a79917d51d4cd5d04f8a2e75f21df4949eeefdf697f4b"}, + {file = "matplotlib-3.8.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1095fecf99eeb7384dabad4bf44b965f929a5f6079654b681193edf7169ec20"}, + {file = "matplotlib-3.8.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bddfb1db89bfaa855912261c805bd0e10218923cc262b9159a49c29a7a1c1afa"}, + {file = "matplotlib-3.8.2.tar.gz", hash = "sha256:01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1"}, ] [package.dependencies] contourpy = ">=1.0.1" cycler = ">=0.10" fonttools = ">=4.22.0" -kiwisolver = ">=1.0.1" +kiwisolver = ">=1.3.1" numpy = ">=1.21,<2" packaging = ">=20.0" -pillow = ">=6.2.0" +pillow = ">=8" pyparsing = ">=2.3.1" python-dateutil = ">=2.7" -setuptools_scm = ">=7" [[package]] name = "matplotlib-inline" @@ -972,13 +947,13 @@ requests = ">=2.26" [[package]] name = "mkdocs-material-extensions" -version = "1.2" +version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mkdocs_material_extensions-1.2-py3-none-any.whl", hash = "sha256:c767bd6d6305f6420a50f0b541b0c9966d52068839af97029be14443849fb8a1"}, - {file = "mkdocs_material_extensions-1.2.tar.gz", hash = "sha256:27e2d1ed2d031426a6e10d5ea06989d67e90bb02acd588bc5673106b5ee5eedf"}, + {file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"}, + {file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"}, ] [[package]] @@ -994,21 +969,21 @@ files = [ [[package]] name = "networkx" -version = "3.1" +version = "3.2.1" description = "Python package for creating and manipulating graphs and networks" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, + {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, + {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, ] [package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] +default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nmrglue" @@ -1048,43 +1023,47 @@ build-sphinx = ["sphinx (>=1.3.1)"] [[package]] name = "numpy" -version = "1.26.0" +version = "1.26.3" description = "Fundamental package for array computing in Python" optional = false -python-versions = "<3.13,>=3.9" -files = [ - {file = "numpy-1.26.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f8db2f125746e44dce707dd44d4f4efeea8d7e2b43aace3f8d1f235cfa2733dd"}, - {file = "numpy-1.26.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0621f7daf973d34d18b4e4bafb210bbaf1ef5e0100b5fa750bd9cde84c7ac292"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51be5f8c349fdd1a5568e72713a21f518e7d6707bcf8503b528b88d33b57dc68"}, - {file = "numpy-1.26.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:767254ad364991ccfc4d81b8152912e53e103ec192d1bb4ea6b1f5a7117040be"}, - {file = "numpy-1.26.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:436c8e9a4bdeeee84e3e59614d38c3dbd3235838a877af8c211cfcac8a80b8d3"}, - {file = "numpy-1.26.0-cp310-cp310-win32.whl", hash = 
"sha256:c2e698cb0c6dda9372ea98a0344245ee65bdc1c9dd939cceed6bb91256837896"}, - {file = "numpy-1.26.0-cp310-cp310-win_amd64.whl", hash = "sha256:09aaee96c2cbdea95de76ecb8a586cb687d281c881f5f17bfc0fb7f5890f6b91"}, - {file = "numpy-1.26.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:637c58b468a69869258b8ae26f4a4c6ff8abffd4a8334c830ffb63e0feefe99a"}, - {file = "numpy-1.26.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:306545e234503a24fe9ae95ebf84d25cba1fdc27db971aa2d9f1ab6bba19a9dd"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6adc33561bd1d46f81131d5352348350fc23df4d742bb246cdfca606ea1208"}, - {file = "numpy-1.26.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e062aa24638bb5018b7841977c360d2f5917268d125c833a686b7cbabbec496c"}, - {file = "numpy-1.26.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:546b7dd7e22f3c6861463bebb000646fa730e55df5ee4a0224408b5694cc6148"}, - {file = "numpy-1.26.0-cp311-cp311-win32.whl", hash = "sha256:c0b45c8b65b79337dee5134d038346d30e109e9e2e9d43464a2970e5c0e93229"}, - {file = "numpy-1.26.0-cp311-cp311-win_amd64.whl", hash = "sha256:eae430ecf5794cb7ae7fa3808740b015aa80747e5266153128ef055975a72b99"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:166b36197e9debc4e384e9c652ba60c0bacc216d0fc89e78f973a9760b503388"}, - {file = "numpy-1.26.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f042f66d0b4ae6d48e70e28d487376204d3cbf43b84c03bac57e28dac6151581"}, - {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5e18e5b14a7560d8acf1c596688f4dfd19b4f2945b245a71e5af4ddb7422feb"}, - {file = "numpy-1.26.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f6bad22a791226d0a5c7c27a80a20e11cfe09ad5ef9084d4d3fc4a299cca505"}, - {file = "numpy-1.26.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4acc65dd65da28060e206c8f27a573455ed724e6179941edb19f97e58161bb69"}, - {file = "numpy-1.26.0-cp312-cp312-win32.whl", hash = "sha256:bb0d9a1aaf5f1cb7967320e80690a1d7ff69f1d47ebc5a9bea013e3a21faec95"}, - {file = "numpy-1.26.0-cp312-cp312-win_amd64.whl", hash = "sha256:ee84ca3c58fe48b8ddafdeb1db87388dce2c3c3f701bf447b05e4cfcc3679112"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4a873a8180479bc829313e8d9798d5234dfacfc2e8a7ac188418189bb8eafbd2"}, - {file = "numpy-1.26.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:914b28d3215e0c721dc75db3ad6d62f51f630cb0c277e6b3bcb39519bed10bd8"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c78a22e95182fb2e7874712433eaa610478a3caf86f28c621708d35fa4fd6e7f"}, - {file = "numpy-1.26.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f737708b366c36b76e953c46ba5827d8c27b7a8c9d0f471810728e5a2fe57c"}, - {file = "numpy-1.26.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b44e6a09afc12952a7d2a58ca0a2429ee0d49a4f89d83a0a11052da696440e49"}, - {file = "numpy-1.26.0-cp39-cp39-win32.whl", hash = "sha256:5671338034b820c8d58c81ad1dafc0ed5a00771a82fccc71d6438df00302094b"}, - {file = "numpy-1.26.0-cp39-cp39-win_amd64.whl", hash = "sha256:020cdbee66ed46b671429c7265cf00d8ac91c046901c55684954c3958525dab2"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0792824ce2f7ea0c82ed2e4fecc29bb86bee0567a080dacaf2e0a01fe7654369"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7d484292eaeb3e84a51432a94f53578689ffdea3f90e10c8b203a99be5af57d8"}, - {file = "numpy-1.26.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:186ba67fad3c60dbe8a3abff3b67a91351100f2661c8e2a80364ae6279720299"}, - {file = "numpy-1.26.0.tar.gz", hash = "sha256:f93fc78fe8bf15afe2b8d6b6499f1c73953169fad1e9a8dd086cdff3190e7fdf"}, +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, + {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, + {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, + {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, + {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, + {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, + {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, + {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, + {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, + {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, + {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, + {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, + {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, + {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, + {file = 
"numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, + {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, + {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, + {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, + {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, + {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, + {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, + {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, + {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, + {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, ] [[package]] @@ -1100,71 +1079,75 @@ files = [ [[package]] name = "pandas" -version = "2.1.1" +version = "2.2.0" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, - {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, - {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, - {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, - {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, - {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, - {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, - {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, - {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, - {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, - {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, - {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, - {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, - {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, - {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, - {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, - {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, - {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, + {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, + {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, + {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, + {file = 
"pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, + {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, + {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, + {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, + {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, + {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, + {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, + {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, + {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, + {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, + {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, + {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, + {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, + {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, ] [package.dependencies] numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, - 
{version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" -tzdata = ">=2022.1" +tzdata = ">=2022.7" [package.extras] -all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] -aws = ["s3fs (>=2022.05.0)"] -clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] -compression = ["zstandard (>=0.17.0)"] -computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] -feather = ["pyarrow (>=7.0.0)"] -fss = ["fsspec (>=2022.05.0)"] -gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] -hdf5 = ["tables (>=3.7.0)"] -html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] -mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] -parquet = ["pyarrow (>=7.0.0)"] -performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] -plot = ["matplotlib (>=3.6.1)"] -postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] -spss = ["pyreadstat (>=1.1.5)"] -sql-other = ["SQLAlchemy (>=1.4.36)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.8.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec 
(>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] [[package]] name = "parso" @@ -1183,24 +1166,24 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.11.2" +version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, - {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "patsy" -version = "0.5.3" +version = "0.5.6" description = "A Python package for describing statistical models and for building design matrices." optional = false python-versions = "*" files = [ - {file = "patsy-0.5.3-py2.py3-none-any.whl", hash = "sha256:7eb5349754ed6aa982af81f636479b1b8db9d5b1a6e957a6016ec0534b5c86b7"}, - {file = "patsy-0.5.3.tar.gz", hash = "sha256:bdc18001875e319bc91c812c1eb6a10be4bb13cb81eb763f466179dca3b67277"}, + {file = "patsy-0.5.6-py2.py3-none-any.whl", hash = "sha256:19056886fd8fa71863fa32f0eb090267f21fb74be00f19f5c70b2e9d76c883c6"}, + {file = "patsy-0.5.6.tar.gz", hash = "sha256:95c6d47a7222535f84bff7f63d7303f2e297747a598db89cf5c67f0c0c7d2cdb"}, ] [package.dependencies] @@ -1212,105 +1195,112 @@ test = ["pytest", "pytest-cov", "scipy"] [[package]] name = "pexpect" -version = "4.8.0" +version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" files = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, ] [package.dependencies] ptyprocess = ">=0.5" -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -optional = false -python-versions = "*" -files = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] - [[package]] name = "pillow" -version = "10.0.1" +version = "10.2.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, - {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, - {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, - {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, - {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, - {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, - {file = 
"Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, - {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, - {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = 
"sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = 
"pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "3.11.0" +version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"}, - {file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"}, + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, ] [package.extras] @@ -1319,13 +1309,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -1334,13 +1324,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prompt-toolkit" -version = "3.0.39" +version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" files = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, ] [package.dependencies] @@ -1373,31 +1363,32 @@ tests = ["pytest"] [[package]] name = "pygments" -version = "2.16.1" +version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, - {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.3" +version = "10.7" description = "Extension pack for Python Markdown." 
optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.3-py3-none-any.whl", hash = "sha256:77a82c621c58a83efc49a389159181d570e370fff9f810d3a4766a75fc678b66"}, - {file = "pymdown_extensions-10.3.tar.gz", hash = "sha256:94a0d8a03246712b64698af223848fd80aaf1ae4c4be29c8c61939b0467b5722"}, + {file = "pymdown_extensions-10.7-py3-none-any.whl", hash = "sha256:6ca215bc57bc12bf32b414887a68b810637d039124ed9b2e5bd3325cbb2c050c"}, + {file = "pymdown_extensions-10.7.tar.gz", hash = "sha256:c0d64d5cf62566f59e6b2b690a4095c931107c250a8c8e1351c1de5f6b036deb"}, ] [package.dependencies] -markdown = ">=3.2" +markdown = ">=3.5" pyyaml = "*" [package.extras] @@ -1419,13 +1410,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -1455,51 +1446,51 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2023.4" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, + {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] [[package]] name = "pywavelets" -version = "1.4.1" +version = "1.5.0" description = "PyWavelets, wavelet transform module" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "PyWavelets-1.4.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:d854411eb5ee9cb4bc5d0e66e3634aeb8f594210f6a1bed96dbed57ec70f181c"}, - {file = "PyWavelets-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:231b0e0b1cdc1112f4af3c24eea7bf181c418d37922a67670e9bf6cfa2d544d4"}, - {file = "PyWavelets-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:754fa5085768227c4f4a26c1e0c78bc509a266d9ebd0eb69a278be7e3ece943c"}, - {file = "PyWavelets-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da7b9c006171be1f9ddb12cc6e0d3d703b95f7f43cb5e2c6f5f15d3233fcf202"}, - {file = "PyWavelets-1.4.1-cp310-cp310-win32.whl", hash = "sha256:67a0d28a08909f21400cb09ff62ba94c064882ffd9e3a6b27880a111211d59bd"}, - {file = "PyWavelets-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91d3d393cffa634f0e550d88c0e3f217c96cfb9e32781f2960876f1808d9b45b"}, - {file = "PyWavelets-1.4.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:64c6bac6204327321db30b775060fbe8e8642316e6bff17f06b9f34936f88875"}, - {file = "PyWavelets-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:3f19327f2129fb7977bc59b966b4974dfd72879c093e44a7287500a7032695de"}, - {file = "PyWavelets-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad987748f60418d5f4138db89d82ba0cb49b086e0cbb8fd5c3ed4a814cfb705e"}, - {file = "PyWavelets-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:875d4d620eee655346e3589a16a73790cf9f8917abba062234439b594e706784"}, - {file = "PyWavelets-1.4.1-cp311-cp311-win32.whl", hash = "sha256:7231461d7a8eb3bdc7aa2d97d9f67ea5a9f8902522818e7e2ead9c2b3408eeb1"}, - {file = "PyWavelets-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:daf0aa79842b571308d7c31a9c43bc99a30b6328e6aea3f50388cd8f69ba7dbc"}, - {file = "PyWavelets-1.4.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:ab7da0a17822cd2f6545626946d3b82d1a8e106afc4b50e3387719ba01c7b966"}, - {file = "PyWavelets-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:578af438a02a86b70f1975b546f68aaaf38f28fb082a61ceb799816049ed18aa"}, - {file = "PyWavelets-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb5ca8d11d3f98e89e65796a2125be98424d22e5ada360a0dbabff659fca0fc"}, - {file = "PyWavelets-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:058b46434eac4c04dd89aeef6fa39e4b6496a951d78c500b6641fd5b2cc2f9f4"}, - {file = "PyWavelets-1.4.1-cp38-cp38-win32.whl", hash = "sha256:de7cd61a88a982edfec01ea755b0740e94766e00a1ceceeafef3ed4c85c605cd"}, - {file = "PyWavelets-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:7ab8d9db0fe549ab2ee0bea61f614e658dd2df419d5b75fba47baa761e95f8f2"}, - {file = "PyWavelets-1.4.1-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:23bafd60350b2b868076d976bdd92f950b3944f119b4754b1d7ff22b7acbf6c6"}, - {file = "PyWavelets-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d0e56cd7a53aed3cceca91a04d62feb3a0aca6725b1912d29546c26f6ea90426"}, - {file = "PyWavelets-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030670a213ee8fefa56f6387b0c8e7d970c7f7ad6850dc048bd7c89364771b9b"}, - {file = "PyWavelets-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71ab30f51ee4470741bb55fc6b197b4a2b612232e30f6ac069106f0156342356"}, - {file = "PyWavelets-1.4.1-cp39-cp39-win32.whl", hash = "sha256:47cac4fa25bed76a45bc781a293c26ac63e8eaae9eb8f9be961758d22b58649c"}, - {file = "PyWavelets-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:88aa5449e109d8f5e7f0adef85f7f73b1ab086102865be64421a3a3d02d277f4"}, - {file = "PyWavelets-1.4.1.tar.gz", hash = "sha256:6437af3ddf083118c26d8f97ab43b0724b956c9f958e9ea788659f6a2834ba93"}, + {file = "pywavelets-1.5.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:05723b35191ceb7d0c0bc2898a9ff391c0f20e8ed9b75d30211464872efcac95"}, + {file = "pywavelets-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e045ee612de58e3175ae863c34072b6bf5b45b61264c1adbd75506ce31cedbb2"}, + {file = "pywavelets-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c857081c037552f174732d864b55d8db4845f5e2fdf0e7bfc2df675a417906f4"}, + {file = "pywavelets-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8b58eaf946fbee002cce460d32a0e932c6d9e158aad10eea984e7f26cda15e"}, + {file = "pywavelets-1.5.0-cp310-cp310-win32.whl", hash = "sha256:51c8e9e081af40f61d194960db0f3dc0434bbd979dafcbbd6463134b3f482f37"}, + {file = "pywavelets-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:67b65da9ef6380a48b8b53de6d8a4f83747b84b217a37944a4dcf3a53cdf308d"}, + {file = 
"pywavelets-1.5.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2cae4a0151e443e915905c120435e69ad410b484ce8af4839220e43a494c7c53"}, + {file = "pywavelets-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aca65696341aa64b98bf852d6768dbb345516710a2912419d68e9d484ddd6cd"}, + {file = "pywavelets-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa54e6c6f2d6953f5f962eb1d1de7f9fbc5bdf06141f58c05d0d87072a05b8be"}, + {file = "pywavelets-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c3b10f1e1b08df4d918fa238ef5e5c51c111c4f6abdfecb19c26c540cbd8187"}, + {file = "pywavelets-1.5.0-cp311-cp311-win32.whl", hash = "sha256:ca2e1faaea7f7ff42c771e180635e2fb165cf23c9805c4fe05f9458bcb97d093"}, + {file = "pywavelets-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:526e874ba79ee3779245737a3b8540defc7e92f6cec8f13258719cc1669f8b42"}, + {file = "pywavelets-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f3eba7f581a723132beb213ce4b291a51306e3d2f79241a71063294a71cfa25d"}, + {file = "pywavelets-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7da6c2acd7253e5d45f371bcd6c0f34d70b2f82694420afb0631130bc89e3288"}, + {file = "pywavelets-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7dc392c3d3d5415b25b5c6ab3b77bb2ac2b7ff6c4d2fb81bd4633b9ac4b66f3"}, + {file = "pywavelets-1.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:322995ea0a57c96086782f0391934f9f00123087a62ad7bef0e778491f121931"}, + {file = "pywavelets-1.5.0-cp312-cp312-win32.whl", hash = "sha256:eb123f01315c0fa54e25780f3b0ce0b096bab35f6c11cacbcd4ac9915f26508a"}, + {file = "pywavelets-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:7115439f0dff291b8f81b69caff1a240695566f17c483752a49de9576c7332a4"}, + {file = "pywavelets-1.5.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:34d189aed544687500a2fba5b8970951a76f62f1d140cc5f9440d9b32b14b8f5"}, + {file = "pywavelets-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:00d5c37775a2baa4e5e6e9df3f93e6fc700a76bd50acd3b234bb13467cc54b6b"}, + {file = "pywavelets-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d9763987b4a79917f007c1d5df0adc81adabbad3c7c0a368f4a7f12034816f3"}, + {file = "pywavelets-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49aa6abf9ac941f47f7ea26a3c7dd5c8bfcf0e903dc5ec68ed105b52bfccd4e2"}, + {file = "pywavelets-1.5.0-cp39-cp39-win32.whl", hash = "sha256:f457d9faee286bd542c8f1921e38b8f5f54bc1949c0e349c8f1e9f8eb6d251a6"}, + {file = "pywavelets-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:91847ac1b658cf985a7f91ff638ba1d4a9a0544c5480ecbf8db427baf455725a"}, + {file = "pywavelets-1.5.0.tar.gz", hash = "sha256:d9e25c7cabef7ccd53f5fead26ab22152fe4cb937bad7411b5d506e2b5de38f6"}, ] [package.dependencies] -numpy = ">=1.17.3" +numpy = ">=1.22.4,<2.0" [[package]] name = "pyyaml" @@ -1665,83 +1656,45 @@ test = ["asv", "matplotlib (>=3.5)", "pooch (>=1.6.0)", "pytest (>=7.0)", "pytes [[package]] name = "scipy" -version = "1.11.3" +version = "1.12.0" description = "Fundamental algorithms for scientific computing in Python" optional = false -python-versions = "<3.13,>=3.9" -files = [ - {file = "scipy-1.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:370f569c57e1d888304052c18e58f4a927338eafdaef78613c685ca2ea0d1fa0"}, - {file = "scipy-1.11.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:9885e3e4f13b2bd44aaf2a1a6390a11add9f48d5295f7a592393ceb8991577a3"}, - {file = 
"scipy-1.11.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e04aa19acc324a1a076abb4035dabe9b64badb19f76ad9c798bde39d41025cdc"}, - {file = "scipy-1.11.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1a8a4657673bfae1e05e1e1d6e94b0cabe5ed0c7c144c8aa7b7dbb774ce5c1"}, - {file = "scipy-1.11.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7abda0e62ef00cde826d441485e2e32fe737bdddee3324e35c0e01dee65e2a88"}, - {file = "scipy-1.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:033c3fd95d55012dd1148b201b72ae854d5086d25e7c316ec9850de4fe776929"}, - {file = "scipy-1.11.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:925c6f09d0053b1c0f90b2d92d03b261e889b20d1c9b08a3a51f61afc5f58165"}, - {file = "scipy-1.11.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5664e364f90be8219283eeb844323ff8cd79d7acbd64e15eb9c46b9bc7f6a42a"}, - {file = "scipy-1.11.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00f325434b6424952fbb636506f0567898dca7b0f7654d48f1c382ea338ce9a3"}, - {file = "scipy-1.11.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f290cf561a4b4edfe8d1001ee4be6da60c1c4ea712985b58bf6bc62badee221"}, - {file = "scipy-1.11.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:91770cb3b1e81ae19463b3c235bf1e0e330767dca9eb4cd73ba3ded6c4151e4d"}, - {file = "scipy-1.11.3-cp311-cp311-win_amd64.whl", hash = "sha256:e1f97cd89c0fe1a0685f8f89d85fa305deb3067d0668151571ba50913e445820"}, - {file = "scipy-1.11.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dfcc1552add7cb7c13fb70efcb2389d0624d571aaf2c80b04117e2755a0c5d15"}, - {file = "scipy-1.11.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:0d3a136ae1ff0883fffbb1b05b0b2fea251cb1046a5077d0b435a1839b3e52b7"}, - {file = "scipy-1.11.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bae66a2d7d5768eaa33008fa5a974389f167183c87bf39160d3fefe6664f8ddc"}, - {file = "scipy-1.11.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2f6dee6cbb0e263b8142ed587bc93e3ed5e777f1f75448d24fb923d9fd4dce6"}, - {file = "scipy-1.11.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:74e89dc5e00201e71dd94f5f382ab1c6a9f3ff806c7d24e4e90928bb1aafb280"}, - {file = "scipy-1.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:90271dbde4be191522b3903fc97334e3956d7cfb9cce3f0718d0ab4fd7d8bfd6"}, - {file = "scipy-1.11.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a63d1ec9cadecce838467ce0631c17c15c7197ae61e49429434ba01d618caa83"}, - {file = "scipy-1.11.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:5305792c7110e32ff155aed0df46aa60a60fc6e52cd4ee02cdeb67eaccd5356e"}, - {file = "scipy-1.11.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea7f579182d83d00fed0e5c11a4aa5ffe01460444219dedc448a36adf0c3917"}, - {file = "scipy-1.11.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c77da50c9a91e23beb63c2a711ef9e9ca9a2060442757dffee34ea41847d8156"}, - {file = "scipy-1.11.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15f237e890c24aef6891c7d008f9ff7e758c6ef39a2b5df264650eb7900403c0"}, - {file = "scipy-1.11.3-cp39-cp39-win_amd64.whl", hash = "sha256:4b4bb134c7aa457e26cc6ea482b016fef45db71417d55cc6d8f43d799cdf9ef2"}, - {file = "scipy-1.11.3.tar.gz", hash = "sha256:bba4d955f54edd61899776bad459bf7326e14b9fa1c552181f0479cc60a568cd"}, +python-versions = ">=3.9" +files = [ + {file = "scipy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:78e4402e140879387187f7f25d91cc592b3501a2e51dfb320f48dfb73565f10b"}, + {file = "scipy-1.12.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:f5f00ebaf8de24d14b8449981a2842d404152774c1a1d880c901bf454cb8e2a1"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e53958531a7c695ff66c2e7bb7b79560ffdc562e2051644c5576c39ff8efb563"}, + {file = "scipy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e32847e08da8d895ce09d108a494d9eb78974cf6de23063f93306a3e419960c"}, + {file = "scipy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c1020cad92772bf44b8e4cdabc1df5d87376cb219742549ef69fc9fd86282dd"}, + {file = "scipy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:75ea2a144096b5e39402e2ff53a36fecfd3b960d786b7efd3c180e29c39e53f2"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:408c68423f9de16cb9e602528be4ce0d6312b05001f3de61fe9ec8b1263cad08"}, + {file = "scipy-1.12.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5adfad5dbf0163397beb4aca679187d24aec085343755fcdbdeb32b3679f254c"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3003652496f6e7c387b1cf63f4bb720951cfa18907e998ea551e6de51a04467"}, + {file = "scipy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b8066bce124ee5531d12a74b617d9ac0ea59245246410e19bca549656d9a40a"}, + {file = "scipy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8bee4993817e204d761dba10dbab0774ba5a8612e57e81319ea04d84945375ba"}, + {file = "scipy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a24024d45ce9a675c1fb8494e8e5244efea1c7a09c60beb1eeb80373d0fecc70"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e7e76cc48638228212c747ada851ef355c2bb5e7f939e10952bc504c11f4e372"}, + {file = "scipy-1.12.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f7ce148dffcd64ade37b2df9315541f9adad6efcaa86866ee7dd5db0c8f041c3"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c39f92041f490422924dfdb782527a4abddf4707616e07b021de33467f917bc"}, + {file = "scipy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7ebda398f86e56178c2fa94cad15bf457a218a54a35c2a7b4490b9f9cb2676c"}, + {file = "scipy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:95e5c750d55cf518c398a8240571b0e0782c2d5a703250872f36eaf737751338"}, + {file = "scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35"}, + {file = "scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371"}, + {file = "scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490"}, + {file = "scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc"}, + {file = "scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e"}, + {file = 
"scipy-1.12.0.tar.gz", hash = "sha256:4bf5abab8a36d20193c698b0f1fc282c1d083c94723902c447e5d2f1780936a3"}, ] [package.dependencies] -numpy = ">=1.21.6,<1.28.0" +numpy = ">=1.22.4,<1.29.0" [package.extras] dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] -test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "setuptools" -version = "68.2.2" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, - {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "setuptools-scm" -version = "8.0.4" -description = "the blessed package to manage your versions by scm tags" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-scm-8.0.4.tar.gz", hash = "sha256:b5f43ff6800669595193fd09891564ee9d1d7dcb196cab4b2506d53a2e1c95c7"}, - {file = "setuptools_scm-8.0.4-py3-none-any.whl", hash = "sha256:b47844cd2a84b83b3187a5782c71128c28b4c94cad8bfb871da2784a5cb54c4f"}, -] - -[package.dependencies] -packaging = ">=20" -setuptools = "*" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} -typing-extensions = "*" - -[package.extras] -docs = ["entangled-cli[rich]", "mkdocs", "mkdocs-entangled-plugin", "mkdocs-material", "mkdocstrings[python]", "pygments"] -rich = ["rich"] -test = ["build", "pytest", "rich", "wheel"] +test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "six" @@ -1824,13 +1777,13 @@ docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "n [[package]] name = "tifffile" -version = "2023.9.26" +version = "2023.12.9" description = "Read and write TIFF files" optional = false python-versions = ">=3.9" files = [ - {file = "tifffile-2023.9.26-py3-none-any.whl", hash = "sha256:1de47fa945fddaade256e25ad4f375ae65547f3c1354063aded881c32a64cf89"}, - {file = "tifffile-2023.9.26.tar.gz", hash = 
"sha256:67e355e4595aab397f8405d04afe1b4ae7c6f62a44e22d933fee1a571a48c7ae"}, + {file = "tifffile-2023.12.9-py3-none-any.whl", hash = "sha256:9b066e4b1a900891ea42ffd33dab8ba34c537935618b9893ddef42d7d422692f"}, + {file = "tifffile-2023.12.9.tar.gz", hash = "sha256:9dd1da91180a6453018a241ff219e1905f169384355cd89c9ef4034c1b46cdb8"}, ] [package.dependencies] @@ -1852,22 +1805,22 @@ files = [ [[package]] name = "tornado" -version = "6.3.3" +version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." optional = false python-versions = ">= 3.8" files = [ - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:502fba735c84450974fec147340016ad928d29f1e91f49be168c0a4c18181e1d"}, - {file = "tornado-6.3.3-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:805d507b1f588320c26f7f097108eb4023bbaa984d63176d1652e184ba24270a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bd19ca6c16882e4d37368e0152f99c099bad93e0950ce55e71daed74045908f"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ac51f42808cca9b3613f51ffe2a965c8525cb1b00b7b2d56828b8045354f76a"}, - {file = "tornado-6.3.3-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:71a8db65160a3c55d61839b7302a9a400074c9c753040455494e2af74e2501f2"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ceb917a50cd35882b57600709dd5421a418c29ddc852da8bcdab1f0db33406b0"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:7d01abc57ea0dbb51ddfed477dfe22719d376119844e33c661d873bf9c0e4a16"}, - {file = "tornado-6.3.3-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9dc4444c0defcd3929d5c1eb5706cbe1b116e762ff3e0deca8b715d14bf6ec17"}, - {file = "tornado-6.3.3-cp38-abi3-win32.whl", hash = "sha256:65ceca9500383fbdf33a98c0087cb975b2ef3bfb874cb35b8de8740cf7f41bd3"}, - {file = "tornado-6.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:22d3c2fa10b5793da13c807e6fc38ff49a4f6e1e3868b0a6f4164768bb8e20f5"}, - {file = "tornado-6.3.3.tar.gz", hash = "sha256:e7d8db41c0181c80d76c982aacc442c0783a2c54d6400fe028954201a2e032fe"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, ] [[package]] @@ -1892,18 +1845,18 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.10.1" +version = "5.14.1" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.10.1-py3-none-any.whl", hash = "sha256:07ab9c5bf8a0499fd7b088ba51be899c90ffc936ffc797d7b6907fc516bcd116"}, - {file = "traitlets-5.10.1.tar.gz", hash = "sha256:db9c4aa58139c3ba850101913915c042bdba86f7c8a0dda1c6f7f92c5da8e542"}, + {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, + {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.5.1)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "typer" @@ -1928,24 +1881,24 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] name = "tzdata" -version = "2023.3" +version = "2023.4" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, + {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] [[package]] @@ -1970,18 +1923,17 @@ tests = ["nose", "numpy"] [[package]] name = "urllib3" -version = "2.0.6" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, - {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2026,27 +1978,27 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" -version = "0.2.8" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] [[package]] name = "xyzservices" -version = "2023.7.0" +version = "2023.10.1" description = "Source of XYZ tiles providers" optional = false python-versions = ">=3.8" files = [ - {file = "xyzservices-2023.7.0-py3-none-any.whl", hash = "sha256:88e9cbf22b31a2f9c1b242e2b18690f5c705f0e539c9bfd37a10399e1037731b"}, - {file = "xyzservices-2023.7.0.tar.gz", hash = "sha256:0ec928742227d6f5d4367ea7b457fcfed943429f4de2949b5b02a82cdf5569d6"}, + {file = "xyzservices-2023.10.1-py3-none-any.whl", hash = "sha256:6a4c38d3a9f89d3e77153eff9414b36a8ee0850c9e8b85796fd1b2a85b8dfd68"}, + {file = "xyzservices-2023.10.1.tar.gz", hash = "sha256:091229269043bc8258042edbedad4fcb44684b0473ede027b5672ad40dc9fa02"}, ] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "d141a9b72da94c48b398cca5e29f9ebff8971418ce8faf06a92856897e3575da" +content-hash = "34645291ffe1814c1c60ed845fda663bedfdea83baa6abec223b5b6f3d39031f" diff --git a/pyproject.toml b/pyproject.toml index 938d56ee..8ef1013e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,13 +24,13 @@ license="GNU GPLv3" python = ">=3.10,<3.13" pandas = "^2.1.0" numpy = "^1.25.2" -matplotlib = "^3.7.2" +matplotlib = "^3.8.2" PyYAML = "^6.0.1" nmrglue = "^0.9" scipy = "^1.11.2" lmfit = "^1.2.2" scikit-image = "^0.21.0" -bokeh = "^3.2.2" +bokeh = "^3.3.4" numdifftools = "^0.9.39" Jinja2 = "^3.1.2" jedi = "^0.19.0" From 577ee5046d01e276f75752720884c672d82d6797 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 29 Jan 2024 22:40:37 -0500 Subject: [PATCH 05/37] changed styles --- peakipy/cli/edit.py | 43 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 35 insertions(+), 8 deletions(-) diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 70b0f0bd..977b456f 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -18,9 +18,10 @@ from skimage.filters import threshold_otsu from rich import print +from bokeh.io import curdoc from bokeh.events import ButtonClick, DoubleTap 
from bokeh.layouts import row, column -from bokeh.models import ColumnDataSource, Tabs, TabPanel +from bokeh.models import ColumnDataSource, Tabs, TabPanel, InlineStyleSheet from bokeh.models.tools import HoverTool from bokeh.models.widgets import ( Slider, @@ -39,7 +40,7 @@ ) from bokeh.plotting import figure from bokeh.plotting.contour import contour_data -from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256 +from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256, YlOrRd9 from peakipy.core import LoadData, read_config, StrucEl @@ -81,6 +82,7 @@ def init(self, doc): ) ) doc.title = "peakipy: Edit Fits" + doc.theme = "dark_minimal" @property def args(self): @@ -200,7 +202,7 @@ def setup_plot(self): self.y_ppm_mesh, self.peakipy_data.data[0], cl, - fill_color=Viridis256, + fill_color=YlOrRd9, line_color="black", ) self.negative_contour_renderer = self.p.contour( @@ -271,8 +273,8 @@ def setup_plot(self): self.p.line( spec_border_x, spec_border_y, - line_width=1, - line_color="black", + line_width=2, + line_color="red", line_dash="dotted", line_alpha=0.5, ) @@ -361,7 +363,7 @@ def setup_plot(self): self.fit_button.on_event(ButtonClick, self.fit_selected) columns = [ - TableColumn(field="ASS", title="Assignment"), + TableColumn(field="ASS", title="Assignment", width=500), TableColumn(field="CLUSTID", title="Cluster", editor=IntEditor()), TableColumn( field="X_PPM", @@ -405,14 +407,39 @@ def setup_plot(self): TableColumn( field="include", title="Include", + width=7, editor=SelectEditor(options=["yes", "no"]), ), TableColumn(field="MEMCNT", title="MEMCNT", editor=IntEditor()), ] self.data_table = DataTable( - source=self.source, columns=columns, editable=True, fit_columns=True - ) + source=self.source, columns=columns, editable=True, width=800, + ) + self.table_style = InlineStyleSheet( + css=""" + .slick-header-columns { + background-color: #2B303A !important; + font-family: arial; + font-weight: bold; + font-size: 12pt; + color: #FFFFFF; + text-align: right; + } + .slick-row { + font-size: 12pt; + font-family: arial; + text-align: left; + } + .slick-row:hover{ + background: none repeat scroll 0 0 #7c7c7c; + } + + + """ + ) + + self.data_table.stylesheets = [self.table_style] # callback for adding # source.selected.on_change('indices', callback) From 94b70f4f59fa9136cead42baff15facf86b7dcb1 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 29 Jan 2024 22:42:04 -0500 Subject: [PATCH 06/37] black --- peakipy/cli/edit_fits_app/main.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/peakipy/cli/edit_fits_app/main.py b/peakipy/cli/edit_fits_app/main.py index b0749a26..de0d67fe 100755 --- a/peakipy/cli/edit_fits_app/main.py +++ b/peakipy/cli/edit_fits_app/main.py @@ -144,7 +144,6 @@ def clusters( def recluster_peaks(event): - struc_size = tuple([int(i) for i in struct_el_size.value.split(",")]) print(struc_size) @@ -164,7 +163,6 @@ def recluster_peaks(event): def update_memcnt(df): - for ind, group in df.groupby("CLUSTID"): df.loc[group.index, "MEMCNT"] = len(group) @@ -179,7 +177,6 @@ def update_memcnt(df): def fit_selected(event): - selectionIndex = source.selected.indices current = df.iloc[selectionIndex] @@ -234,7 +231,6 @@ def peak_pick_callback(event): def slider_callback(attrname, old, new): - selectionIndex = source.selected.indices current = df.iloc[selectionIndex] From 2ea36321535cd1e5c6109cd2b9e669b4093f40c2 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 29 Jan 2024 22:44:58 -0500 Subject: [PATCH 07/37] added 
--reference-plane-index for selecting planes for initial lineshape fitting, fixed threshold to work with negative data --- peakipy/cli/fit.py | 6 ++++-- peakipy/cli/main.py | 39 +++++++++++++++++++++++++++++---------- peakipy/core.py | 25 ++++++++++++++++++++++++- 3 files changed, 57 insertions(+), 13 deletions(-) diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 36baa26b..86544149 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -95,14 +95,14 @@ def __init__( data: np.array, config: dict, plane_numbers: list, - planes_for_initial_fit: Optional[List[int]] = None, + reference_planes_for_initial_fit: List[int] = [], use_only_planes_above_threshold: Optional[float] = None, ): self._data = data self._args = args self._config = config self._plane_numbers = plane_numbers - self._planes_for_initial_fit = planes_for_initial_fit + self._planes_for_initial_fit = reference_planes_for_initial_fit self._use_only_planes_above_threshold = use_only_planes_above_threshold def check_integer_list(self): @@ -199,6 +199,7 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: noise = fit_input.args.get("noise") verb = fit_input.args.get("verb") initial_fit_threshold = fit_input.args.get("initial_fit_threshold") + reference_plane_indices = fit_input.args.get("reference_plane_indices") lineshape = fit_input.args.get("lineshape") xy_bounds = fit_input.args.get("xy_bounds") vclist = fit_input.args.get("vclist") @@ -281,6 +282,7 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: verbose=verb, noise=noise, fit_method=fit_input.config.get("fit_method", "leastsq"), + reference_plane_indices=reference_plane_indices, threshold=initial_fit_threshold, ) fit_result.plot( diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 590f7efc..26bd3e07 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -102,15 +102,18 @@ y_radius_ppm_help = "Y radius in ppm of the elliptical fitting mask for each peak" dims_help = "Dimension order of your data" + @app.command(help="Read NMRPipe/Analysis peaklist into pandas dataframe") def read( peaklist_path: Annotated[Path, typer.Argument(help=peaklist_path_help)], data_path: Annotated[Path, typer.Argument(help=data_path_help)], - peaklist_format: Annotated[PeaklistFormat, typer.Argument(help=peaklist_format_help)], - thres: Annotated[Optional[float],typer.Option(help=thres_help)] = None, + peaklist_format: Annotated[ + PeaklistFormat, typer.Argument(help=peaklist_format_help) + ], + thres: Annotated[Optional[float], typer.Option(help=thres_help)] = None, struc_el: StrucEl = StrucEl.disk, struc_size: Tuple[int, int] = (3, None), # Tuple[int, Optional[int]] = (3, None), - x_radius_ppm: Annotated[float,typer.Option(help=x_radius_ppm_help)] = 0.04, + x_radius_ppm: Annotated[float, typer.Option(help=x_radius_ppm_help)] = 0.04, y_radius_ppm: Annotated[float, typer.Option(help=y_radius_ppm_help)] = 0.4, x_ppm_column_name: str = "Position F1", y_ppm_column_name: str = "Position F2", @@ -352,13 +355,20 @@ def read( out.write(yaml) os.system("peakipy spec show_clusters.yml") - print(f"""[green] + print( + f"""[green] ✨✨ Finished reading and clustering peaks! 
✨✨ Use {outname} to run peakipy edit or fit.[/green] - """) + """ + ) + + +reference_plane_index_help = ( + "Select planes to use for initial estimation of lineshape parameters" +) @app.command(help="Fit NMR data to lineshape models and deconvolute overlapping peaks") @@ -373,6 +383,9 @@ def fit( vclist: Optional[Path] = None, plane: Optional[List[int]] = None, exclude_plane: Optional[List[int]] = None, + reference_plane_index: Annotated[ + List[int], typer.Option(help=reference_plane_index_help) + ] = [], initial_fit_threshold: Optional[float] = None, mp: bool = True, plot: Optional[Path] = None, @@ -477,6 +490,7 @@ def fit( args["show"] = show args["mp"] = mp args["initial_fit_threshold"] = initial_fit_threshold + args["reference_plane_indices"] = reference_plane_index # read vclist if vclist is None: @@ -584,12 +598,17 @@ def fit( nclusters = peakipy_data.df.CLUSTID.nunique() npeaks = peakipy_data.df.shape[0] if (nclusters >= n_cpu) and mp: - print(f"[green]Using multiprocessing to fit {npeaks} peaks in {nclusters} clusters [/green]"+"\n") + print( + f"[green]Using multiprocessing to fit {npeaks} peaks in {nclusters} clusters [/green]" + + "\n" + ) # split peak lists # tmp_dir = split_peaklist(peakipy_data.df, n_cpu) fit_peaks_args = FitPeaksInput(args, peakipy_data.data, config, plane_numbers) with Pool(processes=n_cpu) as pool, tqdm( - total=len(peakipy_data.df.CLUSTID.unique()), ascii="▱▰",colour="green", + total=len(peakipy_data.df.CLUSTID.unique()), + ascii="▱▰", + colour="green", ) as pbar: # result = pool.map(fit_peaks, peaklists) # result = pool.starmap(fit_peaks, zip(peaklists, args_list)) @@ -1066,8 +1085,8 @@ def check( ) ) plt.close() - - #print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") + + # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") elif masked_data.shape[0] == 0 or masked_data.shape[1] == 0: print( f"[red]Nothing to plot for cluster {int(plane.clustid)}[/red]" @@ -1186,6 +1205,7 @@ def check( ax.legend() if show: + def exit_program(event): exit() @@ -1204,7 +1224,6 @@ def next_plot(event): plt.show() else: pdf.savefig() - plt.close() diff --git a/peakipy/core.py b/peakipy/core.py index 5a8b2dce..1b43a94d 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -754,11 +754,32 @@ def make_mask_from_peak_cluster(group, data): return mask, peak +def select_reference_planes_using_indices(data, indices: List[int]): + n_planes = data.shape[0] + if indices == []: + return data + + max_index = max(indices) + min_index = min(indices) + + if max_index >= n_planes: + raise IndexError( + f"Your data has {n_planes}. You selected plane {max_index} (allowed indices between 0 and {n_planes-1})" + ) + elif min_index < (-1 * n_planes): + raise IndexError( + f"Your data has {n_planes}. 
From ac42030c48ffe75e1348a27a7c9b01f301845721 Mon Sep 17 00:00:00 2001
From: Jacob Brady
Date: Thu, 1 Feb 2024 22:40:04 -0500
Subject: [PATCH 08/37] added all peakipy fit options to bokeh interface

---
 peakipy/cli/edit.py | 210 +++++++++++++++++++++++++++++++-------
 peakipy/cli/main.py | 106 ++++++++++++----------
 peakipy/core.py     |   4 +-
 test/test_cli.py    |   5 +-
 4 files changed, 212 insertions(+), 113 deletions(-)

diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py
index 977b456f..bb45c5eb 100644
--- a/peakipy/cli/edit.py
+++ b/peakipy/cli/edit.py
@@ -5,22 +5,18 @@
 import sys
 import shutil
 
-from pathlib import Path
 from subprocess import check_output
 from pathlib import Path
 
-import nmrglue as ng
 import numpy as np
-import matplotlib.pyplot as plt
 import pandas as pd
-from matplotlib.cm import magma, autumn, viridis
 from skimage.filters import threshold_otsu
 
 from rich import print
 from bokeh.io import curdoc
 from bokeh.events import ButtonClick, DoubleTap
-from bokeh.layouts import row, column
+from bokeh.layouts import row, column, grid
 from bokeh.models import ColumnDataSource, Tabs, TabPanel, InlineStyleSheet
 from bokeh.models.tools import HoverTool
 from bokeh.models.widgets import (
     Slider,
@@ -64,10 +60,13 @@ def __init__(self, peaklist_path: Path, data_path: Path):
         self.peakipy_data.check_data_frame()
         # make temporary paths
         self.make_temp_files()
-        self.make_data_source()
         self.setup_radii_sliders()
         self.setup_save_buttons()
+        self.setup_set_fixed_parameters()
+        self.setup_xybounds()
+        self.setup_set_reference_planes()
+        self.setup_initial_fit_threshold()
         self.setup_quit_button()
         self.setup_plot()
 
@@ -151,6 +150,78 @@ def setup_save_buttons(self):
         self.button = Button(label="Save", button_type="success")
         self.button.on_event(ButtonClick, self.save_peaks)
 
+    def setup_set_fixed_parameters(self):
+        self.select_fixed_parameters_help = Div(
+            text="Select parameters to fix after initial lineshape parameters have been fitted"
+        )
+        self.select_fixed_parameters = TextInput(
+            value="fraction sigma center", width=200
+        )
+
+    def setup_xybounds(self):
+        self.set_xybounds_help = Div(
+            text="If floating the peak centers you can bound the fits in the x and y dimensions. Units of ppm."
+        )
+        self.set_xybounds = TextInput(placeholder="e.g. 0.01 0.1")
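
The widgets above only assemble extra command-line flags for the peakipy fit call made by the app. A rough standalone sketch of that flag-building logic (hypothetical file names and values; the real methods are defined in the hunk that follows):

    def make_xybound_command(x_bound, y_bound):
        # bound fitted peak centers in ppm when both bounds are given
        if (x_bound is not None) and (y_bound is not None):
            return f" --xy-bounds {x_bound} {y_bound}"
        return ""

    def make_reference_planes_command(planes):
        # one flag per selected reference plane index
        return "".join(f" --reference-plane-index {p}" for p in planes)

    cmd = (
        "peakipy fit peaks.csv test.ft2 fits.csv --lineshape PV"
        + make_xybound_command(0.01, 0.1)
        + make_reference_planes_command([0, 1])
    )
    # -> "peakipy fit peaks.csv test.ft2 fits.csv --lineshape PV --xy-bounds 0.01 0.1
    #     --reference-plane-index 0 --reference-plane-index 1"
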
+
+    def get_xybounds(self):
+        try:
+            x_bound, y_bound = self.set_xybounds.value.split(" ")
+            x_bound = float(x_bound)
+            y_bound = float(y_bound)
+            xy_bounds = x_bound, y_bound
+        except ValueError:
+            xy_bounds = None, None
+        return xy_bounds
+
+    def make_xybound_command(self, x_bound, y_bound):
+        if (x_bound != None) and (y_bound != None):
+            xy_bounds_command = f" --xy-bounds {x_bound} {y_bound}"
+        else:
+            xy_bounds_command = ""
+        return xy_bounds_command
+
+    def setup_set_reference_planes(self):
+        self.select_reference_planes_help = Div(
+            text="Select reference planes (index starts at 0)"
+        )
+        self.select_reference_planes = TextInput(placeholder="0 1 2 3")
+
+    def get_reference_planes(self):
+        if self.select_reference_planes.value:
+            print("You have selected reference planes")
+            return self.select_reference_planes.value.split(" ")
+        else:
+            return []
+
+    def make_reference_planes_command(self, reference_plane_list):
+        reference_plane_command = ""
+        for plane in reference_plane_list:
+            reference_plane_command += f" --reference-plane-index {plane}"
+        return reference_plane_command
+
+    def setup_initial_fit_threshold(self):
+        self.set_initial_fit_threshold_help = Div(
+            text="Set an intensity threshold for selection of planes for initial estimation of lineshape parameters"
+        )
+        self.set_initial_fit_threshold = TextInput(placeholder="e.g. 1e7")
+
+    def get_initial_fit_threshold(self):
+        try:
+            initial_fit_threshold = float(self.set_initial_fit_threshold.value)
+        except ValueError:
+            initial_fit_threshold = None
+        return initial_fit_threshold
+
+    def make_initial_fit_threshold_command(self, initial_fit_threshold):
+        if initial_fit_threshold is not None:
+            initial_fit_threshold_command = (
+                f" --initial-fit-threshold {initial_fit_threshold}"
+            )
+        else:
+            initial_fit_threshold_command = ""
+        return initial_fit_threshold_command
+
     def setup_quit_button(self):
         # Quit button
         self.exit_button = Button(label="Quit", button_type="warning")
@@ -204,6 +275,7 @@ def setup_plot(self):
             cl,
             fill_color=YlOrRd9,
             line_color="black",
+            line_width=0.25,
         )
         self.negative_contour_renderer = self.p.contour(
             self.x_ppm_mesh,
             self.y_ppm_mesh,
             self.peakipy_data.data[0],
             cl,
             fill_color=Reds256,
             line_color="black",
+            line_width=0.25,
         )
         self.contour_start = TextInput(
@@ -313,12 +386,12 @@ def setup_plot(self):
             # 6: "PV_G",
             # 7: "G_L",
         }
-        self.radio_button_group = RadioButtonGroup(
+        self.select_lineshape_radiobuttons = RadioButtonGroup(
             labels=[self.lineshapes[i] for i in self.lineshapes.keys()], active=0
         )
-        self.ls_div = Div(
+        self.select_lineshape_radiobuttons_help = Div(
            text="""Choose lineshape you wish to fit. This can be Voigt (V), pseudo-Voigt (PV), Gaussian (G), Lorentzian (L).
- PV_PV fits a PV lineshape with independent "fraction" parameters for the direct and indirect dimensions""" + PV_PV fits a PV lineshape with independent "fraction" parameters for the direct and indirect dimensions""", ) self.clust_div = Div( text="""If you want to adjust how the peaks are automatically clustered then try changing the @@ -414,7 +487,10 @@ def setup_plot(self): ] self.data_table = DataTable( - source=self.source, columns=columns, editable=True, width=800, + source=self.source, + columns=columns, + editable=True, + width=800, ) self.table_style = InlineStyleSheet( css=""" @@ -445,20 +521,48 @@ def setup_plot(self): # source.selected.on_change('indices', callback) self.source.selected.on_change("indices", self.select_callback) - # Document layout - fitting_controls = column( - row( - column(self.slider_X_RADIUS, self.slider_Y_RADIUS), - column( - row(column(self.contour_start, self.pos_neg_contour_radiobutton)), - column(self.fit_button), - ), + # # Document layout + # fitting_controls = column( + # row( + # column(self.slider_X_RADIUS, self.slider_Y_RADIUS), + # column( + # row(column(self.contour_start, self.pos_neg_contour_radiobutton)), + # column(self.fit_button), + # ), + # ), + # row( + # column(column(self.select_lineshape_radiobuttons_help), column(self.select_lineshape_radiobuttons)), + # column(column(self.select_plane), column(self.checkbox_group)), + # column(self.select_fixed_parameters_help, self.select_fixed_parameters), + # column(self.select_reference_planes) + # ), + # max_width=400, + # ) + fitting_controls = row( + column( + self.slider_X_RADIUS, + self.slider_Y_RADIUS, + self.contour_start, + self.pos_neg_contour_radiobutton, + self.select_lineshape_radiobuttons_help, + self.select_lineshape_radiobuttons, + max_width=400, ), - row( - column(column(self.ls_div), column(self.radio_button_group)), - column(column(self.select_plane), column(self.checkbox_group)), + column( + self.select_plane, + self.checkbox_group, + self.select_fixed_parameters_help, + self.select_fixed_parameters, + self.set_xybounds_help, + self.set_xybounds, + self.select_reference_planes_help, + self.select_reference_planes, + self.set_initial_fit_threshold_help, + self.set_initial_fit_threshold, + self.fit_button, + max_width=400, ), - max_width=400, + max_width=800, ) # reclustering tab @@ -545,6 +649,15 @@ def update_memcnt(self): self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) return self.peakipy_data.df + def unpack_parameters_to_fix(self): + return self.select_fixed_parameters.value.strip().split(" ") + + def make_fix_command_from_parameters(self, parameters): + command = "" + for parameter in parameters: + command += f" --fix {parameter}" + return command + def fit_selected(self, event): selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] @@ -568,16 +681,26 @@ def fit_selected(self, event): ] selected_df.to_csv(self.TEMP_INPUT_CSV) + fix_command = self.make_fix_command_from_parameters( + self.unpack_parameters_to_fix() + ) + xy_bounds_command = self.make_xybound_command(*self.get_xybounds()) + reference_planes_command = self.make_reference_planes_command( + self.get_reference_planes() + ) + initial_fit_threshold_command = self.make_initial_fit_threshold_command( + self.get_initial_fit_threshold() + ) - lineshape = self.lineshapes[self.radio_button_group.active] + lineshape = self.lineshapes[self.select_lineshape_radiobuttons.active] print(f"[yellow]Using LS = {lineshape}[/yellow]") if self.checkbox_group.active == 
[]: - fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}" + fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}" else: plane_index = self.select_plane.value print(f"[yellow]Only fitting plane {plane_index}[/yellow]") - fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}" + fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index}" print(f"[blue]{fit_command}[/blue]") @@ -819,40 +942,3 @@ def update_contour(self, attrname, old, new): def exit_edit_peaks(self, event): sys.exit() - - -def get_contour_data(data, levels, **kwargs): - cs = plt.contour(data, levels, **kwargs) - xs = [] - ys = [] - xt = [] - yt = [] - col = [] - text = [] - isolevelid = 0 - for isolevel in cs.collections: - isocol = isolevel.get_edgecolor()[0] - thecol = 3 * [None] - theiso = str(cs.get_array()[isolevelid]) - isolevelid += 1 - for i in range(3): - thecol[i] = int(255 * isocol[i]) - thecol = "#%02x%02x%02x" % (thecol[0], thecol[1], thecol[2]) - - for path in isolevel.get_paths(): - v = path.vertices - x = v[:, 0] - y = v[:, 1] - xs.append(x.tolist()) - ys.append(y.tolist()) - indx = int(len(x) / 2) - indy = int(len(y) / 2) - xt.append(x[indx]) - yt.append(y[indy]) - text.append(theiso) - col.append(thecol) - - source = ColumnDataSource( - data={"xs": xs, "ys": ys, "line_color": col, "xt": xt, "yt": yt, "text": text} - ) - return source diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 26bd3e07..e5272532 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -801,6 +801,57 @@ def fit( run_log() +def validate_plane_selection(plane, pseudo3D): + if plane > pseudo3D.n_planes: + raise ValueError( + f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {plane}...[red]" + f"plane numbering starts from 0." 
+ ) + elif plane < 0: + raise ValueError( + f"[red]Plane number can not be negative; you selected --plane {plane}...[/red]" + ) + else: + return plane + + +def validate_ccount(ccount): + if type(ccount) == int: + ccount = ccount + else: + raise TypeError("ccount should be an integer") + return ccount + + +def validate_rcount(rcount): + if type(rcount) == int: + rcount = rcount + else: + raise TypeError("rcount should be an integer") + return rcount + + +def unpack_plotting_colors(colors): + match colors: + case (data_color, fit_color): + data_color, fit_color = colors + case _: + data_color, fit_color = "green", "blue" + return data_color, fit_color + + +def get_fit_data_for_selected_peak_clusters(fits, clusters): + match clusters: + case None | []: + pass + case _: + # only use these clusters + fits = fits[fits.clustid.isin(clusters)] + if len(fits) < 1: + exit(f"Are you sure clusters {clusters} exist?") + return fits + + @app.command(help="Interactive plots for checking fits") def check( fits: Path, @@ -830,7 +881,7 @@ def check( e.g. clusters=[2,4,6,7] plane : int Plot selected plane [default: 0] - e.g. plane=2 will plot second plane only + e.g. --plane 2 will plot second plane only outname : Path Plot name [default: Path("plots.pdf")] first : bool @@ -887,52 +938,11 @@ def check( else: plane = plane - if plane > pseudo3D.n_planes: - raise ValueError( - f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {plane}...[red]" - f"plane numbering starts from 0." - ) - elif plane < 0: - raise ValueError( - f"[red]Plane number can not be negative; you selected --plane {plane}...[/red]" - ) - # in case first plane is chosen - elif plane == 0: - selected_plane = plane - # plane numbers start from 1 so adjust for indexing - else: - selected_plane = plane - # fits = fits[fits["plane"] == plane] - # print(fits) - - if type(ccount) == int: - ccount = ccount - else: - raise TypeError("ccount should be an integer") - - if type(rcount) == int: - rcount = rcount - else: - raise TypeError("rcount should be an integer") - - match colors: - case (data_color, fit_color): - data_color, fit_color = colors - case _: - data_color, fit_color = "green", "blue" - - # raise TypeError( - # "colors should be valid pair for matplotlib. i.e. 
g,b or green,blue" - # ) - - match clusters: - case None | []: - pass - case _: - # only use these clusters - fits = fits[fits.clustid.isin(clusters)] - if len(fits) < 1: - exit(f"Are you sure clusters {clusters} exist?") + selected_plane = validate_plane_selection(plane, pseudo3D) + ccount = validate_ccount(ccount) + rcount = validate_rcount(rcount) + data_color, fit_color = unpack_plotting_colors(colors) + fits = get_fit_data_for_selected_peak_clusters(fits, clusters) groups = fits.groupby("clustid") @@ -943,7 +953,7 @@ def check( X, Y = XY with PdfPages(outname) as pdf: - for name, group in groups: + for _, group in groups: table = df_to_rich_table( group, title="", diff --git a/peakipy/core.py b/peakipy/core.py index 1b43a94d..d404db3d 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -890,7 +890,9 @@ def fit_first_plane( # print("DATA",data.shape, mask.shape) peak_slices = np.array([d[mask] for d in data]) # print("Peak slices", peak_slices.shape) - peak_slices = select_reference_planes_using_indices(peak_slices, reference_plane_indices) + peak_slices = select_reference_planes_using_indices( + peak_slices, reference_plane_indices + ) peak_slices = select_planes_above_threshold_from_masked_data(peak_slices, threshold) # print(peak_slices.shape) peak_slices = peak_slices.sum(axis=0) diff --git a/test/test_cli.py b/test/test_cli.py index 8badf4ca..a6a4e653 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -132,10 +132,11 @@ def test_check_main_with_voigt(protein_L): ) peakipy.cli.main.check(**args) + def test_edit_with_default(protein_L): args = dict( - peaklist_path=protein_L/Path("peaks.csv"), - data_path=protein_L/Path("test1.ft2"), + peaklist_path=protein_L / Path("peaks.csv"), + data_path=protein_L / Path("test1.ft2"), test=True, ) peakipy.cli.main.edit(**args) From 0c022f9f1db267f695f76a501f5609b1259f4286 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sat, 10 Feb 2024 22:25:09 -0500 Subject: [PATCH 09/37] some refactoring, added some tests, updated bokeh app --- .pre-commit-config.yaml | 14 +++ ccpn_macros/selected_to_peakipy.py | 2 +- peakipy/cli/edit.py | 51 +++++--- peakipy/core.py | 188 ++++++++++++++++------------- poetry.lock | 122 ++++++++++++++++++- pyproject.toml | 1 + test/test_core.py | 87 ++++++++++++- 7 files changed, 358 insertions(+), 107 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..d4585edb --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,14 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files +- repo: https://github.com/psf/black + rev: 24.1.1 + hooks: + - id: black diff --git a/ccpn_macros/selected_to_peakipy.py b/ccpn_macros/selected_to_peakipy.py index b88e317c..1fb2ff0e 100644 --- a/ccpn_macros/selected_to_peakipy.py +++ b/ccpn_macros/selected_to_peakipy.py @@ -1,4 +1,5 @@ """ Export selected peaks to TSV file """ + import os from pathlib import Path import pandas as pd @@ -84,7 +85,6 @@ def peakipy_check(): if __name__ == "__main__": - peaks = current.peaks df = PeaksToDataFrame(peaks) column_order = [ diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index bb45c5eb..6f5a5859 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -401,12 +401,12 @@ def setup_plot(self): any 
manual edits will be lost.""" ) self.intro_div = Div( - text="""
-peakipy - interactive fit adjustment
-"""
+            text="""
+peakipy - interactive fit adjustment
""" ) self.doc_link = Div( - text="
-ℹ️ click here for documentation
-"
+            text="
+ℹ️ click here for documentation
" ) self.fit_reports = "" self.fit_reports_div = Div(text="", height=400, styles={"overflow": "scroll"}) @@ -490,18 +490,21 @@ def setup_plot(self): source=self.source, columns=columns, editable=True, - width=800, + width=1200, ) self.table_style = InlineStyleSheet( css=""" .slick-header-columns { - background-color: #2B303A !important; + background-color: #00296b !important; font-family: arial; font-weight: bold; font-size: 12pt; color: #FFFFFF; text-align: right; } + .slick-header-column:hover { + background: none repeat scroll 0 0 #fdc500; + } .slick-row { font-size: 12pt; font-family: arial; @@ -510,8 +513,22 @@ def setup_plot(self): .slick-row:hover{ background: none repeat scroll 0 0 #7c7c7c; } + .slick-cell { + header-font-weight: 500; + border-width: 1px 1px 1px 1px; + border-color: #d4d4d4; + background-color: #00509D; + color: #FFFFFF; + } + .slick-cell.selected { + header-font-weight: 500; + border-width: 1px 1px 1px 1px; + border-color: #00509D; + background-color: #FDC500; + color: black; + } + - """ ) @@ -662,12 +679,12 @@ def fit_selected(self, event): selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] - self.peakipy_data.df.loc[ - selectionIndex, "X_RADIUS_PPM" - ] = self.slider_X_RADIUS.value - self.peakipy_data.df.loc[ - selectionIndex, "Y_RADIUS_PPM" - ] = self.slider_Y_RADIUS.value + self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( + self.slider_X_RADIUS.value + ) + self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( + self.slider_Y_RADIUS.value + ) self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER_PPM"] = ( current["X_RADIUS_PPM"] * 2.0 @@ -806,9 +823,9 @@ def slider_callback_x(self, attrname, old, new): self.peakipy_data.df.loc[selectionIndex, "X_RADIUS"] = ( self.slider_X_RADIUS.value * self.peakipy_data.pt_per_ppm_f2 ) - self.peakipy_data.df.loc[ - selectionIndex, "X_RADIUS_PPM" - ] = self.slider_X_RADIUS.value + self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( + self.slider_X_RADIUS.value + ) self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER_PPM"] = ( current["X_RADIUS_PPM"] * 2.0 @@ -828,9 +845,9 @@ def slider_callback_y(self, attrname, old, new): self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS"] = ( self.slider_Y_RADIUS.value * self.peakipy_data.pt_per_ppm_f1 ) - self.peakipy_data.df.loc[ - selectionIndex, "Y_RADIUS_PPM" - ] = self.slider_Y_RADIUS.value + self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( + self.slider_Y_RADIUS.value + ) self.peakipy_data.df.loc[selectionIndex, "Y_DIAMETER_PPM"] = ( current["Y_RADIUS_PPM"] * 2.0 diff --git a/peakipy/core.py b/peakipy/core.py index d404db3d..114cc854 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -1,4 +1,4 @@ -""" +""" peakipy - deconvolute overlapping NMR peaks Copyright (C) 2019 Jacob Peter Brady @@ -19,7 +19,6 @@ """ - import sys import json from datetime import datetime @@ -91,7 +90,7 @@ class Lineshape(str, Enum): def gaussian(x, center=0.0, sigma=1.0): - """1-dimensional Gaussian function. + r"""1-dimensional Gaussian function. gaussian(x, center, sigma) = (1/(s2pi*sigma)) * exp(-(1.0*x-center)**2 / (2*sigma**2)) @@ -115,7 +114,7 @@ def gaussian(x, center=0.0, sigma=1.0): def lorentzian(x, center=0.0, sigma=1.0): - """1-dimensional Lorentzian function. + r"""1-dimensional Lorentzian function. 
lorentzian(x, center, sigma) = (1/(1 + ((1.0*x-center)/sigma)**2)) / (pi*sigma) @@ -139,7 +138,7 @@ def lorentzian(x, center=0.0, sigma=1.0): def voigt(x, center=0.0, sigma=1.0, gamma=None): - """Return a 1-dimensional Voigt function. + r"""Return a 1-dimensional Voigt function. voigt(x, center, sigma, gamma) = amplitude*wofz(z).real / (sigma*sqrt(2.0 * π)) @@ -174,7 +173,7 @@ def voigt(x, center=0.0, sigma=1.0, gamma=None): def pseudo_voigt(x, center=0.0, sigma=1.0, fraction=0.5): - """1-dimensional Pseudo-voigt function + r"""1-dimensional Pseudo-voigt function Superposition of Gaussian and Lorentzian function @@ -212,7 +211,7 @@ def pvoigt2d( sigma_y=1.0, fraction=0.5, ): - """2D pseudo-voigt model + r"""2D pseudo-voigt model :math:`(1-fraction) G(x,center,\sigma_{gx}) + (fraction) L(x, center, \sigma_x) * (1-fraction) G(y,center,\sigma_{gy}) + (fraction) L(y, center, \sigma_y)` @@ -241,21 +240,9 @@ def pvoigt2d( :rtype: numpy.array """ - x, y = XY - # sigma_gx = sigma_x / sqrt(2 * log2) - # sigma_gy = sigma_y / sqrt(2 * log2) - # fraction same for both dimensions - # super position of gaussian and lorentzian - # then convoluted for x y - # pv_x = (1 - fraction) * gaussian(x, center_x, sigma_gx) + fraction * lorentzian( - # x, center_x, sigma_x - # ) pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) pv_y = pseudo_voigt(y, center_y, sigma_y, fraction) - # pv_y = (1 - fraction) * gaussian(y, center_y, sigma_gy) + fraction * lorentzian( - # y, center_y, sigma_y - # ) return amplitude * pv_x * pv_y @@ -495,7 +482,7 @@ def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): param_dict = {} - for index, peak in peaks.iterrows(): + for _, peak in peaks.iterrows(): str_form = lambda x: "%s%s" % (to_prefix(peak.ASS), x) # using exact value of points (i.e decimal) param_dict[str_form("center_x")] = peak.X_AXISf @@ -614,7 +601,7 @@ def make_models( # make model for first peak first_peak, *remaining_peaks = peaks.iterrows() mod = Model(model, prefix="%s" % to_prefix(first_peak[1].ASS)) - for index, peak in remaining_peaks: + for _, peak in remaining_peaks: mod += Model(model, prefix="%s" % to_prefix(peak.ASS)) param_dict = make_param_dict(peaks, data, lineshape=lineshape) @@ -726,7 +713,7 @@ def df_to_rich_table(df, title: str, columns: List[str], styles: str): table = Table(title=title) for col, style in zip(columns, styles): table.add_column(col, style=style) - for ind, row in df.iterrows(): + for _, row in df.iterrows(): row = row[columns].values str_row = [] for i in row: @@ -747,7 +734,7 @@ def df_to_rich_table(df, title: str, columns: List[str], styles: str): def make_mask_from_peak_cluster(group, data): mask = np.zeros(data.shape, dtype=bool) - for index, peak in group.iterrows(): + for _, peak in group.iterrows(): mask += make_mask( data, peak.X_AXISf, peak.Y_AXISf, peak.X_RADIUS, peak.Y_RADIUS ) @@ -776,11 +763,76 @@ def select_reference_planes_using_indices(data, indices: List[int]): def select_planes_above_threshold_from_masked_data(data, threshold=None): + """This function returns planes with data above the threshold. + + It currently uses absolute intensity values. + Negative thresholds just result in return of the orignal data. 
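+
+    A minimal sketch of the expected behaviour on a toy array (illustrative
+    values, not real spectrum data):
+
+    >>> import numpy as np
+    >>> planes = np.array([[1, 1], [5, 5]])
+    >>> select_planes_above_threshold_from_masked_data(planes, threshold=2).shape
+    (1, 2)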
+ + """ if threshold == None: - data = data + selected_data = data else: - data = data[np.abs(data).max(axis=1) > threshold] - return data + selected_data = data[np.abs(data).max(axis=1) > threshold] + + if selected_data.shape[0] == 0: + selected_data = data + + return selected_data + + +def get_lineshape_function(lineshape: Lineshape): + match lineshape: + case lineshape.PV | lineshape.G | lineshape.L: + lineshape_function = pvoigt2d + case lineshape.V: + lineshape_function = voigt2d + case lineshape.PV_PV: + lineshape_function = pv_pv + case lineshape.G_L: + lineshape_function = gaussian_lorentzian + case lineshape.PV_G: + lineshape_function = pv_g + case lineshape.PV_L: + lineshape_function = pv_l + case _: + raise Exception("No lineshape was selected!") + return lineshape_function + + +def slice_peaks_from_data_using_mask(data, mask): + peak_slices = np.array([d[mask] for d in data]) + return peak_slices + + +def get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points): + max_point, min_point = ( + int(np.ceil(max(group_axis_points) + mask_radius_in_points + 1)), + int(np.floor(min(group_axis_points) - mask_radius_in_points)), + ) + return max_point, min_point + + +def deal_with_peaks_on_edge_of_spectrum(data_shape, max_x, min_x, max_y, min_y): + if min_y < 0: + min_y = 0 + + if min_x < 0: + min_x = 0 + + if max_y > data_shape[-2]: + max_y = data_shape[-2] + + if max_x > data_shape[-1]: + max_x = data_shape[-1] + return max_x, min_x, max_y, min_y + + +def make_meshgrid(data_shape): + # must be a better way to make the meshgrid + x = np.arange(data_shape[-1]) + y = np.arange(data_shape[-2]) + XY = np.meshgrid(x, y) + return XY def fit_first_plane( @@ -796,7 +848,7 @@ def fit_first_plane( reference_plane_indices: List[int] = [], threshold: Optional[float] = None, ): - """Deconvolute group of peaks + r"""Deconvolute group of peaks :param group: pandas data from containing group of peaks using groupby("CLUSTID") :type group: pandas.core.groupby.generic.DataFrameGroupBy @@ -835,28 +887,10 @@ def fit_first_plane( :rtype: FitResult """ - match lineshape: - case lineshape.PV | lineshape.G | lineshape.L: - lineshape_function = pvoigt2d - case lineshape.V: - lineshape_function = voigt2d - case lineshape.PV_PV: - lineshape_function = pv_pv - case lineshape.G_L: - lineshape_function = gaussian_lorentzian - case lineshape.PV_G: - lineshape_function = pv_g - case lineshape.PV_L: - lineshape_function = pv_l - case _: - raise Exception("No lineshape was selected!") - + lineshape_function = get_lineshape_function(lineshape) mod, p_guess = make_models( lineshape_function, group, data, lineshape=lineshape, xy_bounds=xy_bounds ) - # get initial peak centers - cen_x = [p_guess[k].value for k in p_guess if "center_x" in k] - cen_y = [p_guess[k].value for k in p_guess if "center_y" in k] first_plane_data = data[0] mask, peak = make_mask_from_peak_cluster(group, first_plane_data) @@ -864,48 +898,28 @@ def fit_first_plane( x_radius = group.X_RADIUS.max() y_radius = group.Y_RADIUS.max() - max_x, min_x = ( - int(np.ceil(max(group.X_AXISf) + x_radius + 1)), - int(np.floor(min(group.X_AXISf) - x_radius)), + max_x, min_x = get_limits_for_axis_in_points( + group_axis_points=group.X_AXISf, mask_radius_in_points=x_radius ) - max_y, min_y = ( - int(np.ceil(max(group.Y_AXISf) + y_radius + 1)), - int(np.floor(min(group.Y_AXISf) - y_radius)), + max_y, min_y = get_limits_for_axis_in_points( + group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius + ) + max_x, min_x, max_y, min_y = 
deal_with_peaks_on_edge_of_spectrum( + data.shape, max_x, min_x, max_y, min_y ) - #  deal with peaks on the edge of spectrum - shape = data.shape - if min_y < 0: - min_y = 0 - - if min_x < 0: - min_x = 0 - - if max_y > shape[-2]: - max_y = shape[-2] - - if max_x > shape[-1]: - max_x = shape[-1] - - # print("DATA",data.shape, mask.shape) - peak_slices = np.array([d[mask] for d in data]) - # print("Peak slices", peak_slices.shape) + peak_slices = slice_peaks_from_data_using_mask(data, mask) peak_slices = select_reference_planes_using_indices( peak_slices, reference_plane_indices ) peak_slices = select_planes_above_threshold_from_masked_data(peak_slices, threshold) - # print(peak_slices.shape) peak_slices = peak_slices.sum(axis=0) - # print(peak_slices.shape) - # must be a better way to make the meshgrid - x = np.arange(shape[-1]) - y = np.arange(shape[-2]) - XY = np.meshgrid(x, y) + XY = make_meshgrid(data.shape) X, Y = XY XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) - # print("XY_slices", XY_slices) + weights = 1.0 / np.array([noise] * len(np.ravel(peak_slices))) out = mod.fit( @@ -1104,9 +1118,9 @@ def plot(self, plot_path=None, show=False, mp=False): z_sim = self.Z_sim[self.min_y : self.max_y, self.min_x : self.max_x] ax.set_title( - "$\chi^2$=" + r"$\chi^2$=" + f"{self.out.chisqr:.3f}, " - + "$\chi_{red}^2$=" + + r"$\chi_{red}^2$=" + f"{self.out.redchi:.4f}" ) @@ -1635,10 +1649,10 @@ def update_df(self): self.df["X_AXISf"] = self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) # in case of missing values (should estimate though) - self.df.XW_HZ.replace("None", "20.0", inplace=True) - self.df.YW_HZ.replace("None", "20.0", inplace=True) - self.df.XW_HZ.replace(np.NaN, "20.0", inplace=True) - self.df.YW_HZ.replace(np.NaN, "20.0", inplace=True) + self.df["XW_HZ"] = self.df.XW_HZ.replace("None", "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace("None", "20.0") + self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") # convert linewidths to float self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) @@ -1713,7 +1727,7 @@ def _read_sparky(self): df = pd.read_csv( self.peaklist_path, skiprows=1, - delim_whitespace=True, + sep=r"\s+", names=["ASS", "Y_PPM", "X_PPM", "VOLUME", "HEIGHT", "YW_HZ", "XW_HZ"], ) df["INDEX"] = df.index @@ -1732,7 +1746,7 @@ def _read_pipe(self): else: to_skip += 1 df = pd.read_csv( - self.peaklist_path, skiprows=to_skip, names=columns, delim_whitespace=True + self.peaklist_path, skiprows=to_skip, names=columns, sep=r"\s+" ) return df @@ -1764,7 +1778,7 @@ def check_assignments(self): textwrap.dedent( """ Creating dummy assignments for duplicates - + """ ) ) @@ -1782,7 +1796,7 @@ def check_peak_bounds(self): f"""[red] ################################################################################# - Excluding the following peaks as they are not within the spectrum which has shape + Excluding the following peaks as they are not within the spectrum which has shape {self.data.shape} [/red]""" @@ -1967,7 +1981,7 @@ def to_fuda(self, fname="params.fuda"): fuda_file = textwrap.dedent( f"""\ - + # Read peaklist and spectrum info PEAKLIST=peaks.fuda SPECFILE={self.data_path} diff --git a/poetry.lock b/poetry.lock index 98af3b22..40676c6e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -114,6 +114,17 @@ files = [ {file = "certifi-2023.11.17.tar.gz", hash = 
"sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -341,6 +352,17 @@ files = [ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -369,6 +391,22 @@ files = [ [package.extras] tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "fonttools" version = "4.47.2" @@ -461,6 +499,20 @@ python-dateutil = ">=2.8.1" [package.extras] dev = ["flake8", "markdown", "twine", "wheel"] +[[package]] +name = "identify" +version = "2.5.33" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.6" @@ -1003,6 +1055,20 @@ scipy = "*" [package.extras] docs = ["numpydoc", "sphinx", "sphinx-rtd-theme"] +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "numdifftools" version = "0.9.41" @@ -1322,6 +1388,24 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pre-commit" +version = "3.6.0" 
+description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "prompt-toolkit" version = "3.0.43" @@ -1696,6 +1780,22 @@ dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyl doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -1937,6 +2037,26 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "virtualenv" +version = "20.25.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", 
"pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "watchdog" version = "3.0.0" @@ -2001,4 +2121,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "34645291ffe1814c1c60ed845fda663bedfdea83baa6abec223b5b6f3d39031f" +content-hash = "06f1d10780fb94ccd26fa372524e161efbc5a75fac8ce7812912e3f70d6985a7" diff --git a/pyproject.toml b/pyproject.toml index 8ef1013e..61f3ec61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ black = "^23.7.0" pytest = "^7.4.2" mkdocs-material = "^8.5.7" ipython = "^8.15.0" +pre-commit = "^3.6.0" [build-system] requires = ["poetry>=0.12"] diff --git a/test/test_core.py b/test/test_core.py index 39384105..dd8a02a7 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -2,10 +2,12 @@ from unittest.mock import patch import numpy as np +from numpy.testing import assert_array_equal import pandas as pd import nmrglue as ng from lmfit import Model + from peakipy.core import ( make_mask, fix_params, @@ -19,9 +21,92 @@ Peaklist, Lineshape, PeaklistFormat, + select_reference_planes_using_indices, + select_planes_above_threshold_from_masked_data, + slice_peaks_from_data_using_mask, ) +def test_select_reference_planes_using_indices(): + data = np.zeros((6, 100, 200)) + indices = [] + np.testing.assert_array_equal( + select_reference_planes_using_indices(data, indices), data + ) + indices = [1] + assert select_reference_planes_using_indices(data, indices).shape == (1, 100, 200) + indices = [1, -1] + assert select_reference_planes_using_indices(data, indices).shape == (2, 100, 200) + + +def test_slice_peaks_from_data_using_mask(): + data = np.array( + [ + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], + [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], + [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], + [1, 2, 3, 4, 5, 5, 4, 3, 2, 1], + [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], + [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], + [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + ] + ) + for i in range(5) + ] + ) + mask = data[0] > 0 + assert data.shape == (5, 11, 10) + assert mask.shape == (11, 10) + peak_slices = slice_peaks_from_data_using_mask(data, mask) + # array is flattened by application of mask + assert peak_slices.shape == (5, 50) + + +def test_select_planes_above_threshold_from_masked_data(): + peak_slices = np.array( + [ + [1, 1, 1, 1, 1, 1], + [2, 2, 2, 2, 2, 2], + [-1, -1, -1, -1, -1, -1], + [-2, -2, -2, -2, -2, -2], + ] + ) + assert peak_slices.shape == (4, 6) + threshold = -1 + assert select_planes_above_threshold_from_masked_data( + peak_slices, threshold + ).shape == ( + 4, + 6, + ) + threshold = 2 + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + threshold = 1 + assert select_planes_above_threshold_from_masked_data( + peak_slices, threshold + ).shape == (2, 6) + + threshold = None + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + threshold = 10 + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + + class TestCoreFunctions(unittest.TestCase): def test_make_mask(self): data = np.ones((10, 10)) @@ -140,7 +225,7 @@ def test_to_prefix(self): (" one", "_one_"), (" one/two", "_oneortwo_"), (" one?two", "_onemaybetwo_"), - (" [{one?two\}][", 
"___onemaybetwo____"), + (r" [{one?two\}][", "___onemaybetwo____"), ] for test, expect in names: prefix = to_prefix(test) From 202c68bb20bf156a89f532e89299df450236d668 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 11 Feb 2024 23:13:17 -0500 Subject: [PATCH 10/37] refactored check code and added plotly 3D surface and wireframe --- peakipy/cli/main.py | 700 +++++++++++++++++++++++++++----------------- poetry.lock | 225 +++++++++++++- pyproject.toml | 2 + 3 files changed, 652 insertions(+), 275 deletions(-) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index e5272532..88feb731 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -22,6 +22,7 @@ import json import shutil from pathlib import Path +from dataclasses import dataclass, field from enum import Enum from typing import Optional, Tuple, List, Annotated from multiprocessing import Pool @@ -41,7 +42,10 @@ from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages from matplotlib.widgets import Button + import yaml +import plotly.graph_objects as go +import panel as pn from peakipy.core import ( Peaklist, @@ -61,6 +65,8 @@ PeaklistFormat, Lineshape, OutFmt, + get_limits_for_axis_in_points, + deal_with_peaks_on_edge_of_spectrum, ) from .fit import ( cpu_count, @@ -357,11 +363,11 @@ def read( print( f"""[green] - + ✨✨ Finished reading and clustering peaks! ✨✨ - + Use {outname} to run peakipy edit or fit.[/green] - + """ ) @@ -605,11 +611,14 @@ def fit( # split peak lists # tmp_dir = split_peaklist(peakipy_data.df, n_cpu) fit_peaks_args = FitPeaksInput(args, peakipy_data.data, config, plane_numbers) - with Pool(processes=n_cpu) as pool, tqdm( - total=len(peakipy_data.df.CLUSTID.unique()), - ascii="▱▰", - colour="green", - ) as pbar: + with ( + Pool(processes=n_cpu) as pool, + tqdm( + total=len(peakipy_data.df.CLUSTID.unique()), + ascii="▱▰", + colour="green", + ) as pbar, + ): # result = pool.map(fit_peaks, peaklists) # result = pool.starmap(fit_peaks, zip(peaklists, args_list)) result = [ @@ -795,7 +804,7 @@ def fit( print( """[green] 🍾 ✨ Finished! 
✨ 🍾 - [/green] + [/green] """ ) run_log() @@ -852,6 +861,351 @@ def get_fit_data_for_selected_peak_clusters(fits, clusters): return fits +def make_masks_from_plane_data(empty_mask_array, plane_data): + # make masks + individual_masks = [] + for cx, cy, rx, ry, name in zip( + plane_data.center_x, + plane_data.center_y, + plane_data.x_radius, + plane_data.y_radius, + plane_data.assignment, + ): + tmp_mask = make_mask(empty_mask_array, cx, cy, rx, ry) + empty_mask_array += tmp_mask + individual_masks.append(tmp_mask) + filled_mask_array = empty_mask_array + return individual_masks, filled_mask_array + + +def simulate_pv_pv_lineshapes_from_fitted_peak_parameters( + peak_parameters, XY, sim_data, sim_data_singles +): + for amp, c_x, c_y, s_x, s_y, frac_x, frac_y, ls in zip( + peak_parameters.amp, + peak_parameters.center_x, + peak_parameters.center_y, + peak_parameters.sigma_x, + peak_parameters.sigma_y, + peak_parameters.fraction_x, + peak_parameters.fraction_y, + peak_parameters.lineshape, + ): + sim_data_i = pv_pv(XY, amp, c_x, c_y, s_x, s_y, frac_x, frac_y).reshape( + sim_data.shape + ) + sim_data += sim_data_i + sim_data_singles.append(sim_data_i) + return sim_data, sim_data_singles + + +def simulate_lineshapes_from_fitted_peak_parameters( + peak_parameters, XY, sim_data, sim_data_singles +): + shape = sim_data.shape + for amp, c_x, c_y, s_x, s_y, frac, lineshape in zip( + peak_parameters.amp, + peak_parameters.center_x, + peak_parameters.center_y, + peak_parameters.sigma_x, + peak_parameters.sigma_y, + peak_parameters.fraction, + peak_parameters.lineshape, + ): + # print(amp) + match lineshape: + case "G" | "L" | "PV": + sim_data_i = pvoigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + case "PV_L": + sim_data_i = pv_l(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + + case "PV_G": + sim_data_i = pv_g(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + + case "G_L": + sim_data_i = gaussian_lorentzian( + XY, amp, c_x, c_y, s_x, s_y, frac + ).reshape(shape) + + case "V": + sim_data_i = voigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + sim_data += sim_data_i + sim_data_singles.append(sim_data_i) + return sim_data, sim_data_singles + + +@dataclass +class PlottingDataForPlane: + pseudo3D: Pseudo3D + plane_id: int + plane: pd.DataFrame + X: np.array + Y: np.array + mask: np.array + individual_masks: List[np.array] + sim_data: np.array + sim_data_singles: List[np.array] + min_x: int + max_x: int + min_y: int + max_y: int + fit_color: str + data_color: str + rcount: int + ccount: int + + x_plot: np.array = field(init=False) + y_plot: np.array = field(init=False) + masked_data: np.array = field(init=False) + masked_sim_data: np.array = field(init=False) + residual: np.array = field(init=False) + single_colors: List = field(init=False) + + def __post_init__(self): + self.plane_data = self.pseudo3D.data[self.plane_id] + self.masked_data = self.plane_data.copy() + self.masked_sim_data = self.sim_data.copy() + self.masked_data[~self.mask] = np.nan + self.masked_sim_data[~self.mask] = np.nan + + self.x_plot = self.pseudo3D.uc_f2.ppm( + self.X[self.min_y : self.max_y, self.min_x : self.max_x] + ) + self.y_plot = self.pseudo3D.uc_f1.ppm( + self.Y[self.min_y : self.max_y, self.min_x : self.max_x] + ) + self.masked_data = self.masked_data[ + self.min_y : self.max_y, self.min_x : self.max_x + ] + self.sim_plot = self.masked_sim_data[ + self.min_y : self.max_y, self.min_x : self.max_x + ] + self.residual = self.masked_data - self.sim_plot + + for single_mask, single in 
zip(self.individual_masks, self.sim_data_singles): + single[~single_mask] = np.nan + self.sim_data_singles = [ + sim_data_single[self.min_y : self.max_y, self.min_x : self.max_x] + for sim_data_single in self.sim_data_singles + ] + self.single_colors = [ + cm.viridis(i) for i in np.linspace(0, 1, len(self.sim_data_singles)) + ] + + +def plot_data_is_valid(plot_data: PlottingDataForPlane) -> bool: + if len(plot_data.x_plot) < 1 or len(plot_data.y_plot) < 1: + print(f"[red]Nothing to plot for cluster {int(plot_data.plane.clustid)}[/red]") + print(f"[red]x={plot_data.x_plot},y={plot_data.y_plot}[/red]") + print( + df_to_rich_table( + plot_data.plane, + title="", + columns=bad_column_selection, + styles=bad_color_selection, + ) + ) + plt.close() + validated = False + # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") + elif plot_data.masked_data.shape[0] == 0 or plot_data.masked_data.shape[1] == 0: + print(f"[red]Nothing to plot for cluster {int(plot_data.plane.clustid)}[/red]") + print( + df_to_rich_table( + plot_data.plane, + title="Bad plane", + columns=bad_column_selection, + styles=bad_color_selection, + ) + ) + spec_lim_f1 = " - ".join( + ["%8.3f" % i for i in plot_data.pseudo3D.f1_ppm_limits] + ) + spec_lim_f2 = " - ".join( + ["%8.3f" % i for i in plot_data.pseudo3D.f2_ppm_limits] + ) + print(f"Spectrum limits are {plot_data.pseudo3D.f2_label:4s}:{spec_lim_f2} ppm") + print(f" {plot_data.pseudo3D.f1_label:4s}:{spec_lim_f1} ppm") + plt.close() + validated = False + else: + validated = True + return validated + + +def create_matplotlib_figure( + plot_data: PlottingDataForPlane, + pdf: PdfPages, + individual=False, + label=False, + ccpn_flag=False, + show=True, +): + fig = plt.figure(figsize=(10, 6)) + ax = fig.add_subplot(projection="3d") + if plot_data_is_valid(plot_data): + cset = ax.contourf( + plot_data.x_plot, + plot_data.y_plot, + plot_data.residual, + zdir="z", + offset=np.nanmin(plot_data.masked_data) * 1.1, + alpha=0.5, + cmap=cm.coolwarm, + ) + cbl = fig.colorbar(cset, ax=ax, shrink=0.5, format="%.2e") + cbl.ax.set_title("Residual", pad=20) + + if individual: + #  for plotting single fit surfaces + single_colors = [ + cm.viridis(i) + for i in np.linspace(0, 1, len(plot_data.sim_data_singles)) + ] + [ + ax.plot_surface( + plot_data.x_plot, + plot_data.y_plot, + z_single, + color=c, + alpha=0.5, + ) + for c, z_single in zip(single_colors, plot_data.sim_data_singles) + ] + ax.plot_wireframe( + plot_data.x_plot, + plot_data.y_plot, + plot_data.sim_plot, + # colors=[cm.coolwarm(i) for i in np.ravel(residual)], + colors=plot_data.fit_color, + linestyle="--", + label="fit", + rcount=plot_data.rcount, + ccount=plot_data.ccount, + ) + ax.plot_wireframe( + plot_data.x_plot, + plot_data.y_plot, + plot_data.masked_data, + colors=plot_data.data_color, + linestyle="-", + label="data", + rcount=plot_data.rcount, + ccount=plot_data.ccount, + ) + ax.set_ylabel(plot_data.pseudo3D.f1_label) + ax.set_xlabel(plot_data.pseudo3D.f2_label) + + # axes will appear inverted + ax.view_init(30, 120) + + # names = ",".join(plane.assignment) + title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane.clustid.iloc[0]}" + plt.title(title) + print(f"[green]Plotting: {title}[/green]") + out_str = "Volumes (Heights)\n===========\n" + # chi2s = [] + for _, row in plot_data.plane.iterrows(): + out_str += f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n" + if label: + ax.text( + row.center_x_ppm, + row.center_y_ppm, + row.height * 1.2, + row.assignment, + (1, 1, 1), + ) + + 
ax.text2D( + -0.5, + 1.0, + out_str, + transform=ax.transAxes, + fontsize=10, + fontfamily="sans-serif", + va="top", + bbox=dict(boxstyle="round", ec="k", fc="k", alpha=0.5), + ) + + ax.legend() + + if show: + + def exit_program(event): + exit() + + def next_plot(event): + plt.close() + + axexit = plt.axes([0.81, 0.05, 0.1, 0.075]) + bnexit = Button(axexit, "Exit") + bnexit.on_clicked(exit_program) + axnext = plt.axes([0.71, 0.05, 0.1, 0.075]) + bnnext = Button(axnext, "Next") + bnnext.on_clicked(next_plot) + if ccpn_flag: + plt.show(windowTitle="", size=(1000, 500)) + else: + plt.show() + else: + pdf.savefig() + + plt.close() + + +def create_plotly_wireframe_lines(plot_data: PlottingDataForPlane): + lines = [] + show_legend = False + # make simulated data wireframe + line_marker = dict(color=plot_data.fit_color, width=4) + for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.sim_plot): + lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.sim_plot.T): + lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + # make experimental data wireframe + line_marker = dict(color=plot_data.data_color, width=4) + for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.masked_data): + lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.masked_data.T): + lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + + return lines + + +def create_plotly_surfaces(plot_data: PlottingDataForPlane): + data = [] + color_scale_values = np.linspace(0, 1, len(plot_data.single_colors)) + color_scale = [ + [val, f"rgb({', '.join('%d'%(i*255) for i in c[0:3])})"] + for val, c in zip(color_scale_values, plot_data.single_colors) + ] + for val, individual_peak in zip(color_scale_values, plot_data.sim_data_singles): + colors = np.zeros(shape=individual_peak.shape) + val + data.append( + go.Surface( + z=individual_peak, + x=plot_data.x_plot, + y=plot_data.y_plot, + opacity=0.5, + surfacecolor=colors, + colorscale=color_scale, + showscale=False, + cmin=0, + cmax=1, + ) + ) + return data + + +def create_plotly_figure(plot_data: PlottingDataForPlane): + lines = create_plotly_wireframe_lines(plot_data) + surfaces = create_plotly_surfaces(plot_data) + fig = go.Figure(data=lines + surfaces) + layout = go.Layout(showlegend=False) + fig.update_layout(layout) + return fig + + @app.command(help="Interactive plots for checking fits") def check( fits: Path, @@ -944,7 +1298,7 @@ def check( data_color, fit_color = unpack_plotting_colors(colors) fits = get_fit_data_for_selected_peak_clusters(fits, clusters) - groups = fits.groupby("clustid") + peak_clusters = fits.groupby("clustid") # make plotting meshes x = np.arange(pseudo3D.f2_size) @@ -953,292 +1307,90 @@ def check( X, Y = XY with PdfPages(outname) as pdf: - for _, group in groups: + for _, peak_cluster in peak_clusters: table = df_to_rich_table( - group, + peak_cluster, title="", columns=columns_to_print, styles=["blue" for _ in columns_to_print], ) print(table) - mask = np.zeros((pseudo3D.f1_size, pseudo3D.f2_size), dtype=bool) - - first_plane = group[group.plane == selected_plane] - - x_radius = group.x_radius.max() - y_radius = group.y_radius.max() - max_x, min_x = ( - int(np.ceil(max(group.center_x) + x_radius + 1)), - int(np.floor(min(group.center_x) - x_radius)), + x_radius = peak_cluster.x_radius.max() + y_radius = 
peak_cluster.y_radius.max() + max_x, min_x = get_limits_for_axis_in_points( + group_axis_points=peak_cluster.center_x, mask_radius_in_points=x_radius ) - max_y, min_y = ( - int(np.ceil(max(group.center_y) + y_radius + 1)), - int(np.floor(min(group.center_y) - y_radius)), + max_y, min_y = get_limits_for_axis_in_points( + group_axis_points=peak_cluster.center_y, mask_radius_in_points=y_radius + ) + max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( + pseudo3D.data.shape, max_x, min_x, max_y, min_y ) - #  deal with peaks on the edge of spectrum - if min_y < 0: - min_y = 0 - - if min_x < 0: - min_x = 0 - - if max_y > pseudo3D.f1_size: - max_y = pseudo3D.f1_size - - if max_x > pseudo3D.f2_size: - max_x = pseudo3D.f2_size - - masks = [] - # make masks - for cx, cy, rx, ry, name in zip( - first_plane.center_x, - first_plane.center_y, - first_plane.x_radius, - first_plane.y_radius, - first_plane.assignment, - ): - tmp_mask = make_mask(mask, cx, cy, rx, ry) - mask += tmp_mask - masks.append(tmp_mask) + empty_mask_array = np.zeros( + (pseudo3D.f1_size, pseudo3D.f2_size), dtype=bool + ) + first_plane = peak_cluster[peak_cluster.plane == selected_plane] + individual_masks, mask = make_masks_from_plane_data( + empty_mask_array, first_plane + ) # generate simulated data - for plane_id, plane in group.groupby("plane"): + for plane_id, plane in peak_cluster.groupby("plane"): sim_data_singles = [] sim_data = np.zeros((pseudo3D.f1_size, pseudo3D.f2_size)) - shape = sim_data.shape try: - for amp, c_x, c_y, s_x, s_y, frac_x, frac_y, ls in zip( - plane.amp, - plane.center_x, - plane.center_y, - plane.sigma_x, - plane.sigma_y, - plane.fraction_x, - plane.fraction_y, - plane.lineshape, - ): - sim_data_i = pv_pv( - XY, amp, c_x, c_y, s_x, s_y, frac_x, frac_y - ).reshape(shape) - sim_data += sim_data_i - sim_data_singles.append(sim_data_i) + ( + sim_data, + sim_data_singles, + ) = simulate_pv_pv_lineshapes_from_fitted_peak_parameters( + plane, XY, sim_data, sim_data_singles + ) except: - for amp, c_x, c_y, s_x, s_y, frac, ls in zip( - plane.amp, - plane.center_x, - plane.center_y, - plane.sigma_x, - plane.sigma_y, - plane.fraction, - plane.lineshape, - ): - # print(amp) - match ls: - case "G" | "L" | "PV": - sim_data_i = pvoigt2d( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - case "PV_L": - sim_data_i = pv_l( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - - case "PV_G": - sim_data_i = pv_g( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - - case "G_L": - sim_data_i = gaussian_lorentzian( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - - case "V": - sim_data_i = voigt2d( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - sim_data += sim_data_i - sim_data_singles.append(sim_data_i) - - masked_data = pseudo3D.data[plane_id].copy() - masked_sim_data = sim_data.copy() - masked_data[~mask] = np.nan - masked_sim_data[~mask] = np.nan + ( + sim_data, + sim_data_singles, + ) = simulate_lineshapes_from_fitted_peak_parameters( + plane, XY, sim_data, sim_data_singles + ) + + plot_data = PlottingDataForPlane( + pseudo3D, + plane_id, + plane, + X, + Y, + mask, + individual_masks, + sim_data, + sim_data_singles, + min_x, + max_x, + min_y, + max_y, + fit_color, + data_color, + rcount, + ccount, + ) if ccpn_flag: plt = PlotterWidget() else: plt = matplotlib.pyplot + create_matplotlib_figure( + plot_data, pdf, individual, label, ccpn_flag, show + ) + # fig = create_plotly_figure(plot_data) + fig = create_plotly_figure(plot_data) + fig.show() + # surf = 
pn.pane.plotly.Plotly(fig) + # app = pn.Column(surf) + # app.show(threaded=True) + if first: + break - fig = plt.figure(figsize=(10, 6)) - ax = fig.add_subplot(projection="3d") - ## slice out plot area - x_plot = pseudo3D.uc_f2.ppm(X[min_y:max_y, min_x:max_x]) - y_plot = pseudo3D.uc_f1.ppm(Y[min_y:max_y, min_x:max_x]) - masked_data = masked_data[min_y:max_y, min_x:max_x] - sim_plot = masked_sim_data[min_y:max_y, min_x:max_x] - # or len(masked_data)<1 or len(sim_plot)<1 - - if len(x_plot) < 1 or len(y_plot) < 1: - print( - f"[red]Nothing to plot for cluster {int(plane.clustid)}[/red]" - ) - print(f"[red]x={x_plot},y={y_plot}[/red]") - print( - df_to_rich_table( - plane, - title="", - columns=bad_column_selection, - styles=bad_color_selection, - ) - ) - plt.close() - - # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") - elif masked_data.shape[0] == 0 or masked_data.shape[1] == 0: - print( - f"[red]Nothing to plot for cluster {int(plane.clustid)}[/red]" - ) - print( - df_to_rich_table( - plane, - title="Bad plane", - columns=bad_column_selection, - styles=bad_color_selection, - ) - ) - spec_lim_f1 = " - ".join( - ["%8.3f" % i for i in pseudo3D.f1_ppm_limits] - ) - spec_lim_f2 = " - ".join( - ["%8.3f" % i for i in pseudo3D.f2_ppm_limits] - ) - print( - f"Spectrum limits are {pseudo3D.f2_label:4s}:{spec_lim_f2} ppm" - ) - print( - f" {pseudo3D.f1_label:4s}:{spec_lim_f1} ppm" - ) - plt.close() - else: - residual = masked_data - sim_plot - cset = ax.contourf( - x_plot, - y_plot, - residual, - zdir="z", - offset=np.nanmin(masked_data) * 1.1, - alpha=0.5, - cmap=cm.coolwarm, - ) - cbl = fig.colorbar(cset, ax=ax, shrink=0.5, format="%.2e") - cbl.ax.set_title("Residual", pad=20) - - if individual: - # for making colored masks - for single_mask, single in zip(masks, sim_data_singles): - single[~single_mask] = np.nan - sim_data_singles = [ - sim_data_single[min_y:max_y, min_x:max_x] - for sim_data_single in sim_data_singles - ] - #  for plotting single fit surfaces - single_colors = [ - cm.viridis(i) - for i in np.linspace(0, 1, len(sim_data_singles)) - ] - [ - ax.plot_surface( - x_plot, y_plot, z_single, color=c, alpha=0.5 - ) - for c, z_single in zip(single_colors, sim_data_singles) - ] - ax.plot_wireframe( - x_plot, - y_plot, - sim_plot, - # colors=[cm.coolwarm(i) for i in np.ravel(residual)], - colors=fit_color, - linestyle="--", - label="fit", - rcount=rcount, - ccount=ccount, - ) - ax.plot_wireframe( - x_plot, - y_plot, - masked_data, - colors=data_color, - linestyle="-", - label="data", - rcount=rcount, - ccount=ccount, - ) - ax.set_ylabel(pseudo3D.f1_label) - ax.set_xlabel(pseudo3D.f2_label) - - # axes will appear inverted - ax.view_init(30, 120) - - # names = ",".join(plane.assignment) - title = f"Plane={plane_id},Cluster={plane.clustid.iloc[0]}" - plt.title(title) - print(f"[green]Plotting: {title}[/green]") - out_str = "Volumes (Heights)\n===========\n" - # chi2s = [] - for ind, row in plane.iterrows(): - out_str += ( - f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n" - ) - if label: - ax.text( - row.center_x_ppm, - row.center_y_ppm, - row.height * 1.2, - row.assignment, - (1, 1, 1), - ) - - ax.text2D( - -0.5, - 1.0, - out_str, - transform=ax.transAxes, - fontsize=10, - fontfamily="sans-serif", - va="top", - bbox=dict(boxstyle="round", ec="k", fc="k", alpha=0.5), - ) - - ax.legend() - - if show: - - def exit_program(event): - exit() - - def next_plot(event): - plt.close() - - axexit = plt.axes([0.81, 0.05, 0.1, 0.075]) - bnexit = Button(axexit, "Exit") - 
bnexit.on_clicked(exit_program) - axnext = plt.axes([0.71, 0.05, 0.1, 0.075]) - bnnext = Button(axnext, "Next") - bnnext.on_clicked(next_plot) - if ccpn_flag: - plt.show(windowTitle="", size=(1000, 500)) - else: - plt.show() - else: - pdf.savefig() - - plt.close() - - if first: - break run_log() diff --git a/poetry.lock b/poetry.lock index 40676c6e..5ae425a4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -81,6 +81,24 @@ d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + [[package]] name = "bokeh" version = "3.3.4" @@ -767,6 +785,26 @@ files = [ lint = ["pre-commit (>=3.3)"] test = ["pytest (>=7.4)", "pytest-cov (>=4.1)"] +[[package]] +name = "linkify-it-py" +version = "2.0.3" +description = "Links recognition library with FULL unicode support." +optional = false +python-versions = ">=3.7" +files = [ + {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, + {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, +] + +[package.dependencies] +uc-micro-py = "*" + +[package.extras] +benchmark = ["pytest", "pytest-benchmark"] +dev = ["black", "flake8", "isort", "pre-commit", "pyproject-flake8"] +doc = ["myst-parser", "sphinx", "sphinx-book-theme"] +test = ["coverage", "pytest", "pytest-cov"] + [[package]] name = "lmfit" version = "1.2.2" @@ -805,6 +843,30 @@ files = [ docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] testing = ["coverage", "pyyaml"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.4" @@ -936,6 +998,36 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "mdit-py-plugins" +version = "0.4.0" +description = "Collection of plugins for markdown-it-py" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, +] + +[package.dependencies] +markdown-it-py = ">=1.0.0,<4.0.0" + +[package.extras] +code-style = ["pre-commit"] +rtd = ["myst-parser", "sphinx-book-theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mergedeep" version = "1.3.4" @@ -1215,6 +1307,64 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "panel" +version = "1.3.8" +description = "The powerful data exploration & web app framework for Python." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "panel-1.3.8-py2.py3-none-any.whl", hash = "sha256:49bf3931986a0ddf3f7b4bda3c65c6a311d5277524acdb4e0bff69cba6bf5775"}, + {file = "panel-1.3.8.tar.gz", hash = "sha256:809afd2b861747a31d6ddaadbbc7c25b8dab392dc78256f68b759214113c5be3"}, +] + +[package.dependencies] +bleach = "*" +bokeh = ">=3.2.0,<3.4.0" +linkify-it-py = "*" +markdown = "*" +markdown-it-py = "*" +mdit-py-plugins = "*" +pandas = ">=1.2" +param = ">=2.0.0,<3.0" +pyviz-comms = ">=2.0.0" +requests = "*" +tqdm = ">=4.48.0" +typing-extensions = "*" +xyzservices = ">=2021.09.1" + +[package.extras] +all = ["aiohttp", "altair", "anywidget", "channels", "croniter", "datashader", "diskcache", "django (<4)", "fastparquet", "flake8", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipython (>=7.0)", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "jupyter-server", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "nbval", "networkx (>=2.5)", "numba (<0.58)", "numpy", "pandas (<2.1.0)", "pandas (>=1.3)", "parameterized", "pillow", "playwright", "plotly", "plotly (>=4.0)", "pre-commit", "psutil", "pydeck", "pygraphviz", "pyinstrument (>=4.0)", "pytest", "pytest-asyncio (<0.22)", "pytest-cov", "pytest-playwright", "pytest-rerunfailures", "pytest-xdist", "python-graphviz", "pyvista", "reacton", "scikit-image", "scikit-learn", "scipy", "seaborn", "streamz", "twine", "vega-datasets", "vtk", "xarray", "xgboost"] +all-pip = ["aiohttp", "altair", "anywidget", "channels", "croniter", "datashader", "diskcache", "django (<4)", "fastparquet", "flake8", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipython (>=7.0)", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "jupyter-server", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "nbval", "networkx (>=2.5)", "numba (<0.58)", "numpy", "pandas (<2.1.0)", "pandas (>=1.3)", "parameterized", "pillow", "playwright", "plotly", "plotly (>=4.0)", "pre-commit", "psutil", "pydeck", "pyinstrument (>=4.0)", "pytest", "pytest-asyncio (<0.22)", "pytest-cov", "pytest-playwright", "pytest-rerunfailures", "pytest-xdist", "pyvista", "reacton", "scikit-image", "scikit-learn", "scipy", "seaborn", "streamz", "twine", "vega-datasets", "vtk", "xarray", "xgboost"] +build = ["bleach", "bokeh (>=3.3.0,<3.4.0)", "cryptography (<39)", "markdown", "packaging", "param (>=2.0.0)", "pyviz-comms (>=2.0.0)", "requests", "setuptools (>=42)", "tqdm (>=4.48.0)", "urllib3 (<2.0)"] +doc = ["holoviews (>=1.16.0)", "jupyterlab", "lxml", "matplotlib", "nbsite (>=0.8.4)", "pandas (<2.1.0)", "pillow", "plotly"] +examples = ["aiohttp", "altair", "channels", "croniter", "datashader", "django (<4)", "fastparquet", "folium", "graphviz", "holoviews (>=1.16.0)", "hvplot", "ipyleaflet", "ipympl", "ipyvolume", "ipyvuetify", "ipywidgets", "ipywidgets-bokeh", "jupyter-bokeh (>=3.0.7)", "networkx (>=2.5)", "plotly (>=4.0)", "pydeck", "pygraphviz", "pyinstrument (>=4.0)", "python-graphviz", "pyvista", "reacton", "scikit-image", "scikit-learn", "seaborn", "streamz", "vega-datasets", "vtk", "xarray", "xgboost"] +recommended = ["holoviews (>=1.16.0)", "jupyterlab", "matplotlib", "pillow", "plotly"] +tests = ["altair", "anywidget", "diskcache", "flake8", "folium", "holoviews (>=1.16.0)", "ipympl", "ipython (>=7.0)", "ipyvuetify", "ipywidgets-bokeh", "nbval", "numba (<0.58)", "numpy", "pandas (>=1.3)", "parameterized", "pre-commit", "psutil", 
"pytest", "pytest-asyncio (<0.22)", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "reacton", "scipy", "twine"] +tests-core = ["altair", "anywidget", "diskcache", "flake8", "folium", "holoviews (>=1.16.0)", "ipython (>=7.0)", "nbval", "numpy", "pandas (>=1.3)", "parameterized", "pre-commit", "psutil", "pytest", "pytest-asyncio (<0.22)", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "scipy"] +ui = ["jupyter-server", "playwright", "pytest-playwright"] + +[[package]] +name = "param" +version = "2.0.2" +description = "Make your Python code clearer and more reliable by declaring Parameters." +optional = false +python-versions = ">=3.8" +files = [ + {file = "param-2.0.2-py3-none-any.whl", hash = "sha256:b269fd7397886ec609e544f81035fa52e1950da0e76d20080bfeca3d7a0317ca"}, + {file = "param-2.0.2.tar.gz", hash = "sha256:785845a727a588eb94c7666d80551c7e2bb97d4309d3507beab66f95e57f7527"}, +] + +[package.extras] +all = ["param[doc]", "param[lint]", "param[tests-full]"] +doc = ["nbsite (==0.8.4)", "param[examples]", "sphinx-remove-toctrees"] +examples = ["aiohttp", "pandas", "panel"] +lint = ["flake8", "pre-commit"] +tests = ["coverage[toml]", "pytest", "pytest-asyncio"] +tests-deser = ["odfpy", "openpyxl", "pyarrow", "tables", "xlrd"] +tests-examples = ["nbval", "param[examples]", "pytest", "pytest-asyncio", "pytest-xdist"] +tests-full = ["cloudpickle", "gmpy", "ipython", "jsonschema", "nest-asyncio", "numpy", "pandas", "param[tests-deser]", "param[tests-examples]", "param[tests]"] + [[package]] name = "parso" version = "0.8.3" @@ -1373,6 +1523,21 @@ files = [ docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +[[package]] +name = "plotly" +version = "5.18.0" +description = "An open-source, interactive data visualization library for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "plotly-5.18.0-py3-none-any.whl", hash = "sha256:23aa8ea2f4fb364a20d34ad38235524bd9d691bf5299e800bca608c31e8db8de"}, + {file = "plotly-5.18.0.tar.gz", hash = "sha256:360a31e6fbb49d12b007036eb6929521343d6bee2236f8459915821baefa2cbb"}, +] + +[package.dependencies] +packaging = "*" +tenacity = ">=6.2.0" + [[package]] name = "pluggy" version = "1.4.0" @@ -1539,6 +1704,25 @@ files = [ {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, ] +[[package]] +name = "pyviz-comms" +version = "3.0.1" +description = "A JupyterLab extension for rendering HoloViz content." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pyviz_comms-3.0.1-py3-none-any.whl", hash = "sha256:0130e952b942906a0eb5fcbcc750262a8e4f565a9b06b3c0d8d631f33b61b78e"}, + {file = "pyviz_comms-3.0.1.tar.gz", hash = "sha256:427c33a5a81780db9b9e757f0675f65ea2292d9a642a2d291cfb5cae6cd46991"}, +] + +[package.dependencies] +param = "*" + +[package.extras] +all = ["pyviz-comms[build]", "pyviz-comms[tests]"] +build = ["jupyterlab (>=4.0,<5.0)", "keyring", "rfc3986", "setuptools (>=40.8.0)", "twine"] +tests = ["flake8", "pytest"] + [[package]] name = "pywavelets" version = "1.5.0" @@ -1875,6 +2059,20 @@ build = ["cython (>=0.29.32)"] develop = ["Jinja2", "colorama", "cython (>=0.29.32)", "cython (>=0.29.32,<3.0.0)", "flake8", "isort", "joblib", "matplotlib (>=3)", "oldest-supported-numpy (>=2022.4.18)", "pytest (>=7.0.1,<7.1.0)", "pytest-randomly", "pytest-xdist", "pywinpty", "setuptools-scm[toml] (>=7.0.0,<7.1.0)"] docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "numpydoc", "pandas-datareader", "sphinx"] +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + [[package]] name = "tifffile" version = "2023.12.9" @@ -2001,6 +2199,20 @@ files = [ {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, ] +[[package]] +name = "uc-micro-py" +version = "1.0.3" +description = "Micro subset of unicode data files for linkify-it-py projects." 
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"},
+    {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"},
+]
+
+[package.extras]
+test = ["coverage", "pytest", "pytest-cov"]
+
 [[package]]
 name = "uncertainties"
 version = "3.1.7"
@@ -2107,6 +2319,17 @@ files = [
     {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
 ]
 
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+optional = false
+python-versions = "*"
+files = [
+    {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+    {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+
 [[package]]
 name = "xyzservices"
 version = "2023.10.1"
@@ -2121,4 +2344,4 @@ files = [
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<3.13"
-content-hash = "06f1d10780fb94ccd26fa372524e161efbc5a75fac8ce7812912e3f70d6985a7"
+content-hash = "0daddfd00e2dc4271dc14874220aa261c3dcf7dba02a9e399ed57e5a6e16b815"
diff --git a/pyproject.toml b/pyproject.toml
index 61f3ec61..de28bf8f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -38,6 +38,8 @@ typer = "^0.9.0"
 statsmodels = "^0.13.2"
 rich = "^12.5.1"
 tqdm = "^4.65.0"
+panel = "^1.3.8"
+plotly = "^5.18.0"
 
 [tool.poetry.scripts]
 peakipy = 'peakipy.cli.main:app'

From ae0924f5898d1d6b336ca52fd217c3c14db9af29 Mon Sep 17 00:00:00 2001
From: Jacob Brady
Date: Tue, 13 Feb 2024 08:18:14 -0500
Subject: [PATCH 11/37] added data validation for peakipy check and plotly flag

---
 peakipy/cli/main.py |  43 ++++++++++++++--
 poetry.lock         | 123 +++++++++++++++++++++++++++++++++++++++++++-
 pyproject.toml      |   1 +
 3 files changed, 163 insertions(+), 4 deletions(-)

diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py
index 88feb731..613546cd 100644
--- a/peakipy/cli/main.py
+++ b/peakipy/cli/main.py
@@ -35,6 +35,7 @@
 from tqdm import tqdm
 from rich import print
 from skimage.filters import threshold_otsu
+from pydantic import BaseModel
 
 import matplotlib
 import matplotlib.pyplot as plt
@@ -1206,6 +1207,40 @@ def create_plotly_figure(plot_data: PlottingDataForPlane):
     return fig
 
 
+class FitDataModel(BaseModel):
+    plane: int
+    clustid: int
+    assignment: str
+    memcnt: int
+    amp: float
+    height: float
+    center_x_ppm: float
+    center_y_ppm: float
+    fwhm_x_hz: float
+    fwhm_y_hz: float
+    lineshape: str
+    x_radius: float
+    y_radius: float
+    center_x: float
+    center_y: float
+    sigma_x: float
+    sigma_y: float
+    fraction: float
+
+
+def validate_fit_data(fit_values: dict):
+    fit_data = FitDataModel(**fit_values)
+    return fit_data.model_dump()
+
+
+def validate_fit_dataframe(df):
+    validated_fit_data = []
+    for _, row in df.iterrows():
+        fit_data = validate_fit_data(row.to_dict())
+        validated_fit_data.append(fit_data)
+    return pd.DataFrame(validated_fit_data)
+
+
 @app.command(help="Interactive plots for checking fits")
 def check(
     fits: Path,
@@ -1222,6 +1257,7 @@
     ccount: int = 50,
     colors: Tuple[str, str] = ("#5e3c99", "#e66101"),
     verb: bool = False,
+    plotly: bool = False,
 ):
     """Interactive plots for checking fits

@@ -1271,7 +1307,7 @@
         "fwhm_y_hz",
         "lineshape",
     ]
-    fits = pd.read_csv(fits)
+    fits = validate_fit_dataframe(pd.read_csv(fits))
     args = {}
     # get dims from
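# A quick illustration of the validation layer added above: a minimal sketch
# assuming the FitDataModel defined in this patch (the row dict is
# illustrative, not taken from a real fits.csv). A missing or mistyped field
# raises pydantic.ValidationError instead of silently propagating into the
# plots:
#
#     row = {"plane": 0, "clustid": 1, "assignment": "A1N-H", "memcnt": 1,
#            "amp": 1.0e5, "height": 2.0e4, "center_x_ppm": 8.5,
#            "center_y_ppm": 120.1, "fwhm_x_hz": 20.0, "fwhm_y_hz": 25.0,
#            "lineshape": "PV", "x_radius": 0.035, "y_radius": 0.35,
#            "center_x": 100.0, "center_y": 50.0, "sigma_x": 1.2,
#            "sigma_y": 1.4, "fraction": 0.5}
#     validate_fit_data(row)  # returns the coerced dict via model_dump()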
config file config_path = Path("peakipy.config") @@ -1383,8 +1419,9 @@ def check( plot_data, pdf, individual, label, ccpn_flag, show ) # fig = create_plotly_figure(plot_data) - fig = create_plotly_figure(plot_data) - fig.show() + if plotly: + fig = create_plotly_figure(plot_data) + fig.show() # surf = pn.pane.plotly.Plotly(fig) # app = pn.Column(surf) # app.show(threaded=True) diff --git a/poetry.lock b/poetry.lock index 5ae425a4..451b681a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,16 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + [[package]] name = "asteval" version = "0.9.31" @@ -1610,6 +1621,116 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = 
"pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = 
"pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pygments" version = "2.17.2" @@ -2344,4 +2465,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "0daddfd00e2dc4271dc14874220aa261c3dcf7dba02a9e399ed57e5a6e16b815" +content-hash = "d9b8100aa0d54d420fd3bc0ae5649b6c7e5b2ed784db1f8a4df1693da1e4bf28" diff --git a/pyproject.toml b/pyproject.toml index de28bf8f..30214dad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,6 +40,7 @@ rich = "^12.5.1" tqdm = "^4.65.0" panel = "^1.3.8" plotly = "^5.18.0" +pydantic = "^2.6.1" [tool.poetry.scripts] peakipy = 'peakipy.cli.main:app' From 8acd68ebe37de52ddf5d67fd3aa3672e8610b8c5 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Tue, 13 Feb 2024 23:37:14 -0500 Subject: [PATCH 12/37] very beta check panel --- peakipy/cli/check_panel.py | 50 ++++++++++++++++++++++++++++++++++++++ peakipy/cli/main.py | 4 +-- 2 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 peakipy/cli/check_panel.py diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py new file mode 100644 index 00000000..51fb8fcc --- /dev/null +++ b/peakipy/cli/check_panel.py @@ -0,0 +1,50 @@ +from pathlib import Path +import panel as pn +import pandas as pd + +from peakipy.cli.main import check + +pn.extension() + + +def get_cluster(cluster): + cluster_groups = df.groupby("clustid") + cluster_group = cluster_groups.get_group(cluster) + df_pane = pn.pane.DataFrame(cluster_group) + return df_pane + + +def create_plotly_pane(cluster): + fig = check( + fits=fits_path, + data_path=data_path, + clusters=[cluster], + config_path=config_path, + first=True, + plotly=True, + ) + + fig["layout"].update(height=800, width=800) + fig = fig.to_dict() + return pn.pane.Plotly(fig) + + +if __name__ == "__main__": + fits_path = Path("../../test/test_protein_L/fits.csv") + data_path = Path("../../test/test_protein_L/test1.ft2") + config_path = Path("../../test/test_protein_L/peakipy.config") + df = pd.read_csv(fits_path) + + clusters = [(row.clustid, row.memcnt) for _, row in df.iterrows()] + + select = pn.widgets.Select( + name="Cluster (number of peaks)", options={f"{c} ({m})": c for c, m in clusters} + ) + interactive_cluster_pane = pn.bind(get_cluster, select) + interactive_plotly_pane = pn.bind(create_plotly_pane, select) + check_pane = pn.Card( + select, + pn.Row(interactive_plotly_pane, interactive_cluster_pane), + title="Peakipy check", + ) + check_pane.show() diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 613546cd..14a7c536 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -1258,6 +1258,7 @@ def check( colors: Tuple[str, str] = ("#5e3c99", "#e66101"), verb: bool = False, plotly: bool = False, + config_path: Path = Path("peakipy.config"), ): """Interactive plots for checking fits @@ -1310,7 +1311,6 @@ def check( fits = validate_fit_dataframe(pd.read_csv(fits)) args = {} # get dims from config file - config_path = Path("peakipy.config") args, config = read_config(args, config_path) dims = config.get("dims", (1, 2, 3)) @@ -1421,7 +1421,7 @@ def check( 
# fig = create_plotly_figure(plot_data) if plotly: fig = create_plotly_figure(plot_data) - fig.show() + return fig # surf = pn.pane.plotly.Plotly(fig) # app = pn.Column(surf) # app.show(threaded=True) From 37587e0b1717c7d1862721a5b66784415d8bb848 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Wed, 14 Feb 2024 09:16:24 -0500 Subject: [PATCH 13/37] fixed bug with paths and dummy assignments - needs further cleaning --- peakipy/cli/main.py | 10 +++++----- peakipy/core.py | 3 +++ 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 14a7c536..93a93abe 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -214,7 +214,7 @@ def read( "struc_size": struc_size, } # name of output peaklist - outname = peaklist_path.stem + outname = peaklist_path.parent / peaklist_path.stem cluster = True match peaklist_format: @@ -267,7 +267,7 @@ def read( ) cluster = False # don't overwrite the old .csv file - outname = outname + "_new" + outname = outname.parent / (outname.stem + "_new") peaks.update_df() @@ -287,14 +287,14 @@ def read( match outfmt.value: case "csv": - outname = outname + ".csv" + outname = outname.with_suffix(".csv") data.to_csv(outname, float_format="%.4f", index=False) case "pkl": - outname = outname + ".pkl" + outname = outname.with_suffix(".pkl") data.to_pickle(outname) # write config file - config_path = Path("peakipy.config") + config_path = data_path.parent / Path("peakipy.config") config_kvs = [ ("dims", dims), ("data_path", str(data_path)), diff --git a/peakipy/core.py b/peakipy/core.py index 114cc854..14066107 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -1751,6 +1751,9 @@ def _read_pipe(self): return df def check_assignments(self): + # self.df["ASS"] = self.df. + self.df["ASS"] = self.df.ASS.astype(object) + self.df.loc[self.df["ASS"].isnull(), "ASS"] = "None_dummy_0" self.df["ASS"] = self.df.ASS.astype(str) duplicates_bool = self.df.ASS.duplicated() duplicates = self.df.ASS[duplicates_bool] From 93b45ce7666cf32d946fe87fb5e4cdfe8b2c1f55 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Thu, 15 Feb 2024 00:46:19 -0500 Subject: [PATCH 14/37] interactive check panel peakipy-check --- peakipy/cli/check_panel.py | 106 ++++++-- peakipy/cli/main.py | 122 +++++++-- poetry.lock | 491 +++++++++++++++++++------------------ pyproject.toml | 1 + 4 files changed, 442 insertions(+), 278 deletions(-) diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index 51fb8fcc..60dd576d 100644 --- a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -1,26 +1,72 @@ from pathlib import Path +from dataclasses import dataclass, field +from functools import lru_cache import panel as pn import pandas as pd +import typer -from peakipy.cli.main import check +from peakipy.cli.main import check, validate_fit_dataframe pn.extension() +pn.config.theme = "dark" + +global fits_path +global data_path +global config_path + + +@dataclass +class Data: + fits_path: Path = Path("./fits.csv") + data_path: Path = Path("./test.ft2") + config_path: Path = Path("./peakipy.config") + _df: pd.DataFrame = field(init=False) + + def load_dataframe(self): + self._df = validate_fit_dataframe(pd.read_csv(self.fits_path)) + + @property + def df(self): + return self._df + + +@lru_cache(maxsize=1) +def data_singleton(): + return Data() def get_cluster(cluster): - cluster_groups = df.groupby("clustid") + data = data_singleton() + cluster_groups = data.df.groupby("clustid") cluster_group = cluster_groups.get_group(cluster) - df_pane = 
pn.pane.DataFrame(
+        cluster_group[
+            [
+                "assignment",
+                "clustid",
+                "memcnt",
+                "plane",
+                "amp",
+                "height",
+                "center_x_ppm",
+                "center_y_ppm",
+                "fwhm_x_hz",
+                "fwhm_y_hz",
+                "lineshape",
+            ]
+        ]
+    )
     return df_pane
 
 
-def create_plotly_pane(cluster):
+def create_plotly_pane(cluster, plane):
+    data = data_singleton()
     fig = check(
-        fits=fits_path,
-        data_path=data_path,
+        fits=data.fits_path,
+        data_path=data.data_path,
         clusters=[cluster],
-        config_path=config_path,
-        first=True,
+        plane=plane,
+        config_path=data.config_path,
         plotly=True,
     )
@@ -29,22 +75,46 @@ def create_plotly_pane(cluster):
     return pn.pane.Plotly(fig)
 
 
-if __name__ == "__main__":
-    fits_path = Path("../../test/test_protein_L/fits.csv")
-    data_path = Path("../../test/test_protein_L/test1.ft2")
-    config_path = Path("../../test/test_protein_L/peakipy.config")
-    df = pd.read_csv(fits_path)
+app = typer.Typer()
 
-    clusters = [(row.clustid, row.memcnt) for _, row in df.iterrows()]
 
-    select = pn.widgets.Select(
+@app.command()
+def check_panel(
+    fits_path: Path, data_path: Path, config_path: Path = Path("./peakipy.config")
+):
+    data = data_singleton()
+    data.fits_path = fits_path
+    data.data_path = data_path
+    data.config_path = config_path
+    data.load_dataframe()
+
+    clusters = [(row.clustid, row.memcnt) for _, row in data.df.iterrows()]
+
+    select_cluster = pn.widgets.Select(
         name="Cluster (number of peaks)", options={f"{c} ({m})": c for c, m in clusters}
     )
-    interactive_cluster_pane = pn.bind(get_cluster, select)
-    interactive_plotly_pane = pn.bind(create_plotly_pane, select)
+    select_plane = pn.widgets.Select(
+        name="Plane", options={f"{plane}": plane for plane in data.df.plane.unique()}
+    )
+    interactive_cluster_pane = pn.bind(get_cluster, select_cluster)
+    interactive_plotly_pane = pn.bind(
+        create_plotly_pane, cluster=select_cluster, plane=select_plane
+    )
+    info_pane = pn.pane.Markdown(
+        "Select a cluster and plane to look at from the dropdown menus"
+    )
     check_pane = pn.Card(
-        select,
+        info_pane,
+        select_cluster,
+        select_plane,
         pn.Row(interactive_plotly_pane, interactive_cluster_pane),
         title="Peakipy check",
     )
     check_pane.show()
+
+
+if __name__ == "__main__":
+    # fits_path = Path("../../test/test_protein_L/fits.csv")
+    # data_path = Path("../../test/test_protein_L/test1.ft2")
+    # config_path = Path("../../test/test_protein_L/peakipy.config")
+    app()
diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py
index 93a93abe..8d0f1f37 100644
--- a/peakipy/cli/main.py
+++ b/peakipy/cli/main.py
@@ -46,7 +46,9 @@
 import yaml
 
 import plotly.graph_objects as go
-import panel as pn
+import plotly.io as pio
+
+pio.templates.default = "plotly_dark"
 
 from peakipy.core import (
     Peaklist,
@@ -939,7 +941,7 @@
 class PlottingDataForPlane:
     pseudo3D: Pseudo3D
     plane_id: int
-    plane: pd.DataFrame
+    plane_lineshape_parameters: pd.DataFrame
     X: np.array
     Y: np.array
     mask: np.array
@@ -996,11 +998,13 @@ def __post_init__(self):
 
 
 def plot_data_is_valid(plot_data: PlottingDataForPlane) -> bool:
     if len(plot_data.x_plot) < 1 or len(plot_data.y_plot) < 1:
-        print(f"[red]Nothing to plot for cluster {int(plot_data.plane.clustid)}[/red]")
+        print(
+            f"[red]Nothing to plot for cluster {int(plot_data.plane_lineshape_parameters.clustid)}[/red]"
+        )
         print(f"[red]x={plot_data.x_plot},y={plot_data.y_plot}[/red]")
         print(
             df_to_rich_table(
-                plot_data.plane,
+                plot_data.plane_lineshape_parameters,
                 title="",
                 columns=bad_column_selection,
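# Note on the wiring in check_panel above: pn.bind ties each callback to its
# widgets, so Panel re-runs get_cluster/create_plotly_pane whenever a dropdown
# value changes. The same reactive pattern in miniature (toy names, runnable
# on its own):
#
#     import panel as pn
#     pn.extension()
#     select = pn.widgets.Select(name="n", options=[1, 2, 3])
#     def describe(n):
#         return pn.pane.Markdown(f"you picked {n}")
#     pn.Column(select, pn.bind(describe, select)).show()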
styles=bad_color_selection, @@ -1013,7 +1017,7 @@ def plot_data_is_valid(plot_data: PlottingDataForPlane) -> bool: print(f"[red]Nothing to plot for cluster {int(plot_data.plane.clustid)}[/red]") print( df_to_rich_table( - plot_data.plane, + plot_data.plane_lineshape_parameters, title="Bad plane", columns=bad_column_selection, styles=bad_color_selection, @@ -1101,12 +1105,12 @@ def create_matplotlib_figure( ax.view_init(30, 120) # names = ",".join(plane.assignment) - title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane.clustid.iloc[0]}" + title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane_lineshape_parameters.clustid.iloc[0]}" plt.title(title) print(f"[green]Plotting: {title}[/green]") out_str = "Volumes (Heights)\n===========\n" # chi2s = [] - for _, row in plot_data.plane.iterrows(): + for _, row in plot_data.plane_lineshape_parameters.iterrows(): out_str += f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n" if label: ax.text( @@ -1156,23 +1160,64 @@ def next_plot(event): def create_plotly_wireframe_lines(plot_data: PlottingDataForPlane): lines = [] - show_legend = False + show_legend = lambda x: x < 1 + showlegend = False # make simulated data wireframe line_marker = dict(color=plot_data.fit_color, width=4) + counter = 0 for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.sim_plot): - lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + showlegend = show_legend(counter) + lines.append( + go.Scatter3d( + x=i, + y=j, + z=k, + mode="lines", + line=line_marker, + name="fit", + showlegend=showlegend, + ) + ) + counter += 1 for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.sim_plot.T): - lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + lines.append( + go.Scatter3d( + x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend + ) + ) # make experimental data wireframe line_marker = dict(color=plot_data.data_color, width=4) + counter = 0 for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.masked_data): - lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + showlegend = show_legend(counter) + lines.append( + go.Scatter3d( + x=i, + y=j, + z=k, + mode="lines", + name="data", + line=line_marker, + showlegend=showlegend, + ) + ) + counter += 1 for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.masked_data.T): - lines.append(go.Scatter3d(x=i, y=j, z=k, mode="lines", line=line_marker)) + lines.append( + go.Scatter3d( + x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend + ) + ) return lines +def construct_surface_legend_string(row): + surface_legend = "" + surface_legend += row.assignment + return surface_legend + + def create_plotly_surfaces(plot_data: PlottingDataForPlane): data = [] color_scale_values = np.linspace(0, 1, len(plot_data.single_colors)) @@ -1180,7 +1225,12 @@ def create_plotly_surfaces(plot_data: PlottingDataForPlane): [val, f"rgb({', '.join('%d'%(i*255) for i in c[0:3])})"] for val, c in zip(color_scale_values, plot_data.single_colors) ] - for val, individual_peak in zip(color_scale_values, plot_data.sim_data_singles): + for val, individual_peak, row in zip( + color_scale_values, + plot_data.sim_data_singles, + plot_data.plane_lineshape_parameters.itertuples(), + ): + name = construct_surface_legend_string(row) colors = np.zeros(shape=individual_peak.shape) + val data.append( go.Surface( @@ -1193,20 +1243,60 @@ def create_plotly_surfaces(plot_data: PlottingDataForPlane): showscale=False, cmin=0, 
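# cmin/cmax pin the colour mapping: together with the constant-valued
# `colors` array built above, each simulated peak surface samples exactly one
# entry of the shared colorscale, so every peak is drawn in a single flat
# colour rather than shaded by height. The same trick in miniature (toy
# values; go is plotly.graph_objects, z a 2D array):
#
#     flat = np.zeros_like(z) + 0.5  # constant surfacecolor => one colour
#     go.Surface(z=z, surfacecolor=flat, cmin=0, cmax=1, showscale=False,
#                colorscale=[[0.0, "rgb(94,60,153)"], [1.0, "rgb(230,97,1)"]])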
cmax=1, + name=name, ) ) return data +def create_residual_contours(plot_data: PlottingDataForPlane): + data = [] + contours = go.Contour(x=plot_data.x_plot, y=plot_data.y_plot, z=plot_data.residual) + data.append(contours) + return data + + def create_plotly_figure(plot_data: PlottingDataForPlane): lines = create_plotly_wireframe_lines(plot_data) surfaces = create_plotly_surfaces(plot_data) + # residuals = create_residual_contours(plot_data) fig = go.Figure(data=lines + surfaces) - layout = go.Layout(showlegend=False) - fig.update_layout(layout) + # layout = go.Layout(showlegend=True) + # fig.update_layout(layout) + # fig.update_traces(showlegend=True) + fig = update_axis_ranges(fig, plot_data) + return fig + + +def update_axis_ranges(fig, plot_data: PlottingDataForPlane): + fig.update_layout( + scene=dict( + xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), + yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), + annotations=make_annotations(plot_data), + ) + ) return fig +def make_annotations(plot_data: PlottingDataForPlane): + annotations = [] + for row in plot_data.plane_lineshape_parameters.itertuples(): + annotations.append( + dict( + showarrow=True, + x=row.center_x_ppm, + y=row.center_y_ppm, + z=row.height * 1.0, + text=row.assignment, + opacity=0.8, + textangle=0, + arrowsize=1, + ) + ) + return annotations + + class FitDataModel(BaseModel): plane: int clustid: int @@ -1334,7 +1424,7 @@ def check( data_color, fit_color = unpack_plotting_colors(colors) fits = get_fit_data_for_selected_peak_clusters(fits, clusters) - peak_clusters = fits.groupby("clustid") + peak_clusters = fits.query(f"plane=={selected_plane}").groupby("clustid") # make plotting meshes x = np.arange(pseudo3D.f2_size) diff --git a/poetry.lock b/poetry.lock index 451b681a..ad256b94 100644 --- a/poetry.lock +++ b/poetry.lock @@ -134,13 +134,13 @@ xyzservices = ">=2021.09.1" [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
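# Note on the figure layout code above: update_axis_ranges passes
# range=[max, min] so both chemical-shift axes run high-to-low, matching NMR
# plotting convention, and make_annotations pins each assignment label at its
# fitted (center_x_ppm, center_y_ppm, height). A reversed-axis sketch
# (assuming a plotly 3D figure `fig` and mesh arrays x, y):
#
#     fig.update_layout(scene=dict(
#         xaxis=dict(range=[x.max(), x.min()]),  # ppm decreases left to right
#         yaxis=dict(range=[y.max(), y.min()]),
#     ))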
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -438,60 +438,60 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "fonttools" -version = "4.47.2" +version = "4.48.1" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.47.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3b629108351d25512d4ea1a8393a2dba325b7b7d7308116b605ea3f8e1be88df"}, - {file = "fonttools-4.47.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c19044256c44fe299d9a73456aabee4b4d06c6b930287be93b533b4737d70aa1"}, - {file = "fonttools-4.47.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8be28c036b9f186e8c7eaf8a11b42373e7e4949f9e9f370202b9da4c4c3f56c"}, - {file = "fonttools-4.47.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f83a4daef6d2a202acb9bf572958f91cfde5b10c8ee7fb1d09a4c81e5d851fd8"}, - {file = "fonttools-4.47.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a5a5318ba5365d992666ac4fe35365f93004109d18858a3e18ae46f67907670"}, - {file = "fonttools-4.47.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8f57ecd742545362a0f7186774b2d1c53423ed9ece67689c93a1055b236f638c"}, - {file = "fonttools-4.47.2-cp310-cp310-win32.whl", hash = "sha256:a1c154bb85dc9a4cf145250c88d112d88eb414bad81d4cb524d06258dea1bdc0"}, - {file = "fonttools-4.47.2-cp310-cp310-win_amd64.whl", hash = "sha256:3e2b95dce2ead58fb12524d0ca7d63a63459dd489e7e5838c3cd53557f8933e1"}, - {file = "fonttools-4.47.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:29495d6d109cdbabe73cfb6f419ce67080c3ef9ea1e08d5750240fd4b0c4763b"}, - {file = "fonttools-4.47.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0a1d313a415eaaba2b35d6cd33536560deeebd2ed758b9bfb89ab5d97dc5deac"}, - {file = "fonttools-4.47.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90f898cdd67f52f18049250a6474185ef6544c91f27a7bee70d87d77a8daf89c"}, - {file = "fonttools-4.47.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3480eeb52770ff75140fe7d9a2ec33fb67b07efea0ab5129c7e0c6a639c40c70"}, - {file = "fonttools-4.47.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0255dbc128fee75fb9be364806b940ed450dd6838672a150d501ee86523ac61e"}, - {file = "fonttools-4.47.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f791446ff297fd5f1e2247c188de53c1bfb9dd7f0549eba55b73a3c2087a2703"}, - {file = "fonttools-4.47.2-cp311-cp311-win32.whl", hash = "sha256:740947906590a878a4bde7dd748e85fefa4d470a268b964748403b3ab2aeed6c"}, - {file = "fonttools-4.47.2-cp311-cp311-win_amd64.whl", hash = "sha256:63fbed184979f09a65aa9c88b395ca539c94287ba3a364517698462e13e457c9"}, - {file = "fonttools-4.47.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4ec558c543609e71b2275c4894e93493f65d2f41c15fe1d089080c1d0bb4d635"}, - {file = "fonttools-4.47.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e040f905d542362e07e72e03612a6270c33d38281fd573160e1003e43718d68d"}, - {file = 
"fonttools-4.47.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dd58cc03016b281bd2c74c84cdaa6bd3ce54c5a7f47478b7657b930ac3ed8eb"}, - {file = "fonttools-4.47.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32ab2e9702dff0dd4510c7bb958f265a8d3dd5c0e2547e7b5f7a3df4979abb07"}, - {file = "fonttools-4.47.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a808f3c1d1df1f5bf39be869b6e0c263570cdafb5bdb2df66087733f566ea71"}, - {file = "fonttools-4.47.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac71e2e201df041a2891067dc36256755b1229ae167edbdc419b16da78732c2f"}, - {file = "fonttools-4.47.2-cp312-cp312-win32.whl", hash = "sha256:69731e8bea0578b3c28fdb43dbf95b9386e2d49a399e9a4ad736b8e479b08085"}, - {file = "fonttools-4.47.2-cp312-cp312-win_amd64.whl", hash = "sha256:b3e1304e5f19ca861d86a72218ecce68f391646d85c851742d265787f55457a4"}, - {file = "fonttools-4.47.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:254d9a6f7be00212bf0c3159e0a420eb19c63793b2c05e049eb337f3023c5ecc"}, - {file = "fonttools-4.47.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eabae77a07c41ae0b35184894202305c3ad211a93b2eb53837c2a1143c8bc952"}, - {file = "fonttools-4.47.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86a5ab2873ed2575d0fcdf1828143cfc6b977ac448e3dc616bb1e3d20efbafa"}, - {file = "fonttools-4.47.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13819db8445a0cec8c3ff5f243af6418ab19175072a9a92f6cc8ca7d1452754b"}, - {file = "fonttools-4.47.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4e743935139aa485fe3253fc33fe467eab6ea42583fa681223ea3f1a93dd01e6"}, - {file = "fonttools-4.47.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d49ce3ea7b7173faebc5664872243b40cf88814ca3eb135c4a3cdff66af71946"}, - {file = "fonttools-4.47.2-cp38-cp38-win32.whl", hash = "sha256:94208ea750e3f96e267f394d5588579bb64cc628e321dbb1d4243ffbc291b18b"}, - {file = "fonttools-4.47.2-cp38-cp38-win_amd64.whl", hash = "sha256:0f750037e02beb8b3569fbff701a572e62a685d2a0e840d75816592280e5feae"}, - {file = "fonttools-4.47.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d71606c9321f6701642bd4746f99b6089e53d7e9817fc6b964e90d9c5f0ecc6"}, - {file = "fonttools-4.47.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:86e0427864c6c91cf77f16d1fb9bf1bbf7453e824589e8fb8461b6ee1144f506"}, - {file = "fonttools-4.47.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a00bd0e68e88987dcc047ea31c26d40a3c61185153b03457956a87e39d43c37"}, - {file = "fonttools-4.47.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5d77479fb885ef38a16a253a2f4096bc3d14e63a56d6246bfdb56365a12b20c"}, - {file = "fonttools-4.47.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5465df494f20a7d01712b072ae3ee9ad2887004701b95cb2cc6dcb9c2c97a899"}, - {file = "fonttools-4.47.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4c811d3c73b6abac275babb8aa439206288f56fdb2c6f8835e3d7b70de8937a7"}, - {file = "fonttools-4.47.2-cp39-cp39-win32.whl", hash = "sha256:5b60e3afa9635e3dfd3ace2757039593e3bd3cf128be0ddb7a1ff4ac45fa5a50"}, - {file = "fonttools-4.47.2-cp39-cp39-win_amd64.whl", hash = "sha256:7ee48bd9d6b7e8f66866c9090807e3a4a56cf43ffad48962725a190e0dd774c8"}, - {file = "fonttools-4.47.2-py3-none-any.whl", hash = "sha256:7eb7ad665258fba68fd22228a09f347469d95a97fb88198e133595947a20a184"}, - {file = "fonttools-4.47.2.tar.gz", hash = 
"sha256:7df26dd3650e98ca45f1e29883c96a0b9f5bb6af8d632a6a108bc744fa0bd9b3"}, + {file = "fonttools-4.48.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:702ae93058c81f46461dc4b2c79f11d3c3d8fd7296eaf8f75b4ba5bbf813cd5f"}, + {file = "fonttools-4.48.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97f0a49fa6aa2d6205c6f72f4f98b74ef4b9bfdcb06fd78e6fe6c7af4989b63e"}, + {file = "fonttools-4.48.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3260db55f1843e57115256e91247ad9f68cb02a434b51262fe0019e95a98738"}, + {file = "fonttools-4.48.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e740a7602c2bb71e1091269b5dbe89549749a8817dc294b34628ffd8b2bf7124"}, + {file = "fonttools-4.48.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4108b1d247953dd7c90ec8f457a2dec5fceb373485973cc852b14200118a51ee"}, + {file = "fonttools-4.48.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56339ec557f0c342bddd7c175f5e41c45fc21282bee58a86bd9aa322bec715f2"}, + {file = "fonttools-4.48.1-cp310-cp310-win32.whl", hash = "sha256:bff5b38d0e76eb18e0b8abbf35d384e60b3371be92f7be36128ee3e67483b3ec"}, + {file = "fonttools-4.48.1-cp310-cp310-win_amd64.whl", hash = "sha256:f7449493886da6a17472004d3818cc050ba3f4a0aa03fb47972e4fa5578e6703"}, + {file = "fonttools-4.48.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18b35fd1a850ed7233a99bbd6774485271756f717dac8b594958224b54118b61"}, + {file = "fonttools-4.48.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cad5cfd044ea2e306fda44482b3dd32ee47830fa82dfa4679374b41baa294f5f"}, + {file = "fonttools-4.48.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f30e605c7565d0da6f0aec75a30ec372072d016957cd8fc4469721a36ea59b7"}, + {file = "fonttools-4.48.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee76fd81a8571c68841d6ef0da750d5ff08ff2c5f025576473016f16ac3bcf7"}, + {file = "fonttools-4.48.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5057ade278e67923000041e2b195c9ea53e87f227690d499b6a4edd3702f7f01"}, + {file = "fonttools-4.48.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b10633aafc5932995a391ec07eba5e79f52af0003a1735b2306b3dab8a056d48"}, + {file = "fonttools-4.48.1-cp311-cp311-win32.whl", hash = "sha256:0d533f89819f9b3ee2dbedf0fed3825c425850e32bdda24c558563c71be0064e"}, + {file = "fonttools-4.48.1-cp311-cp311-win_amd64.whl", hash = "sha256:d20588466367f05025bb1efdf4e5d498ca6d14bde07b6928b79199c588800f0a"}, + {file = "fonttools-4.48.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0a2417547462e468edf35b32e3dd06a6215ac26aa6316b41e03b8eeaf9f079ea"}, + {file = "fonttools-4.48.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cf5a0cd974f85a80b74785db2d5c3c1fd6cc09a2ba3c837359b2b5da629ee1b0"}, + {file = "fonttools-4.48.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0452fcbfbce752ba596737a7c5ec5cf76bc5f83847ce1781f4f90eab14ece252"}, + {file = "fonttools-4.48.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578c00f93868f64a4102ecc5aa600a03b49162c654676c3fadc33de2ddb88a81"}, + {file = "fonttools-4.48.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:63dc592a16cd08388d8c4c7502b59ac74190b23e16dfc863c69fe1ea74605b68"}, + {file = "fonttools-4.48.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9b58638d8a85e3a1b32ec0a91d9f8171a877b4b81c408d4cb3257d0dee63e092"}, + {file = 
"fonttools-4.48.1-cp312-cp312-win32.whl", hash = "sha256:d10979ef14a8beaaa32f613bb698743f7241d92f437a3b5e32356dfb9769c65d"}, + {file = "fonttools-4.48.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdfd7557d1bd294a200bd211aa665ca3b02998dcc18f8211a5532da5b8fad5c5"}, + {file = "fonttools-4.48.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3cdb9a92521b81bf717ebccf592bd0292e853244d84115bfb4db0c426de58348"}, + {file = "fonttools-4.48.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b4ec6d42a7555f5ae35f3b805482f0aad0f1baeeef54859492ea3b782959d4a"}, + {file = "fonttools-4.48.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902e9c4e9928301912f34a6638741b8ae0b64824112b42aaf240e06b735774b1"}, + {file = "fonttools-4.48.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c8b54bd1420c184a995f980f1a8076f87363e2bb24239ef8c171a369d85a31"}, + {file = "fonttools-4.48.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:12ee86abca46193359ea69216b3a724e90c66ab05ab220d39e3fc068c1eb72ac"}, + {file = "fonttools-4.48.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6978bade7b6c0335095bdd0bd97f8f3d590d2877b370f17e03e0865241694eb5"}, + {file = "fonttools-4.48.1-cp38-cp38-win32.whl", hash = "sha256:bcd77f89fc1a6b18428e7a55dde8ef56dae95640293bfb8f4e929929eba5e2a2"}, + {file = "fonttools-4.48.1-cp38-cp38-win_amd64.whl", hash = "sha256:f40441437b039930428e04fb05ac3a132e77458fb57666c808d74a556779e784"}, + {file = "fonttools-4.48.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0d2b01428f7da26f229a5656defc824427b741e454b4e210ad2b25ed6ea2aed4"}, + {file = "fonttools-4.48.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:df48798f9a4fc4c315ab46e17873436c8746f5df6eddd02fad91299b2af7af95"}, + {file = "fonttools-4.48.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eb4167bde04e172a93cf22c875d8b0cff76a2491f67f5eb069566215302d45d"}, + {file = "fonttools-4.48.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c900508c46274d32d308ae8e82335117f11aaee1f7d369ac16502c9a78930b0a"}, + {file = "fonttools-4.48.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:594206b31c95fcfa65f484385171fabb4ec69f7d2d7f56d27f17db26b7a31814"}, + {file = "fonttools-4.48.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:292922dc356d7f11f5063b4111a8b719efb8faea92a2a88ed296408d449d8c2e"}, + {file = "fonttools-4.48.1-cp39-cp39-win32.whl", hash = "sha256:4709c5bf123ba10eac210d2d5c9027d3f472591d9f1a04262122710fa3d23199"}, + {file = "fonttools-4.48.1-cp39-cp39-win_amd64.whl", hash = "sha256:63c73b9dd56a94a3cbd2f90544b5fca83666948a9e03370888994143b8d7c070"}, + {file = "fonttools-4.48.1-py3-none-any.whl", hash = "sha256:e3e33862fc5261d46d9aae3544acb36203b1a337d00bdb5d3753aae50dac860e"}, + {file = "fonttools-4.48.1.tar.gz", hash = "sha256:8b8a45254218679c7f1127812761e7854ed5c8e34349aebf581e8c9204e7495a"}, ] [package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] interpolatable = ["munkres", "pycairo", 
"scipy"] -lxml = ["lxml (>=4.0,<5)"] +lxml = ["lxml (>=4.0)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] repacker = ["uharfbuzz (>=0.23.0)"] @@ -530,13 +530,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "identify" -version = "2.5.33" +version = "2.5.34" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, - {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, + {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, + {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, ] [package.extras] @@ -555,13 +555,13 @@ files = [ [[package]] name = "imageio" -version = "2.33.1" +version = "2.34.0" description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats." optional = false python-versions = ">=3.8" files = [ - {file = "imageio-2.33.1-py3-none-any.whl", hash = "sha256:c5094c48ccf6b2e6da8b4061cd95e1209380afafcbeae4a4e280938cce227e1d"}, - {file = "imageio-2.33.1.tar.gz", hash = "sha256:78722d40b137bd98f5ec7312119f8aea9ad2049f76f434748eb306b6937cc1ce"}, + {file = "imageio-2.34.0-py3-none-any.whl", hash = "sha256:08082bf47ccb54843d9c73fe9fc8f3a88c72452ab676b58aca74f36167e8ccba"}, + {file = "imageio-2.34.0.tar.gz", hash = "sha256:ae9732e10acf807a22c389aef193f42215718e16bd06eed0c5bb57e1034a4d53"}, ] [package.dependencies] @@ -598,13 +598,13 @@ files = [ [[package]] name = "ipython" -version = "8.20.0" +version = "8.21.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.20.0-py3-none-any.whl", hash = "sha256:bc9716aad6f29f36c449e30821c9dd0c1c1a7b59ddcc26931685b87b4c569619"}, - {file = "ipython-8.20.0.tar.gz", hash = "sha256:2f21bd3fc1d51550c89ee3944ae04bbc7bc79e129ea0937da6e6c68bfdbf117a"}, + {file = "ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, + {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, ] [package.dependencies] @@ -620,17 +620,17 @@ stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", 
"exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] -test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath", "trio"] +test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] [[package]] name = "jedi" @@ -880,108 +880,108 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.4" +version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:de8153a7aae3835484ac168a9a9bdaa0c5eee4e0bc595503c95d53b942879c84"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e888ff76ceb39601c59e219f281466c6d7e66bd375b4ec1ce83bcdc68306796b"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0b838c37ba596fcbfca71651a104a611543077156cb0a26fe0c475e1f152ee8"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac1ebf6983148b45b5fa48593950f90ed6d1d26300604f321c74a9ca1609f8e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0fbad3d346df8f9d72622ac71b69565e621ada2ce6572f37c2eae8dacd60385d"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5291d98cd3ad9a562883468c690a2a238c4a6388ab3bd155b0c75dd55ece858"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a7cc49ef48a3c7a0005a949f3c04f8baa5409d3f663a1b36f0eba9bfe2a0396e"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83041cda633871572f0d3c41dddd5582ad7d22f65a72eacd8d3d6d00291df26"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win32.whl", hash = "sha256:0c26f67b3fe27302d3a412b85ef696792c4a2386293c53ba683a89562f9399b0"}, - {file = "MarkupSafe-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:a76055d5cb1c23485d7ddae533229039b850db711c554a12ea64a0fd8a0129e2"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e9e3c4020aa2dc62d5dd6743a69e399ce3de58320522948af6140ac959ab863"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0042d6a9880b38e1dd9ff83146cc3c9c18a059b9360ceae207805567aacccc69"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d03fea4c4e9fd0ad75dc2e7e2b6757b80c152c032ea1d1de487461d8140efc"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab3a886a237f6e9c9f4f7d272067e712cdb4efa774bef494dccad08f39d8ae6"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:abf5ebbec056817057bfafc0445916bb688a255a5146f900445d081db08cbabb"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e1a0d1924a5013d4f294087e00024ad25668234569289650929ab871231668e7"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7902211afd0af05fbadcc9a312e4cf10f27b779cf1323e78d52377ae4b72bea"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c669391319973e49a7c6230c218a1e3044710bc1ce4c8e6eb71f7e6d43a2c131"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win32.whl", hash = "sha256:31f57d64c336b8ccb1966d156932f3daa4fee74176b0fdc48ef580be774aae74"}, - {file = "MarkupSafe-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:54a7e1380dfece8847c71bf7e33da5d084e9b889c75eca19100ef98027bd9f56"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a76cd37d229fc385738bd1ce4cba2a121cf26b53864c1772694ad0ad348e509e"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:987d13fe1d23e12a66ca2073b8d2e2a75cec2ecb8eab43ff5624ba0ad42764bc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5244324676254697fe5c181fc762284e2c5fceeb1c4e3e7f6aca2b6f107e60dc"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78bc995e004681246e85e28e068111a4c3f35f34e6c62da1471e844ee1446250"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d176cfdfde84f732c4a53109b293d05883e952bbba68b857ae446fa3119b4f"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f9917691f410a2e0897d1ef99619fd3f7dd503647c8ff2475bf90c3cf222ad74"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f06e5a9e99b7df44640767842f414ed5d7bedaaa78cd817ce04bbd6fd86e2dd6"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396549cea79e8ca4ba65525470d534e8a41070e6b3500ce2414921099cb73e8d"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win32.whl", hash = "sha256:f6be2d708a9d0e9b0054856f07ac7070fbe1754be40ca8525d5adccdbda8f475"}, - {file = "MarkupSafe-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:5045e892cfdaecc5b4c01822f353cf2c8feb88a6ec1c0adef2a2e705eef0f656"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a07f40ef8f0fbc5ef1000d0c78771f4d5ca03b4953fc162749772916b298fc4"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d18b66fe626ac412d96c2ab536306c736c66cf2a31c243a45025156cc190dc8a"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:698e84142f3f884114ea8cf83e7a67ca8f4ace8454e78fe960646c6c91c63bfa"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a3b78a5af63ec10d8604180380c13dcd870aba7928c1fe04e881d5c792dc4e"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:15866d7f2dc60cfdde12ebb4e75e41be862348b4728300c36cdf405e258415ec"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6aa5e2e7fc9bc042ae82d8b79d795b9a62bd8f15ba1e7594e3db243f158b5565"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:54635102ba3cf5da26eb6f96c4b8c53af8a9c0d97b64bdcb592596a6255d8518"}, - {file = 
"MarkupSafe-2.1.4-cp37-cp37m-win32.whl", hash = "sha256:3583a3a3ab7958e354dc1d25be74aee6228938312ee875a22330c4dc2e41beb0"}, - {file = "MarkupSafe-2.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:d6e427c7378c7f1b2bef6a344c925b8b63623d3321c09a237b7cc0e77dd98ceb"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:bf1196dcc239e608605b716e7b166eb5faf4bc192f8a44b81e85251e62584bd2"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df98d4a9cd6a88d6a585852f56f2155c9cdb6aec78361a19f938810aa020954"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b835aba863195269ea358cecc21b400276747cc977492319fd7682b8cd2c253d"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23984d1bdae01bee794267424af55eef4dfc038dc5d1272860669b2aa025c9e3"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c98c33ffe20e9a489145d97070a435ea0679fddaabcafe19982fe9c971987d5"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9896fca4a8eb246defc8b2a7ac77ef7553b638e04fbf170bff78a40fa8a91474"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b0fe73bac2fed83839dbdbe6da84ae2a31c11cfc1c777a40dbd8ac8a6ed1560f"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c7556bafeaa0a50e2fe7dc86e0382dea349ebcad8f010d5a7dc6ba568eaaa789"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win32.whl", hash = "sha256:fc1a75aa8f11b87910ffd98de62b29d6520b6d6e8a3de69a70ca34dea85d2a8a"}, - {file = "MarkupSafe-2.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:3a66c36a3864df95e4f62f9167c734b3b1192cb0851b43d7cc08040c074c6279"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:765f036a3d00395a326df2835d8f86b637dbaf9832f90f5d196c3b8a7a5080cb"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21e7af8091007bf4bebf4521184f4880a6acab8df0df52ef9e513d8e5db23411"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c31fe855c77cad679b302aabc42d724ed87c043b1432d457f4976add1c2c3e"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653fa39578957bc42e5ebc15cf4361d9e0ee4b702d7d5ec96cdac860953c5b4"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47bb5f0142b8b64ed1399b6b60f700a580335c8e1c57f2f15587bd072012decc"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fe8512ed897d5daf089e5bd010c3dc03bb1bdae00b35588c49b98268d4a01e00"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:36d7626a8cca4d34216875aee5a1d3d654bb3dac201c1c003d182283e3205949"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b6f14a9cd50c3cb100eb94b3273131c80d102e19bb20253ac7bd7336118a673a"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win32.whl", hash = "sha256:c8f253a84dbd2c63c19590fa86a032ef3d8cc18923b8049d91bcdeeb2581fbf6"}, - {file = "MarkupSafe-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:8b570a1537367b52396e53325769608f2a687ec9a4363647af1cded8928af959"}, - {file = "MarkupSafe-2.1.4.tar.gz", hash = "sha256:3aae9af4cac263007fd6309c64c6ab4506dd2b79382d9d19a1994f9240b8db4f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", 
hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + 
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "matplotlib" -version = "3.8.2" +version = "3.8.3" description = "Python plotting package" optional = false python-versions = ">=3.9" files = [ - {file = "matplotlib-3.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:09796f89fb71a0c0e1e2f4bdaf63fb2cefc84446bb963ecdeb40dfee7dfa98c7"}, - {file = "matplotlib-3.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9c6976748a25e8b9be51ea028df49b8e561eed7809146da7a47dbecebab367"}, - {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b78e4f2cedf303869b782071b55fdde5987fda3038e9d09e58c91cc261b5ad18"}, - {file = "matplotlib-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e208f46cf6576a7624195aa047cb344a7f802e113bb1a06cfd4bee431de5e31"}, - {file = "matplotlib-3.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:46a569130ff53798ea5f50afce7406e91fdc471ca1e0e26ba976a8c734c9427a"}, - {file = "matplotlib-3.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:830f00640c965c5b7f6bc32f0d4ce0c36dfe0379f7dd65b07a00c801713ec40a"}, - {file = "matplotlib-3.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d86593ccf546223eb75a39b44c32788e6f6440d13cfc4750c1c15d0fcb850b63"}, - {file = "matplotlib-3.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9a5430836811b7652991939012f43d2808a2db9b64ee240387e8c43e2e5578c8"}, - {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9576723858a78751d5aacd2497b8aef29ffea6d1c95981505877f7ac28215c6"}, - {file = "matplotlib-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ba9cbd8ac6cf422f3102622b20f8552d601bf8837e49a3afed188d560152788"}, - {file = "matplotlib-3.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:03f9d160a29e0b65c0790bb07f4f45d6a181b1ac33eb1bb0dd225986450148f0"}, - {file = "matplotlib-3.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:3773002da767f0a9323ba1a9b9b5d00d6257dbd2a93107233167cfb581f64717"}, - {file = "matplotlib-3.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4c318c1e95e2f5926fba326f68177dee364aa791d6df022ceb91b8221bd0a627"}, - {file = "matplotlib-3.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:091275d18d942cf1ee9609c830a1bc36610607d8223b1b981c37d5c9fc3e46a4"}, - {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b0f3b8ea0e99e233a4bcc44590f01604840d833c280ebb8fe5554fd3e6cfe8d"}, - {file = "matplotlib-3.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7b1704a530395aaf73912be741c04d181f82ca78084fbd80bc737be04848331"}, - {file = "matplotlib-3.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:533b0e3b0c6768eef8cbe4b583731ce25a91ab54a22f830db2b031e83cca9213"}, - {file = "matplotlib-3.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:0f4fc5d72b75e2c18e55eb32292659cf731d9d5b312a6eb036506304f4675630"}, - {file = "matplotlib-3.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:deaed9ad4da0b1aea77fe0aa0cebb9ef611c70b3177be936a95e5d01fa05094f"}, - {file = "matplotlib-3.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:172f4d0fbac3383d39164c6caafd3255ce6fa58f08fc392513a0b1d3b89c4f89"}, - {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7d36c2209d9136cd8e02fab1c0ddc185ce79bc914c45054a9f514e44c787917"}, - {file = "matplotlib-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5864bdd7da445e4e5e011b199bb67168cdad10b501750367c496420f2ad00843"}, - {file = "matplotlib-3.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef8345b48e95cee45ff25192ed1f4857273117917a4dcd48e3905619bcd9c9b8"}, - {file = "matplotlib-3.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:7c48d9e221b637c017232e3760ed30b4e8d5dfd081daf327e829bf2a72c731b4"}, - {file = "matplotlib-3.8.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aa11b3c6928a1e496c1a79917d51d4cd5d04f8a2e75f21df4949eeefdf697f4b"}, - {file = "matplotlib-3.8.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1095fecf99eeb7384dabad4bf44b965f929a5f6079654b681193edf7169ec20"}, - {file = "matplotlib-3.8.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:bddfb1db89bfaa855912261c805bd0e10218923cc262b9159a49c29a7a1c1afa"}, - {file = "matplotlib-3.8.2.tar.gz", hash = "sha256:01a978b871b881ee76017152f1f1a0cbf6bd5f7b8ff8c96df0df1bd57d8755a1"}, + {file = "matplotlib-3.8.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cf60138ccc8004f117ab2a2bad513cc4d122e55864b4fe7adf4db20ca68a078f"}, + {file = "matplotlib-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f557156f7116be3340cdeef7f128fa99b0d5d287d5f41a16e169819dcf22357"}, + {file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f386cf162b059809ecfac3bcc491a9ea17da69fa35c8ded8ad154cd4b933d5ec"}, + {file = "matplotlib-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3c5f96f57b0369c288bf6f9b5274ba45787f7e0589a34d24bdbaf6d3344632f"}, + {file = "matplotlib-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:83e0f72e2c116ca7e571c57aa29b0fe697d4c6425c4e87c6e994159e0c008635"}, + {file = "matplotlib-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c5c8290074ba31a41db1dc332dc2b62def469ff33766cbe325d32a3ee291aea"}, + {file = "matplotlib-3.8.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5184e07c7e1d6d1481862ee361905b7059f7fe065fc837f7c3dc11eeb3f2f900"}, + {file = "matplotlib-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d7e7e0993d0758933b1a241a432b42c2db22dfa37d4108342ab4afb9557cbe3e"}, + {file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b36ad07eac9740fc76c2aa16edf94e50b297d6eb4c081e3add863de4bb19a7"}, + {file = "matplotlib-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c42dae72a62f14982f1474f7e5c9959fc4bc70c9de11cc5244c6e766200ba65"}, + {file = "matplotlib-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf5932eee0d428192c40b7eac1399d608f5d995f975cdb9d1e6b48539a5ad8d0"}, + {file = "matplotlib-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:40321634e3a05ed02abf7c7b47a50be50b53ef3eaa3a573847431a545585b407"}, + {file = "matplotlib-3.8.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:09074f8057917d17ab52c242fdf4916f30e99959c1908958b1fc6032e2d0f6d4"}, + {file = "matplotlib-3.8.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5745f6d0fb5acfabbb2790318db03809a253096e98c91b9a31969df28ee604aa"}, + {file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97653d869a71721b639714b42d87cda4cfee0ee74b47c569e4874c7590c55c5"}, + {file = "matplotlib-3.8.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:242489efdb75b690c9c2e70bb5c6550727058c8a614e4c7716f363c27e10bba1"}, + {file = "matplotlib-3.8.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:83c0653c64b73926730bd9ea14aa0f50f202ba187c307a881673bad4985967b7"}, + {file = "matplotlib-3.8.3-cp312-cp312-win_amd64.whl", hash = "sha256:ef6c1025a570354297d6c15f7d0f296d95f88bd3850066b7f1e7b4f2f4c13a39"}, + {file = "matplotlib-3.8.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c4af3f7317f8a1009bbb2d0bf23dfaba859eb7dd4ccbd604eba146dccaaaf0a4"}, + {file = "matplotlib-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c6e00a65d017d26009bac6808f637b75ceade3e1ff91a138576f6b3065eeeba"}, + {file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7b49ab49a3bea17802df6872f8d44f664ba8f9be0632a60c99b20b6db2165b7"}, + {file = "matplotlib-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6728dde0a3997396b053602dbd907a9bd64ec7d5cf99e728b404083698d3ca01"}, + {file = "matplotlib-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:813925d08fb86aba139f2d31864928d67511f64e5945ca909ad5bc09a96189bb"}, + {file = "matplotlib-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:cd3a0c2be76f4e7be03d34a14d49ded6acf22ef61f88da600a18a5cd8b3c5f3c"}, + {file = "matplotlib-3.8.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fa93695d5c08544f4a0dfd0965f378e7afc410d8672816aff1e81be1f45dbf2e"}, + {file = 
"matplotlib-3.8.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9764df0e8778f06414b9d281a75235c1e85071f64bb5d71564b97c1306a2afc"}, + {file = "matplotlib-3.8.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5e431a09e6fab4012b01fc155db0ce6dccacdbabe8198197f523a4ef4805eb26"}, + {file = "matplotlib-3.8.3.tar.gz", hash = "sha256:7b416239e9ae38be54b028abbf9048aff5054a9aba5416bef0bd17f9162ce161"}, ] [package.dependencies] @@ -1192,47 +1192,47 @@ build-sphinx = ["sphinx (>=1.3.1)"] [[package]] name = "numpy" -version = "1.26.3" +version = "1.26.4" description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.9" files = [ - {file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf"}, - {file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6"}, - {file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178"}, - {file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485"}, - {file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3"}, - {file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374"}, - {file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2"}, - {file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e"}, - {file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00"}, - {file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b"}, - {file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13"}, - {file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e"}, - {file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3"}, - {file = 
"numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166"}, - {file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36"}, - {file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511"}, - {file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f"}, - {file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b"}, - {file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58"}, - {file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb"}, - {file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03"}, - {file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0"}, - {file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5"}, - {file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = 
"numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = 
"numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -1521,18 +1521,18 @@ xmp = ["defusedxml"] [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "plotly" @@ -1566,13 +1566,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.6.0" +version = "3.6.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, - {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, + {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, + {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, ] [package.dependencies] @@ -1816,13 +1816,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.4" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.4-py2.py3-none-any.whl", hash = "sha256:f90ef520d95e7c46951105338d918664ebfd6f1d995bd7d153127ce90efafa6a"}, - {file = "pytz-2023.4.tar.gz", hash = "sha256:31d4583c4ed539cd037956140d695e42c033a19e984bfce9964a3f7d59bc2b40"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -2087,18 +2087,18 @@ test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", [[package]] name = "setuptools" -version = "69.0.3" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv 
(>=13.0.0)", "wheel"] [[package]] @@ -2196,13 +2196,13 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tifffile" -version = "2023.12.9" +version = "2024.2.12" description = "Read and write TIFF files" optional = false python-versions = ">=3.9" files = [ - {file = "tifffile-2023.12.9-py3-none-any.whl", hash = "sha256:9b066e4b1a900891ea42ffd33dab8ba34c537935618b9893ddef42d7d422692f"}, - {file = "tifffile-2023.12.9.tar.gz", hash = "sha256:9dd1da91180a6453018a241ff219e1905f169384355cd89c9ef4034c1b46cdb8"}, + {file = "tifffile-2024.2.12-py3-none-any.whl", hash = "sha256:870998f82fbc94ff7c3528884c1b0ae54863504ff51dbebea431ac3fa8fb7c21"}, + {file = "tifffile-2024.2.12.tar.gz", hash = "sha256:4920a3ec8e8e003e673d3c6531863c99eedd570d1b8b7e141c072ed78ff8030d"}, ] [package.dependencies] @@ -2244,13 +2244,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -2311,13 +2311,13 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = "sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] @@ -2356,17 +2356,18 @@ tests = ["nose", "numpy"] [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2392,38 +2393,40 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = 
"watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] diff --git a/pyproject.toml b/pyproject.toml index 30214dad..ad532fa7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ pydantic = "^2.6.1" [tool.poetry.scripts] peakipy = 'peakipy.cli.main:app' +peakipy-check = 'peakipy.cli.check_panel:app' [tool.poetry.group.dev.dependencies] black = "^23.7.0" From 51bdb1b78c2515a1faf673abd81821d6236a9a73 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Fri, 16 Feb 2024 23:13:28 -0500 Subject: [PATCH 15/37] refactoring and adding tests --- peakipy/cli/edit.py | 6 +- peakipy/cli/fit.py | 149 ++++++++++++++++++++++++++------------------ peakipy/cli/main.py | 3 +- test/test_fit.py | 91 +++++++++++++++++++++++++++ 4 files changed, 184 insertions(+), 65 deletions(-) create mode 100644 test/test_fit.py diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 6f5a5859..172dab4f 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -713,12 +713,14 @@ def fit_selected(self, event): print(f"[yellow]Using LS = {lineshape}[/yellow]") if self.checkbox_group.active == []: fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" - plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}" + # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label 
--individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}" + plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}" else: plane_index = self.select_plane.value print(f"[yellow]Only fitting plane {plane_index}[/yellow]") fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" - plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index}" + # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index} --show" + plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}" print(f"[blue]{fit_command}[/blue]") self.fit_reports += fit_command + "
" diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 86544149..678bfaa6 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -3,13 +3,13 @@ """ from pathlib import Path from typing import Optional, List -from multiprocessing import cpu_count, Pool import numpy as np import pandas as pd from rich import print from rich.console import Console +from pydantic import BaseModel from peakipy.core import ( fix_params, @@ -30,62 +30,6 @@ column_selection = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] -def check_xybounds(x): - x = x.split(",") - if len(x) == 2: - # xy_bounds = float(x[0]), float(x[1]) - xy_bounds = [float(i) for i in x] - return xy_bounds - else: - print("[red]🤔 xy_bounds must be pair of floats e.g. --xy_bounds=0.05,0.5[/red]") - exit() - - -# prepare data for multiprocessing - - -def chunks(l, n): - """split list into n chunks - - will return empty lists if n > len(l) - - :param l: list of values you wish to split - :type l: list - :param n: number of sub lists you want to generate - :type n: int - - :returns sub_lists: list of lists - :rtype sub_lists: list - """ - # create n empty lists - sub_lists = [[] for _ in range(n)] - # append into n lists - for num, i in enumerate(l): - sub_lists[num % n].append(i) - return sub_lists - - -def split_peaklist(peaklist, n_cpu, tmp_path=tmp_path): - """split peaklist into smaller files based on number of cpus - - :param peaklist: Peaklist data generated by peakipy read or edit scripts - :type peaklist: pandas.DataFrame - - :returns tmp_path: Temporary directory path - :rtype tmp_path: pathlib.Path - """ - # clustid numbers - clustids = peaklist.CLUSTID.unique() - # make n_cpu lists of clusters - clustids = chunks(clustids, n_cpu) - for i in range(n_cpu): - # get sub dataframe containing ith clustid list - split_peaks = peaklist[peaklist.CLUSTID.isin(clustids[i])] - # save sub dataframe - split_peaks.to_csv(tmp_path / f"peaks_{i}.csv", index=False) - return tmp_path - - class FitPeaksInput: """input data for the fit_peaks function""" @@ -176,6 +120,93 @@ def log(self): return self._log +class FitPeaksResultDfRow(BaseModel): + fit_prefix: str + assignment: str + amp: float + amp_err: float + center_x: float + init_center_x: float + center_y: float + init_center_y: float + sigma_x: float + sigma_y: float + clustid: int + memcnt: int + plane: int + x_radius: float + y_radius: float + x_radius_ppm: float + y_radius_ppm: float + lineshape: str + aic: float + chisqr: float + redchi: float + residual_sum: float + height: float + height_err: float + fwhm_x: float + fwhm_y: float + center_x_ppm: float + center_y_ppm: float + init_center_x_ppm: float + init_center_y_ppm: float + sigma_x_ppm: float + sigma_y_ppm: float + fwhm_x_ppm: float + fwhm_y_ppm: float + fwhm_x_hz: float + fwhm_y_hz: float + + +class FitPeaksResultRowGLPV(FitPeaksResultDfRow): + fraction: float + + +class FitPeaksResultRowPVPV(FitPeaksResultDfRow): + fraction_x: float # for PV_PV model + fraction_y: float # for PV_PV model + + +class FitPeaksResultRowVoigt(FitPeaksResultDfRow): + gamma_x_ppm: float # for voigt + gamma_y_ppm: float # for voigt + + +def get_fit_peaks_result_validation_model(lineshape): + match lineshape: + case lineshape.V: + validation_model = FitPeaksResultRowVoigt + case lineshape.PV_PV: + validation_model = FitPeaksResultRowPVPV + case _: + validation_model = FitPeaksResultRowGLPV + return validation_model + + +def filter_peak_clusters_by_max_cluster_size(grouped_peak_clusters, max_cluster_size): + filtered_peak_clusters = 
grouped_peak_clusters.filter( + lambda x: len(x) <= max_cluster_size + ) + return filtered_peak_clusters + + +def set_parameters_to_fix_during_fit(first_plane_fit_params, to_fix): + # fix sigma center and fraction parameters + # could add an option to select params to fix + match to_fix: + case None | () | []: + float_str = "Floating all parameters" + parameter_set = first_plane_fit_params + case ["None"] | ["none"]: + float_str = "Floating all parameters" + parameter_set = first_plane_fit_params + case _: + float_str = f"Fixing parameters: {to_fix}" + parameter_set = fix_params(first_plane_fit_params, to_fix) + return parameter_set, float_str + + def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: """Fit set of peak clusters to lineshape model @@ -188,14 +219,10 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: :returns: Data structure containing pd.DataFrame with the fitted results and a log :rtype: FitPeaksResult """ - # sum planes for initial fit - summed_planes = fit_input.data.sum(axis=0) - # group peaks based on CLUSTID groups = peaks.groupby("CLUSTID") # setup arguments to_fix = fit_input.args.get("to_fix") - # console.print(to_fix, style="red bold") noise = fit_input.args.get("noise") verb = fit_input.args.get("verb") initial_fit_threshold = fit_input.args.get("initial_fit_threshold") diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 8d0f1f37..f4ff51eb 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -25,7 +25,7 @@ from dataclasses import dataclass, field from enum import Enum from typing import Optional, Tuple, List, Annotated -from multiprocessing import Pool +from multiprocessing import Pool, cpu_count import typer import numpy as np @@ -72,7 +72,6 @@ deal_with_peaks_on_edge_of_spectrum, ) from .fit import ( - cpu_count, fit_peaks, FitPeaksInput, ) diff --git a/test/test_fit.py b/test/test_fit.py new file mode 100644 index 00000000..517a8462 --- /dev/null +++ b/test/test_fit.py @@ -0,0 +1,91 @@ +from pytest import fixture + +import pandas as pd +import numpy as np +from lmfit import Parameters + +from peakipy.cli.fit import ( + get_fit_peaks_result_validation_model, + FitPeaksResultRowPVPV, + FitPeaksResultRowVoigt, + FitPeaksResultRowGLPV, + filter_peak_clusters_by_max_cluster_size, + set_parameters_to_fix_during_fit, +) +from peakipy.core import Lineshape + + +def test_get_fit_peaks_result_validation_model_PVPV(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.PV_PV) + assert validation_model == FitPeaksResultRowPVPV + + +def test_get_fit_peaks_result_validation_model_G(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.G) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_L(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.L) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_PV(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.PV) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_V(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.V) + assert validation_model == FitPeaksResultRowVoigt + + +def test_filter_groups_by_max_cluster_size(): + groups = pd.DataFrame( + dict( + col1=[1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 5, 6, 7], + col2=[1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7], + ) + ).groupby("col1") + max_cluster_size = 3 + 
filtered_groups = filter_peak_clusters_by_max_cluster_size(groups, max_cluster_size) + filtered_group_names = filtered_groups.col1.unique() + expected_group_names = np.array([3, 4, 5, 6, 7]) + np.testing.assert_array_equal(filtered_group_names, expected_group_names) + + +def test_set_parameters_to_fix_during_fit(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["test1"] + ) + assert modified_parameter_set["test1"].vary == False + + +def test_set_parameters_to_fix_during_fit_2(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["test1", "test2"] + ) + assert ( + modified_parameter_set["test2"].vary + == modified_parameter_set["test1"].vary + == False + ) + + +def test_set_parameters_to_fix_during_fit_3(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["test2"] + ) + assert ( + modified_parameter_set["test1"].vary + != modified_parameter_set["test2"].vary + == False + ) From f5debf472f90c15c9d4ac4c4499f3760ab08950d Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 19 Feb 2024 16:00:15 -0500 Subject: [PATCH 16/37] refactoring and adding unit tests --- peakipy/cli/fit.py | 461 ++++++++++++++++++++------------------------- test/test_fit.py | 275 ++++++++++++++++++++++++++- 2 files changed, 468 insertions(+), 268 deletions(-) diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 678bfaa6..cc1dcb73 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -2,7 +2,8 @@ """Fit and deconvolute NMR peaks: Functions used for running peakipy fit """ from pathlib import Path -from typing import Optional, List +from typing import Optional, List, Tuple +from dataclasses import dataclass, field import numpy as np import pandas as pd @@ -10,12 +11,18 @@ from rich import print from rich.console import Console from pydantic import BaseModel +from lmfit import Model, Parameter +from lmfit.model import ModelResult from peakipy.core import ( fix_params, - get_params, fit_first_plane, LoadData, + Lineshape, + pvoigt2d, + voigt2d, + pv_pv, + to_prefix, ) console = Console() @@ -30,6 +37,25 @@ column_selection = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] +@dataclass +class FitPeaksArgs: + noise: float + dims: List[int] = field(default_factory=lambda: [0, 1, 2]) + colors: Tuple[str] = ("#5e3c99", "#e66101") + max_cluster_size: Optional[int] = None + to_fix: List[str] = field(default_factory=lambda: ["fraction", "sigma", "center"]) + xy_bounds: Tuple[float, float] = ((0, 0),) + vclist: Optional[Path] = (None,) + plane: Optional[List[int]] = (None,) + exclude_plane: Optional[List[int]] = (None,) + reference_plane_index: List[int] = ([],) + initial_fit_threshold: Optional[float] = (None,) + mp: bool = (True,) + plot: Optional[Path] = (None,) + show: bool = (False,) + verb: bool = (False,) + + class FitPeaksInput: """input data for the fit_peaks function""" @@ -207,6 +233,154 @@ def set_parameters_to_fix_during_fit(first_plane_fit_params, to_fix): return parameter_set, float_str +def get_default_lineshape_param_names(lineshape: Lineshape): + match lineshape: + case Lineshape.PV | Lineshape.G | Lineshape.L: + param_names = Model(pvoigt2d).param_names + case Lineshape.V: + param_names = 
Model(voigt2d).param_names + case Lineshape.PV_PV: + param_names = Model(pv_pv).param_names + return param_names + + +def split_parameter_sets_by_peak( + default_param_names: List, params: List[Tuple[str, Parameter]] +): + """params is a list of tuples where the first element of each tuple is a + prefixed parameter name and the second element is the corresponding + Parameter object. This is created by calling .items() on a Parameters + object + """ + number_of_fitted_parameters = len(params) + number_of_default_params = len(default_param_names) + number_of_fitted_peaks = int(number_of_fitted_parameters / number_of_default_params) + split_param_items = [ + params[i : (i + number_of_default_params)] + for i in range(0, number_of_fitted_parameters, number_of_default_params) + ] + assert len(split_param_items) == number_of_fitted_peaks + return split_param_items + + +def create_parameter_dict(prefix, parameters: List[Tuple[str, Parameter]]): + parameter_dict = dict(prefix=prefix) + parameter_dict.update({k.replace(prefix, ""): v.value for k, v in parameters}) + parameter_dict.update( + {f"{k.replace(prefix,'')}_stderr": v.stderr for k, v in parameters} + ) + return parameter_dict + + +def get_prefix_from_parameter_names( + default_param_names: List, parameters: List[Tuple[str, Parameter]] +): + prefixes = [ + param_key_val[0].replace(default_param_name, "") + for param_key_val, default_param_name in zip(parameters, default_param_names) + ] + assert len(set(prefixes)) == 1 + return prefixes[0] + + +def unpack_fitted_parameters_for_lineshape( + lineshape: Lineshape, params: List[dict], plane_number: int +): + default_param_names = get_default_lineshape_param_names(lineshape) + split_parameter_names = split_parameter_sets_by_peak(default_param_names, params) + prefixes = [ + get_prefix_from_parameter_names(default_param_names, i) + for i in split_parameter_names + ] + unpacked_params = [] + for parameter_names, prefix in zip(split_parameter_names, prefixes): + parameter_dict = create_parameter_dict(prefix, parameter_names) + parameter_dict.update({"plane": plane_number}) + unpacked_params.append(parameter_dict) + return unpacked_params + + +def perform_initial_lineshape_fit_on_cluster_of_peaks( + cluster_of_peaks, fit_input: FitPeaksInput +): + fit_result = fit_first_plane( + cluster_of_peaks, + fit_input.data, + # norm(summed_planes), + fit_input.args.get("uc_dics"), + lineshape=fit_input.args.get("lineshape"), + xy_bounds=fit_input.args.get("xy_bounds"), + verbose=fit_input.args.get("verb"), + noise=fit_input.args.get("noise"), + fit_method=fit_input.config.get("fit_method", "leastsq"), + reference_plane_indices=fit_input.args.get("reference_plane_indices"), + threshold=fit_input.args.get("initial_fit_threshold"), + ) + return fit_result + + +def refit_peaks_with_constraints(fit_input: FitPeaksInput, fit_result: FitPeaksResult): + fit_results = [] + for num, d in enumerate(fit_input.data): + plane_number = fit_input.plane_numbers[num] + fit_result.out.fit( + data=d[fit_result.mask], + params=fit_result.out.params, + weights=1.0 + / np.array( + [fit_input.args.get("noise")] * len(np.ravel(d[fit_result.mask])) + ), + ) + fit_results.extend( + unpack_fitted_parameters_for_lineshape( + fit_input.args.get("lineshape"), + list(fit_result.out.params.items()), + plane_number, + ) + ) + # fit_report = fit_result.out.fit_report() + # log.write( + return fit_results + + +def merge_unpacked_parameters_with_metadata(cluster_fit_df, group_of_peaks_df): + group_of_peaks_df["prefix"] = 
group_of_peaks_df.ASS.apply(to_prefix) + merged_cluster_fit_df = cluster_fit_df.merge(group_of_peaks_df, on="prefix") + return merged_cluster_fit_df + + +def update_cluster_df_with_fit_statistics(cluster_df, fit_result: ModelResult): + cluster_df["chisqr"] = fit_result.chisqr + cluster_df["redchi"] = fit_result.redchi + cluster_df["residual_sum"] = np.sum(fit_result.residual) + cluster_df["aic"] = fit_result.aic + cluster_df["bic"] = fit_result.bic + cluster_df["nfev"] = fit_result.nfev + cluster_df["ndata"] = fit_result.ndata + return cluster_df + + +def rename_columns_for_compatibility(df): + mapping = { + "amplitude": "amp", + "amplitude_stderr": "amp_err", + "X_AXIS": "init_center_x", + "Y_AXIS": "init_center_y", + "ASS": "assignment", + "MEMCNT": "memcnt", + "X_RADIUS": "x_radius", + "Y_RADIUS": "y_radius", + } + df = df.rename(columns=mapping) + return df + + +def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): + vclist_data = fit_input.args.get("vclist_data") + df["vclist"] = df.plane.apply(lambda x: vclist_data[x]) + return df + + def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: """Fit set of peak clusters to lineshape model @@ -219,266 +393,33 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: :returns: Data structure containing pd.DataFrame with the fitted results and a log :rtype: FitPeaksResult """ - # group peaks based on CLUSTID - groups = peaks.groupby("CLUSTID") + peak_clusters = peaks.groupby("CLUSTID") + max_cluster_size = fit_input.args.get("max_cluster_size") + filtered_peaks = filter_peak_clusters_by_max_cluster_size( + peak_clusters, max_cluster_size + ) + peak_clusters = filtered_peaks.groupby("CLUSTID") # setup arguments to_fix = fit_input.args.get("to_fix") - noise = fit_input.args.get("noise") - verb = fit_input.args.get("verb") - initial_fit_threshold = fit_input.args.get("initial_fit_threshold") - reference_plane_indices = fit_input.args.get("reference_plane_indices") lineshape = fit_input.args.get("lineshape") - xy_bounds = fit_input.args.get("xy_bounds") - vclist = fit_input.args.get("vclist") - uc_dics = fit_input.args.get("uc_dics") - - # for saving data, currently not using errs for center and sigma - amps = [] - amp_errs = [] - - center_xs = [] - init_center_xs = [] - # center_x_errs = [] - - center_ys = [] - init_center_ys = [] - # center_y_errs = [] - - sigma_ys = [] - # sigma_y_errs = [] - - sigma_xs = [] - # sigma_x_errs = [] - - match lineshape: - case lineshape.V: - # lorentzian linewidth - gamma_xs = [] - gamma_ys = [] - fractions = [] - - case lineshape.PV_PV: - # seperate fractions for each dim - fractions_x = [] - fractions_y = [] - case _: - fractions = [] - - # lists for saving data - names = [] - assign = [] - clustids = [] - memcnts = [] - planes = [] - x_radii = [] - y_radii = [] - x_radii_ppm = [] - y_radii_ppm = [] - lineshapes = [] - # errors - chisqrs = [] - redchis = [] - aics = [] - res_sum = [] - - # iterate over groups of peaks out_str = "" - for name, group in groups: - #  max cluster size - len_group = len(group) - if len_group <= fit_input.args.get("max_cluster_size"): - if len_group == 1: - peak_str = "peak" - else: - peak_str = "peaks" - - out_str += f""" - - #################################### - Fitting cluster of {len_group} {peak_str} - #################################### - """ - # fits sum of all planes first - fit_result = fit_first_plane( - group, - fit_input.data, - # norm(summed_planes), - uc_dics, - lineshape=lineshape, - xy_bounds=xy_bounds, 
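# Illustrative aside, not part of the patch: how merge_unpacked_parameters_with_metadata
# joins per-peak fit rows back onto the peaklist. Each fitted peak carries the lmfit
# model prefix derived from its assignment, so a plain pandas merge on "prefix"
# recovers ASS, CLUSTID, etc. The toy frames below are hypothetical.
import pandas as pd

cluster_fit_df = pd.DataFrame(
    {"plane": [0, 1, 0, 1], "prefix": ["_p1_", "_p1_", "_p2_", "_p2_"]}
)
peaks_df = pd.DataFrame({"ASS": ["p1", "p2"], "CLUSTID": [1, 1]})
# mirrors what to_prefix appears to do in the tests below: to_prefix("p1") -> "_p1_"
peaks_df["prefix"] = "_" + peaks_df.ASS + "_"
merged = cluster_fit_df.merge(peaks_df, on="prefix")
# every per-plane fit row now carries the assignment and cluster id of its peak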
- verbose=verb, - noise=noise, - fit_method=fit_input.config.get("fit_method", "leastsq"), - reference_plane_indices=reference_plane_indices, - threshold=initial_fit_threshold, - ) - fit_result.plot( - plot_path=fit_input.args.get("plot"), - show=fit_input.args.get("show"), - mp=fit_input.args.get("mp"), - ) - # jack_knife_result = fit_result.jackknife() - # print("JackKnife", jack_knife_result.mean, jack_knife_result.std) - first = fit_result.out - mask = fit_result.mask - # log.write( - out_str += fit_result.fit_str - out_str += f""" - ------------------------------------ - Summed planes - ------------------------------------ - {first.fit_report()} - """ - # ) - # fix sigma center and fraction parameters - # could add an option to select params to fix - match to_fix: - case None | () | []: - float_str = "Floating all parameters" - case ["None"] | ["none"]: - float_str = "Floating all parameters" - case _: - float_str = f"Fixing parameters: {to_fix}" - fix_params(first.params, to_fix) - if verb: - console.print(float_str, style="magenta") - - out_str += float_str + "\n" - - for num, d in enumerate(fit_input.data): - plane_number = fit_input.plane_numbers[num] - first.fit( - data=d[mask], - params=first.params, - weights=1.0 / np.array([noise] * len(np.ravel(d[mask]))), - ) - fit_report = first.fit_report() - # log.write( - out_str += f""" - ------------------------------------ - Plane = {num+1} - ------------------------------------ - {fit_report} - """ - # ) - if verb: - console.print(fit_report, style="bold") - - amp, amp_err, name = get_params(first.params, "amplitude") - cen_x, cen_x_err, cx_name = get_params(first.params, "center_x") - cen_y, cen_y_err, cy_name = get_params(first.params, "center_y") - sig_x, sig_x_err, sx_name = get_params(first.params, "sigma_x") - sig_y, sig_y_err, sy_name = get_params(first.params, "sigma_y") - # currently chi square is calculated for all peaks in cluster (not individual peaks) - # chi2 - residual sum of squares - chisqrs.extend([first.chisqr for _ in sy_name]) - # reduced chi2 - redchis.extend([first.redchi for _ in sy_name]) - # Akaike Information criterion - aics.extend([first.aic for _ in sy_name]) - # residual sum of squares - res_sum.extend([np.sum(first.residual) for _ in sy_name]) - - # deal with lineshape specific parameters - match lineshape: - case lineshape.PV_PV: - frac_x, frac_err_x, name = get_params( - first.params, "fraction_x" - ) - frac_y, frac_err_y, name = get_params( - first.params, "fraction_y" - ) - fractions_x.extend(frac_x) - fractions_y.extend(frac_y) - case lineshape.V: - frac, frac_err, name = get_params(first.params, "fraction") - gam_x, gam_x_err, gx_name = get_params(first.params, "gamma_x") - gam_y, gam_y_err, gy_name = get_params(first.params, "gamma_y") - gamma_xs.extend(gam_x) - gamma_ys.extend(gam_y) - fractions.extend(frac) - case _: - frac, frac_err, name = get_params(first.params, "fraction") - fractions.extend(frac) - - # extend lists with fit data - amps.extend(amp) - amp_errs.extend(amp_err) - center_xs.extend(cen_x) - init_center_xs.extend(group.X_AXISf) - # center_x_errs.extend(cen_x_err) - center_ys.extend(cen_y) - init_center_ys.extend(group.Y_AXISf) - # center_y_errs.extend(cen_y_err) - sigma_xs.extend(sig_x) - # sigma_x_errs.extend(sig_x_err) - sigma_ys.extend(sig_y) - # sigma_y_errs.extend(sig_y_err) - # add plane number, this should map to vclist - planes.extend([plane_number for _ in amp]) - lineshapes.extend([lineshape.value for _ in amp]) - #  get prefix for fit - 
names.extend([first.model.prefix] * len(name)) - assign.extend(group["ASS"]) - clustids.extend(group["CLUSTID"]) - memcnts.extend(group["MEMCNT"]) - x_radii.extend(group["X_RADIUS"]) - y_radii.extend(group["Y_RADIUS"]) - x_radii_ppm.extend(group["X_RADIUS_PPM"]) - y_radii_ppm.extend(group["Y_RADIUS_PPM"]) - - df_dic = { - "fit_prefix": names, - "assignment": assign, - "amp": amps, - "amp_err": amp_errs, - # "height": heights, - # "height_err": height_errs, - "center_x": center_xs, - "init_center_x": init_center_xs, - # "center_x_err": center_x_errs, - "center_y": center_ys, - "init_center_y": init_center_ys, - # "center_y_err": center_y_errs, - "sigma_x": sigma_xs, - # "sigma_x_err": sigma_x_errs, - "sigma_y": sigma_ys, - # "sigma_y_err": sigma_y_errs, - "clustid": clustids, - "memcnt": memcnts, - "plane": planes, - "x_radius": x_radii, - "y_radius": y_radii, - "x_radius_ppm": x_radii_ppm, - "y_radius_ppm": y_radii_ppm, - "lineshape": lineshapes, - "aic": aics, - "chisqr": chisqrs, - "redchi": redchis, - "residual_sum": res_sum, - # "slope": slopes, - # "intercept": intercepts - } - - # lineshape specific - match lineshape: - case lineshape.PV_PV: - df_dic["fraction_x"] = fractions_x - df_dic["fraction_y"] = fractions_y - case lineshape.V: - df_dic["gamma_x"] = gamma_xs - df_dic["gamma_y"] = gamma_ys - df_dic["fraction"] = fractions - case _: - df_dic["fraction"] = fractions - - #  make dataframe - df = pd.DataFrame(df_dic) - # Fill nan values - df.fillna(value=np.nan, inplace=True) - # vclist - if vclist: - vclist_data = fit_input.args.get("vclist_data") - df["vclist"] = df.plane.apply(lambda x: vclist_data[x]) - #  output data + cluster_dfs = [] + for name, peak_cluster in peak_clusters: + fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks( + peak_cluster, fit_input + ) + fit_result.out.params, float_str = set_parameters_to_fix_during_fit( + fit_result.out.params, to_fix + ) + fit_results = refit_peaks_with_constraints(fit_input, fit_result) + cluster_df = pd.DataFrame(fit_results) + cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out) + cluster_df["clustid"] = name + cluster_df = merge_unpacked_parameters_with_metadata(cluster_df, peak_cluster) + cluster_dfs.append(cluster_df) + df = pd.concat(cluster_dfs, ignore_index=True) + df["lineshape"] = lineshape.value + if fit_input.args.get("vclist"): + df = add_vclist_to_df(fit_input, df) + df = rename_columns_for_compatibility(df) return FitPeaksResult(df=df, log=out_str) diff --git a/test/test_fit.py b/test/test_fit.py index 517a8462..113b0339 100644 --- a/test/test_fit.py +++ b/test/test_fit.py @@ -2,7 +2,8 @@ import pandas as pd import numpy as np -from lmfit import Parameters +from lmfit import Parameters, Model +from lmfit.model import ModelResult from peakipy.cli.fit import ( get_fit_peaks_result_validation_model, @@ -11,8 +12,15 @@ FitPeaksResultRowGLPV, filter_peak_clusters_by_max_cluster_size, set_parameters_to_fix_during_fit, + unpack_fitted_parameters_for_lineshape, + get_default_lineshape_param_names, + split_parameter_sets_by_peak, + get_prefix_from_parameter_names, + create_parameter_dict, + perform_initial_lineshape_fit_on_cluster_of_peaks, + merge_unpacked_parameters_with_metadata, ) -from peakipy.core import Lineshape +from peakipy.core import Lineshape, pvoigt2d def test_get_fit_peaks_result_validation_model_PVPV(): @@ -63,16 +71,21 @@ def test_set_parameters_to_fix_during_fit(): assert modified_parameter_set["test1"].vary == False -def test_set_parameters_to_fix_during_fit_2(): 
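# Illustrative aside, not part of the patch: what "fixing" a parameter means in lmfit
# terms. set_parameters_to_fix_during_fit() relies on fix_params() setting vary=False,
# so the per-plane refits keep, e.g., centers and sigmas at their first-fit values and
# only float the remaining parameters. The substring-matching loop is a minimal
# stand-in for peakipy.core.fix_params, shown here only to make the behaviour concrete.
from lmfit import Parameters

params = Parameters()
params.add("p1_center_x", value=20.0, vary=True)
params.add("p1_amplitude", value=1e5, vary=True)

to_fix = ["center"]
for name in params:
    if any(fix in name for fix in to_fix):
        params[name].set(vary=False)

assert params["p1_center_x"].vary is False
assert params["p1_amplitude"].vary is True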
+@fixture +def parameters_set_with_two_variables(): parameter_set = Parameters() - parameter_set.add("test1", vary=True) - parameter_set.add("test2", vary=True) + parameter_set.add("prefix1_test1", vary=True) + parameter_set.add("prefix1_test2", vary=True) + return parameter_set + + +def test_set_parameters_to_fix_during_fit_2(parameters_set_with_two_variables): modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameter_set, ["test1", "test2"] + parameters_set_with_two_variables, ["prefix1_test1", "prefix1_test2"] ) assert ( - modified_parameter_set["test2"].vary - == modified_parameter_set["test1"].vary + modified_parameter_set["prefix1_test2"].vary + == modified_parameter_set["prefix1_test1"].vary == False ) @@ -89,3 +102,249 @@ def test_set_parameters_to_fix_during_fit_3(): != modified_parameter_set["test2"].vary == False ) + + +def test_get_default_param_names_pseudo_voigt(): + assert get_default_lineshape_param_names(Lineshape.PV) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_gaussian(): + assert get_default_lineshape_param_names(Lineshape.G) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_lorentzian(): + assert get_default_lineshape_param_names(Lineshape.L) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_pv_pv(): + assert get_default_lineshape_param_names(Lineshape.PV_PV) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction_x", + "fraction_y", + ] + + +def test_get_default_param_names_voigt(): + assert get_default_lineshape_param_names(Lineshape.V) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "gamma_x", + "gamma_y", + "fraction", + ] + + +def test_split_parameter_sets_by_peak(default_pseudo_voigt_parameter_names): + # the second element of each tuple actually contains an + # lmfit.Parameter object + params = [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ("p2_amplitude", "amplitude"), + ("p2_center_x", "center_x"), + ("p2_center_y", "center_y"), + ("p2_sigma_x", "sigma_x"), + ("p2_sigma_y", "sigma_y"), + ("p2_fraction", "fraction"), + ("p3_amplitude", "amplitude"), + ("p3_center_x", "center_x"), + ("p3_center_y", "center_y"), + ("p3_sigma_x", "sigma_x"), + ("p3_sigma_y", "sigma_y"), + ("p3_fraction", "fraction"), + ] + expected_result = [ + [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ], + [ + ("p2_amplitude", "amplitude"), + ("p2_center_x", "center_x"), + ("p2_center_y", "center_y"), + ("p2_sigma_x", "sigma_x"), + ("p2_sigma_y", "sigma_y"), + ("p2_fraction", "fraction"), + ], + [ + ("p3_amplitude", "amplitude"), + ("p3_center_x", "center_x"), + ("p3_center_y", "center_y"), + ("p3_sigma_x", "sigma_x"), + ("p3_sigma_y", "sigma_y"), + ("p3_fraction", "fraction"), + ], + ] + expected_result_parameter_names = [[j[0] for j in i] for i in expected_result] + split_parameter_names = [ + [j[0] for j in i] + for i in split_parameter_sets_by_peak( + default_pseudo_voigt_parameter_names, params + ) + ] + assert split_parameter_names == expected_result_parameter_names + + +@fixture +def 
default_pseudo_voigt_parameter_names(): + return Model(pvoigt2d).param_names + + +def test_get_prefix_from_parameter_names(default_pseudo_voigt_parameter_names): + parameter_items_with_prefixes = [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ] + expected_result = "p1_" + actual_result = get_prefix_from_parameter_names( + default_pseudo_voigt_parameter_names, parameter_items_with_prefixes + ) + assert expected_result == actual_result + + +@fixture +def pseudo_voigt_model_result(): + m1 = Model(pvoigt2d, prefix="p1_") + m2 = Model(pvoigt2d, prefix="p2_") + model = m1 + m2 + params = model.make_params() + model_result = ModelResult(model, params) + return model_result + + +def test_create_parameter_dict(pseudo_voigt_model_result): + prefix = "p1_" + params = list(pseudo_voigt_model_result.params.items())[:6] + expected_result = dict( + prefix="p1_", + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ) + actual_result = create_parameter_dict(prefix, params) + assert expected_result == actual_result + + +def test_unpack_fitted_parameters_for_lineshape_PV(pseudo_voigt_model_result): + expected_params = [ + dict( + prefix="p1_", + plane=0, + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ), + dict( + prefix="p2_", + plane=0, + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ), + ] + unpacked_params = unpack_fitted_parameters_for_lineshape( + Lineshape.PV, list(pseudo_voigt_model_result.params.items()), plane_number=0 + ) + assert expected_params == unpacked_params + + +def test_merge_unpacked_parameters_with_metadata(): + cluster_fit_df = pd.DataFrame( + dict( + plane=[0, 1, 2, 3, 0, 1, 2, 3], + prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], + ) + ) + peak_df = pd.DataFrame(dict(ASS=["p1", "p2"], data=["p1_data", "p2_data"])) + expected_result = pd.DataFrame( + dict( + plane=[0, 1, 2, 3, 0, 1, 2, 3], + prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], + ASS=["p1", "p1", "p1", "p1", "p2", "p2", "p2", "p2"], + data=[ + "p1_data", + "p1_data", + "p1_data", + "p1_data", + "p2_data", + "p2_data", + "p2_data", + "p2_data", + ], + ) + ) + actual_result = merge_unpacked_parameters_with_metadata(cluster_fit_df, peak_df) + assert expected_result.equals(actual_result) + + +# def test_perform_initial_lineshape_fit_on_cluster_of_peaks(pseudo_voigt_model_result): +# expected_result = pseudo_voigt_model_result +# actual_result = perform_initial_lineshape_fit_on_cluster_of_peaks() +# assert expected_result == actual_result From 13d53bc0e079ab1a6d900078e9d78f3acd2a5058 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 19 Feb 2024 16:02:24 -0500 Subject: [PATCH 17/37] refactoring and adding tests --- peakipy/cli/check_panel.py | 7 ------- peakipy/cli/main.py | 28 ++++++++++++++++++++++++++-- peakipy/core.py | 4 +++- test/test_cli.py | 23 +++++++++++++++++++++++ 
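# Illustrative aside, not part of the patch: the validation pattern this commit adopts,
# i.e. a base pydantic model plus lineshape-specific subclasses selected by the
# "lineshape" value (see validate_fit_data below). The two-field models here are a
# trimmed-down sketch, not the real FitDataModel field set.
from pydantic import BaseModel

class Row(BaseModel):
    amp: float
    center_x: float

class RowVoigt(Row):
    gamma_x: float

def validation_model_for(lineshape: str):
    return RowVoigt if lineshape == "V" else Row

validated = validation_model_for("V")(amp=1.0, center_x=0.5, gamma_x=0.1)
print(validated.model_dump())  # pydantic v2 API, as used by validate_fit_data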
test/test_core.py | 15 ++++++++------- 5 files changed, 60 insertions(+), 17 deletions(-) diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index 60dd576d..aba815cc 100644 --- a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -10,10 +10,6 @@ pn.extension() pn.config.theme = "dark" -global fits_path -global data_path -global config_path - @dataclass class Data: @@ -114,7 +110,4 @@ def check_panel( if __name__ == "__main__": - # fits_path = Path("../../test/test_protein_L/fits.csv") - # data_path = Path("../../test/test_protein_L/test1.ft2") - # config_path = Path("../../test/test_protein_L/peakipy.config") app() diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index f4ff51eb..9ec1eb9e 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -670,8 +670,11 @@ def fit( ), axis=1, ) + print("VOIGT") + print(df[["amp", "amp_err", "height"]]) df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp), axis=1 + lambda x: x.amp_err * (x.height / x.amp) if x.amp_err != None else 0.0, + axis=1, ) df["fwhm_g_x"] = df.sigma_x.apply( lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0)) @@ -1314,11 +1317,32 @@ class FitDataModel(BaseModel): center_y: float sigma_x: float sigma_y: float + + +class FitDataModelPVGL(FitDataModel): fraction: float +class FitDataModelVoigt(FitDataModel): + fraction: float + gamma_x: float + gamma_y: float + + +class FitDataModelPVPV(FitDataModel): + fraction_x: float + fraction_y: float + + def validate_fit_data(dict): - fit_data = FitDataModel(**dict) + lineshape = dict.get("lineshape") + if lineshape in ["PV", "G", "L"]: + fit_data = FitDataModelPVGL(**dict) + elif lineshape == "V": + fit_data = FitDataModelVoigt(**dict) + else: + fit_data = FitDataModelPVPV(**dict) + return fit_data.model_dump() diff --git a/peakipy/core.py b/peakipy/core.py index 14066107..4539507f 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -469,12 +469,14 @@ def get_params(params, name): ps = [] ps_err = [] names = [] + prefixes = [] for k in params: if name in k: ps.append(params[k].value) ps_err.append(params[k].stderr) names.append(k) - return ps, ps_err, names + prefixes.append(k.split(name)[0]) + return ps, ps_err, names, prefixes def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): diff --git a/test/test_cli.py b/test/test_cli.py index a6a4e653..191f2fa3 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -81,6 +81,16 @@ def test_fit_main_with_voigt(protein_L): peakipy.cli.main.fit(**args) +def test_fit_main_with_pv_pv(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + output_path=protein_L / Path("fits_PV_PV.csv"), + lineshape=Lineshape.PV_PV, + ) + peakipy.cli.main.fit(**args) + + def test_check_main_with_default(protein_L): args = dict( fits=protein_L / Path("fits_PV.csv"), @@ -133,6 +143,19 @@ def test_check_main_with_voigt(protein_L): peakipy.cli.main.check(**args) +def test_check_main_with_pv_pv(protein_L): + args = dict( + fits=protein_L / Path("fits_PV_PV.csv"), + data_path=protein_L / Path("test1.ft2"), + clusters=[1], + first=True, + label=True, + show=False, + individual=True, + ) + peakipy.cli.main.check(**args) + + def test_edit_with_default(protein_L): args = dict( peaklist_path=protein_L / Path("peaks.csv"), diff --git a/test/test_core.py b/test/test_core.py index dd8a02a7..2af16bd9 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -179,19 +179,20 @@ def test_fix_params(self): self.assertEqual(pars["fraction"].vary, False) 
def test_get_params(self): - mod = Model(pvoigt2d) - pars = mod.make_params(center_x=20.0, center_y=30.0) - pars["center_x"].stderr = 1.0 - pars["center_y"].stderr = 2.0 - ps, ps_err, names = get_params(pars, "center") + mod = Model(pvoigt2d, prefix="p1_") + pars = mod.make_params(p1_center_x=20.0, p1_center_y=30.0) + pars["p1_center_x"].stderr = 1.0 + pars["p1_center_y"].stderr = 2.0 + ps, ps_err, names, prefixes = get_params(pars, "center") #  get index of values - cen_x = names.index("center_x") - cen_y = names.index("center_y") + cen_x = names.index("p1_center_x") + cen_y = names.index("p1_center_y") self.assertEqual(ps[cen_x], 20.0) self.assertEqual(ps[cen_y], 30.0) self.assertEqual(ps_err[cen_x], 1.0) self.assertEqual(ps_err[cen_y], 2.0) + self.assertEqual(prefixes[cen_y], "p1_") def test_make_param_dict(self): peaks = pd.DataFrame( From ea82927e67eb1992eb48b809c845090f87dec2a1 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 19 Feb 2024 22:20:30 -0500 Subject: [PATCH 18/37] fixed bug for amplitude estimation after selecting reference planes. Need to fix for threshold selection too --- peakipy/core.py | 71 +++++++++++++++++++++++++++++++++++++++-------- test/test_core.py | 44 +++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+), 12 deletions(-) diff --git a/peakipy/core.py b/peakipy/core.py index 4539507f..e40ac687 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -25,6 +25,7 @@ from pathlib import Path from typing import List, Optional from enum import Enum +from dataclasses import dataclass, field import numpy as np import nmrglue as ng @@ -479,6 +480,38 @@ def get_params(params, name): return ps, ps_err, names, prefixes +@dataclass +class PeakLimits: + peak: pd.DataFrame + data: np.array + min_x: int = field(init=False) + max_x: int = field(init=False) + min_y: int = field(init=False) + max_y: int = field(init=False) + + def __post_init__(self): + self.max_y = int(self.peak.Y_AXIS + self.peak.YW) + 1 + if self.max_y > self.data.shape[0]: + self.max_y = self.data.shape[0] + self.max_x = int(self.peak.X_AXIS + self.peak.XW) + 1 + if self.max_x > self.data.shape[1]: + self.max_x = self.data.shape[1] + + self.min_y = int(self.peak.Y_AXIS - self.peak.YW) + if self.min_y < 0: + self.min_y = 0 + self.min_x = int(self.peak.X_AXIS - self.peak.XW) + if self.min_x < 0: + self.min_x = 0 + + +def estimate_amplitude(peak, data): + assert len(data.shape) == 2 + limits = PeakLimits(peak, data) + amplitude_est = data[limits.min_y : limits.max_y, limits.min_x : limits.max_x].sum() + return amplitude_est + + def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): """Make dict of parameter names using prefix""" @@ -490,11 +523,7 @@ def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): param_dict[str_form("center_x")] = peak.X_AXISf param_dict[str_form("center_y")] = peak.Y_AXISf # estimate peak volume - amplitude_est = data[ - int(peak.Y_AXIS) - int(peak.YW) : int(peak.Y_AXIS) + int(peak.YW) + 1, - int(peak.X_AXIS) - int(peak.XW) : int(peak.X_AXIS) + int(peak.XW) + 1, - ].sum() - + amplitude_est = estimate_amplitude(peak, data) param_dict[str_form("amplitude")] = amplitude_est # sigma linewidth esimate param_dict[str_form("sigma_x")] = peak.XW / 2.0 @@ -566,7 +595,11 @@ def to_prefix(x): def make_models( - model, peaks, data, lineshape: Lineshape = Lineshape.PV, xy_bounds=None + model, + peaks, + data, + lineshape: Lineshape = Lineshape.PV, + xy_bounds=None, ): """Make composite models for multiple peaks @@ -596,7 +629,11 @@ def make_models( # make 
model for first peak mod = Model(model, prefix="%s" % to_prefix(peaks.ASS.iloc[0])) # add parameters - param_dict = make_param_dict(peaks, data, lineshape=lineshape) + param_dict = make_param_dict( + peaks, + data, + lineshape=lineshape, + ) p_guess = mod.make_params(**param_dict) elif len(peaks) > 1: @@ -606,7 +643,11 @@ def make_models( for _, peak in remaining_peaks: mod += Model(model, prefix="%s" % to_prefix(peak.ASS)) - param_dict = make_param_dict(peaks, data, lineshape=lineshape) + param_dict = make_param_dict( + peaks, + data, + lineshape=lineshape, + ) p_guess = mod.make_params(**param_dict) # add Peak params to p_guess @@ -890,9 +931,6 @@ def fit_first_plane( """ lineshape_function = get_lineshape_function(lineshape) - mod, p_guess = make_models( - lineshape_function, group, data, lineshape=lineshape, xy_bounds=xy_bounds - ) first_plane_data = data[0] mask, peak = make_mask_from_peak_cluster(group, first_plane_data) @@ -909,7 +947,16 @@ def fit_first_plane( max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( data.shape, max_x, min_x, max_y, min_y ) - + selected_data = select_reference_planes_using_indices( + data, reference_plane_indices + ).sum(axis=0) + mod, p_guess = make_models( + lineshape_function, + group, + selected_data, + lineshape=lineshape, + xy_bounds=xy_bounds, + ) peak_slices = slice_peaks_from_data_using_mask(data, mask) peak_slices = select_reference_planes_using_indices( peak_slices, reference_plane_indices diff --git a/test/test_core.py b/test/test_core.py index 2af16bd9..2a0612c9 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -1,5 +1,6 @@ import unittest from unittest.mock import patch +from collections import namedtuple import numpy as np from numpy.testing import assert_array_equal @@ -24,6 +25,7 @@ select_reference_planes_using_indices, select_planes_above_threshold_from_masked_data, slice_peaks_from_data_using_mask, + estimate_amplitude, ) @@ -107,6 +109,48 @@ def test_select_planes_above_threshold_from_masked_data(): ) +def test_make_param_dict(): + selected_planes = [1, 2] + data = np.ones((4, 10, 5)) + expected_shape = (2, 10, 5) + actual_shape = data[np.array(selected_planes)].shape + assert expected_shape == actual_shape + + +def test_make_param_dict_sum(): + data = np.ones((4, 10, 5)) + expected_sum = 200 + actual_sum = data.sum() + assert expected_sum == actual_sum + + +def test_make_param_dict_selected(): + selected_planes = [1, 2] + data = np.ones((4, 10, 5)) + data = data[np.array(selected_planes)] + expected_sum = 100 + actual_sum = data.sum() + assert expected_sum == actual_sum + + +def test_estimate_amplitude(): + peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) + p = peak(5, 2, 3, 2) + data = np.ones((20, 10)) + expected_result = 25 + actual_result = estimate_amplitude(p, data) + assert expected_result == actual_result + + +def test_estimate_amplitude_invalid_indices(): + peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) + p = peak(1, 2, 3, 2) + data = np.ones((20, 10)) + expected_result = 20 + actual_result = estimate_amplitude(p, data) + assert expected_result == actual_result + + class TestCoreFunctions(unittest.TestCase): def test_make_mask(self): data = np.ones((10, 10)) From b07703e641cfd19f6293fb86651baf0236ffd6f7 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sat, 24 Feb 2024 22:29:16 -0500 Subject: [PATCH 19/37] major refactor --- peakipy/cli/fit.py | 414 +++++++++++++++++++++++---------- peakipy/cli/main.py | 551 ++++++++++++++++++++------------------------ peakipy/core.py | 532 
+++++++++++------------------------------- pyproject.toml | 1 + test/test_main.py | 78 +++++++ 5 files changed, 754 insertions(+), 822 deletions(-) create mode 100644 test/test_main.py diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index cc1dcb73..4930c813 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -11,18 +11,25 @@ from rich import print from rich.console import Console from pydantic import BaseModel -from lmfit import Model, Parameter +from lmfit import Model, Parameter, Parameters from lmfit.model import ModelResult from peakipy.core import ( fix_params, - fit_first_plane, - LoadData, Lineshape, pvoigt2d, voigt2d, pv_pv, to_prefix, + get_limits_for_axis_in_points, + get_lineshape_function, + deal_with_peaks_on_edge_of_spectrum, + select_planes_above_threshold_from_masked_data, + select_reference_planes_using_indices, + make_models, + make_meshgrid, + slice_peaks_from_data_using_mask, + make_mask_from_peak_cluster, ) console = Console() @@ -40,6 +47,8 @@ @dataclass class FitPeaksArgs: noise: float + uc_dics: dict + lineshape: Lineshape dims: List[int] = field(default_factory=lambda: [0, 1, 2]) colors: Tuple[str] = ("#5e3c99", "#e66101") max_cluster_size: Optional[int] = None @@ -48,102 +57,77 @@ class FitPeaksArgs: vclist: Optional[Path] = (None,) plane: Optional[List[int]] = (None,) exclude_plane: Optional[List[int]] = (None,) - reference_plane_index: List[int] = ([],) + reference_plane_indices: List[int] = ([],) initial_fit_threshold: Optional[float] = (None,) mp: bool = (True,) - plot: Optional[Path] = (None,) - show: bool = (False,) - verb: bool = (False,) + verbose: bool = (False,) +@dataclass +class FirstPlaneFitInput: + group: pd.DataFrame + last_peak: pd.DataFrame + mask: np.array + mod: Model + p_guess: Parameters + XY: np.array + peak_slices: np.array + XY_slices: np.array + min_x: float + max_x: float + min_y: float + max_y: float + uc_dics: dict + first_plane_data: np.array + weights: np.array + fit_method: str = "leastsq" + verbose: bool = False + + +@dataclass class FitPeaksInput: """input data for the fit_peaks function""" - def __init__( - self, - args: dict, - data: np.array, - config: dict, - plane_numbers: list, - reference_planes_for_initial_fit: List[int] = [], - use_only_planes_above_threshold: Optional[float] = None, - ): - self._data = data - self._args = args - self._config = config - self._plane_numbers = plane_numbers - self._planes_for_initial_fit = reference_planes_for_initial_fit - self._use_only_planes_above_threshold = use_only_planes_above_threshold - - def check_integer_list(self): - if hasattr(self._planes_for_initial_fit, "append"): - pass - else: - return False - if all([(type(i) == int) for i in self._planes_for_initial_fit]): - pass - else: - return False - if all([((i - 1) > self._data.shape[0]) for i in self._planes_for_initial_fit]): - return True - else: - return False - - def sum_planes_for_initial_fit(self): - if ( - self._planes_for_initial_fit - == self._use_only_planes_above_threshold - == None - ): - return self._data.sum(axis=0) - - elif self.check_integer_list(): - return self._data[self._planes_for_initial_fit].sum(axis=0) - - elif type(self._use_only_planes_above_threshold) == float: - # very crude at the moment - return self._data[ - self._data.max(axis=1).max(axis=1) - > self._use_only_planes_above_threshold - ] - else: - return self._data.sum(axis=0) - - @property - def data(self): - return self._data - - @property - def args(self): - return self._args - - @property - def config(self): - return self._config - - 
@property - def plane_numbers(self): - return self._plane_numbers - - @property - def summed_planes_for_initial_fit(self): - return self.sum_planes_for_initial_fit() - + args: FitPeaksArgs + data: np.array + config: dict + plane_numbers: list -class FitPeaksResult: - """Result of fitting a set of peaks""" - def __init__(self, df: pd.DataFrame, log: str): - self._df = df - self._log = log +@dataclass +class FitResult: + out: ModelResult + mask: np.array + fit_str: str + log: str + group: pd.core.groupby.generic.DataFrameGroupBy + uc_dics: dict + min_x: float + min_y: float + max_x: float + max_y: float + X: np.array + Y: np.array + Z: np.array + Z_sim: np.array + peak_slices: np.array + XY_slices: np.array + weights: np.array + mod: Model + + def check_shifts(self): + """Calculate difference between initial peak positions + and check whether they moved too much from original + position + + """ + pass - @property - def df(self): - return self._df - @property - def log(self): - return self._log +@dataclass +class FitPeaksResult: + df: pd.DataFrame + log: str class FitPeaksResultDfRow(BaseModel): @@ -301,39 +285,76 @@ def unpack_fitted_parameters_for_lineshape( def perform_initial_lineshape_fit_on_cluster_of_peaks( - cluster_of_peaks, fit_input: FitPeaksInput -): - fit_result = fit_first_plane( - cluster_of_peaks, - fit_input.data, - # norm(summed_planes), - fit_input.args.get("uc_dics"), - lineshape=fit_input.args.get("lineshape"), - xy_bounds=fit_input.args.get("xy_bounds"), - verbose=fit_input.args.get("verb"), - noise=fit_input.args.get("noise"), - fit_method=fit_input.config.get("fit_method", "leastsq"), - reference_plane_indices=fit_input.args.get("reference_plane_indices"), - threshold=fit_input.args.get("initial_fit_threshold"), + first_plane_fit_input: FirstPlaneFitInput, +) -> FitResult: + mod = first_plane_fit_input.mod + peak_slices = first_plane_fit_input.peak_slices + XY_slices = first_plane_fit_input.XY_slices + p_guess = first_plane_fit_input.p_guess + weights = first_plane_fit_input.weights + fit_method = first_plane_fit_input.fit_method + mask = first_plane_fit_input.mask + XY = first_plane_fit_input.XY + X, Y = XY + first_plane_data = first_plane_fit_input.first_plane_data + peak = first_plane_fit_input.last_peak + group = first_plane_fit_input.group + min_x = first_plane_fit_input.min_x + min_y = first_plane_fit_input.min_y + max_x = first_plane_fit_input.max_x + max_y = first_plane_fit_input.max_y + verbose = first_plane_fit_input.verbose + uc_dics = first_plane_fit_input.uc_dics + + out = mod.fit( + peak_slices, XY=XY_slices, params=p_guess, weights=weights, method=fit_method + ) + + if verbose: + console.print(out.fit_report(), style="bold") + + z_sim = mod.eval(XY=XY, params=out.params) + z_sim[~mask] = np.nan + z_plot = first_plane_data.copy() + z_plot[~mask] = np.nan + fit_str = "" + log = "" + + return FitResult( + out=out, + mask=mask, + fit_str=fit_str, + log=log, + group=group, + uc_dics=uc_dics, + min_x=min_x, + min_y=min_y, + max_x=max_x, + max_y=max_y, + X=X, + Y=Y, + Z=z_plot, + Z_sim=z_sim, + peak_slices=peak_slices, + XY_slices=XY_slices, + weights=weights, + mod=mod, ) - return fit_result def refit_peaks_with_constraints(fit_input: FitPeaksInput, fit_result: FitPeaksResult): fit_results = [] for num, d in enumerate(fit_input.data): plane_number = fit_input.plane_numbers[num] + masked_data = d[fit_result.mask] fit_result.out.fit( - data=d[fit_result.mask], + data=masked_data, params=fit_result.out.params, - weights=1.0 - / np.array( - 
[fit_input.args.get("noise")] * len(np.ravel(d[fit_result.mask])) - ), + weights=fit_result.weights, ) fit_results.extend( unpack_fitted_parameters_for_lineshape( - fit_input.args.get("lineshape"), + fit_input.args.lineshape, list(fit_result.out.params.items()), plane_number, ) @@ -381,7 +402,94 @@ def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): return df -def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: +def prepare_group_of_peaks_for_fitting( + group, data, fit_peaks_input_args: FitPeaksArgs, fit_method="leastsq" +): + lineshape_function = get_lineshape_function(fit_peaks_input_args.lineshape) + + first_plane_data = data[0] + mask, peak = make_mask_from_peak_cluster(group, first_plane_data) + + x_radius = group.X_RADIUS.max() + y_radius = group.Y_RADIUS.max() + + max_x, min_x = get_limits_for_axis_in_points( + group_axis_points=group.X_AXISf, mask_radius_in_points=x_radius + ) + max_y, min_y = get_limits_for_axis_in_points( + group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius + ) + max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( + data.shape, max_x, min_x, max_y, min_y + ) + selected_data = select_reference_planes_using_indices( + data, fit_peaks_input_args.reference_plane_indices + ).sum(axis=0) + mod, p_guess = make_models( + lineshape_function, + group, + selected_data, + lineshape=fit_peaks_input_args.lineshape, + xy_bounds=fit_peaks_input_args.xy_bounds, + ) + peak_slices = slice_peaks_from_data_using_mask(data, mask) + peak_slices = select_reference_planes_using_indices( + peak_slices, fit_peaks_input_args.reference_plane_indices + ) + peak_slices = select_planes_above_threshold_from_masked_data( + peak_slices, fit_peaks_input_args.initial_fit_threshold + ) + peak_slices = peak_slices.sum(axis=0) + + XY = make_meshgrid(data.shape) + X, Y = XY + + XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) + weights = 1.0 / np.array([fit_peaks_input_args.noise] * len(np.ravel(peak_slices))) + # weights = 1.0 / np.ravel(peak_slices) + return FirstPlaneFitInput( + group=group, + last_peak=peak, + mask=mask, + mod=mod, + p_guess=p_guess, + XY=XY, + peak_slices=peak_slices, + XY_slices=XY_slices, + weights=weights, + fit_method=fit_method, + first_plane_data=first_plane_data, + uc_dics=fit_peaks_input_args.uc_dics, + min_x=min_x, + min_y=min_y, + max_x=max_x, + max_y=max_y, + verbose=fit_peaks_input_args.verbose, + ) + + +def fit_cluster_of_peaks( + clustid: int, peak_cluster: pd.DataFrame, fit_input: FitPeaksInput +) -> pd.DataFrame: + data_for_fitting = prepare_group_of_peaks_for_fitting( + peak_cluster, + fit_input.data, + fit_input.args, + fit_method=fit_input.config.get("fit_method", "leastsq"), + ) + fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks(data_for_fitting) + fit_result.out.params, float_str = set_parameters_to_fix_during_fit( + fit_result.out.params, fit_input.args.to_fix + ) + fit_results = refit_peaks_with_constraints(fit_input, fit_result) + cluster_df = pd.DataFrame(fit_results) + cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out) + cluster_df["clustid"] = clustid + cluster_df = merge_unpacked_parameters_with_metadata(cluster_df, peak_cluster) + return cluster_df + + +def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: """Fit set of peak clusters to lineshape model :param peaks: peaklist with generated by peakipy read or edit @@ -394,32 +502,82 @@ def fit_peaks(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> 
FitPeaksResult: :rtype: FitPeaksResult """ peak_clusters = peaks.groupby("CLUSTID") - max_cluster_size = fit_input.args.get("max_cluster_size") filtered_peaks = filter_peak_clusters_by_max_cluster_size( - peak_clusters, max_cluster_size + peak_clusters, fit_input.args.max_cluster_size ) peak_clusters = filtered_peaks.groupby("CLUSTID") - # setup arguments - to_fix = fit_input.args.get("to_fix") - lineshape = fit_input.args.get("lineshape") out_str = "" cluster_dfs = [] - for name, peak_cluster in peak_clusters: - fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks( - peak_cluster, fit_input - ) - fit_result.out.params, float_str = set_parameters_to_fix_during_fit( - fit_result.out.params, to_fix + for clustid, peak_cluster in peak_clusters: + cluster_df = fit_cluster_of_peaks( + clustid=clustid, peak_cluster=peak_cluster, fit_input=fit_input ) - fit_results = refit_peaks_with_constraints(fit_input, fit_result) - cluster_df = pd.DataFrame(fit_results) - cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out) - cluster_df["clustid"] = name - cluster_df = merge_unpacked_parameters_with_metadata(cluster_df, peak_cluster) cluster_dfs.append(cluster_df) df = pd.concat(cluster_dfs, ignore_index=True) - df["lineshape"] = lineshape.value - if fit_input.args.get("vclist"): + df["lineshape"] = fit_input.args.lineshape.value + + if fit_input.args.vclist: df = add_vclist_to_df(fit_input, df) df = rename_columns_for_compatibility(df) return FitPeaksResult(df=df, log=out_str) + + +@dataclass +class JackKnifeResult: + mean: float + std: float + + +def jack_knife_sample_errors( + peaks: pd.DataFrame, fit_input: FirstPlaneFitInput +) -> JackKnifeResult: + peak_slices = fit_input.peak_slices + XY_slices = fit_input.XY_slices + weights = fit_input.weights + jk_results = [] + for i in range(len(peak_slices)): + peak_slices = np.delete(peak_slices, i, None) + X = np.delete(XY_slices[0], i, None) + Y = np.delete(XY_slices[1], i, None) + weights = np.delete(weights, i, None) + jk_results.append( + mod.fit(peak_slices, XY=[X, Y], params=out.params, weights=weights) + ) + + # print(jk_results) + amps = [] + sigma_xs = [] + sigma_ys = [] + names = [] + with open("test_jackknife", "w") as f: + for i in jk_results: + f.write(i.fit_report()) + amp, amp_err, name = get_params(i.params, "amp") + sigma_x, sigma_x_err, name_x = get_params(i.params, "sigma_x") + sigma_y, sigma_y_err, name_y = get_params(i.params, "sigma_y") + f.write(f"{amp},{amp_err},{name_y}\n") + amps.extend(amp) + names.extend(name_y) + sigma_xs.extend(sigma_x) + sigma_ys.extend(sigma_y) + + df = pd.DataFrame( + {"amp": amps, "name": names, "sigma_x": sigma_xs, "sigma_y": sigma_ys} + ) + grouped = df.groupby("name") + mean_amps = grouped.amp.mean() + std_amps = grouped.amp.std() + mean_sigma_x = grouped.sigma_x.mean() + std_sigma_x = grouped.sigma_x.std() + mean_sigma_y = grouped.sigma_y.mean() + std_sigma_y = grouped.sigma_y.std() + f.write("#####################################\n") + f.write( + f"{mean_amps}, {std_amps}, {mean_sigma_x}, {std_sigma_x}, {mean_sigma_y}, {std_sigma_y} " + ) + f.write(self.out.fit_report()) + f.write("#####################################\n") + # print(amps) + # mean = np.mean(amps) + # std = np.std(amps) + return JackKnifeResult(mean=mean_amps, std=std_amps) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 9ec1eb9e..4d4f44b0 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -70,10 +70,20 @@ OutFmt, get_limits_for_axis_in_points, deal_with_peaks_on_edge_of_spectrum, 
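# Illustrative aside, not part of the patch: the leave-one-out (jackknife) resampling
# behind jack_knife_sample_errors above. Note the version in the patch still refers to
# mod and out from the function it was extracted from, and mutates peak_slices/weights
# cumulatively inside the loop. A self-contained sketch, assuming a 1D lmfit model with
# independent variable x and a parameter named "amplitude" (both hypothetical here):
import numpy as np

def jackknife_estimates(fit, data, x):
    # fit is an lmfit ModelResult; drop one point at a time and refit on the rest
    estimates = []
    for i in range(len(data)):
        data_i = np.delete(data, i)
        x_i = np.delete(x, i)
        result = fit.model.fit(data_i, x=x_i, params=fit.params)
        estimates.append(result.params["amplitude"].value)
    estimates = np.array(estimates)
    n = len(estimates)
    # standard jackknife error: sqrt((n - 1)/n * sum((theta_i - theta_bar)^2))
    std_err = np.sqrt((n - 1) / n * np.sum((estimates - estimates.mean()) ** 2))
    return estimates.mean(), std_err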
+ calculate_fwhm_for_voigt_lineshape, + calculate_height_for_voigt_lineshape, + calculate_fwhm_for_pseudo_voigt_lineshape, + calculate_height_for_pseudo_voigt_lineshape, + calculate_height_for_gaussian_lineshape, + calculate_height_for_lorentzian_lineshape, + calculate_height_for_pv_pv_lineshape, + calculate_peak_centers_in_ppm, + calculate_peak_linewidths_in_hz, ) from .fit import ( - fit_peaks, + fit_peak_clusters, FitPeaksInput, + FitPeaksArgs, ) from .edit import BokehScript from .spec import yaml_file @@ -374,6 +384,218 @@ def read( ) +def calculate_lineshape_specific_height_and_fwhm( + lineshape: Lineshape, df: pd.DataFrame +): + match lineshape: + case lineshape.V: + df = calculate_height_for_voigt_lineshape(df) + df = calculate_fwhm_for_voigt_lineshape(df) + + case lineshape.PV: + df = calculate_height_for_pseudo_voigt_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.G: + df = calculate_height_for_gaussian_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.L: + df = calculate_height_for_lorentzian_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.PV_PV: + df = calculate_height_for_pv_pv_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + case _: + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + return df + + +def get_vclist(vclist, args): + # read vclist + if vclist is None: + vclist = False + elif vclist.exists(): + vclist_data = np.genfromtxt(vclist) + args["vclist_data"] = vclist_data + vclist = True + else: + raise Exception("vclist not found...") + + args["vclist"] = vclist + return args + + +def check_data_shape_is_consistent_with_dims(peakipy_data): + # check data shape is consistent with dims + if len(peakipy_data.dims) != len(peakipy_data.data.shape): + print( + f"Dims are {peakipy_data.dims} while data shape is {peakipy_data.data.shape}?" 
+ )
+ exit()
+
+
+def select_specified_planes(plane, peakipy_data):
+ plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])
+ # only fit specified planes
+ if plane:
+ inds = [i for i in plane]
+ data_inds = [
+ (i in inds) for i in range(peakipy_data.data.shape[peakipy_data.dims[0]])
+ ]
+ plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[
+ data_inds
+ ]
+ peakipy_data.data = peakipy_data.data[data_inds]
+ print(
+ f"[yellow]Using only planes {plane} data now has the following shape[/yellow]",
+ peakipy_data.data.shape,
+ )
+ if peakipy_data.data.shape[peakipy_data.dims[0]] == 0:
+ print("[red]You have excluded all the data![/red]", peakipy_data.data.shape)
+ exit()
+ return plane_numbers, peakipy_data
+
+
+def exclude_specified_planes(exclude_plane, peakipy_data):
+ plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])
+ # do not fit these planes
+ if exclude_plane:
+ inds = [i for i in exclude_plane]
+ data_inds = [
+ (i not in inds)
+ for i in range(peakipy_data.data.shape[peakipy_data.dims[0]])
+ ]
+ plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[
+ data_inds
+ ]
+ peakipy_data.data = peakipy_data.data[data_inds]
+ print(
+ f"[yellow]Excluding planes {exclude_plane} data now has the following shape[/yellow]",
+ peakipy_data.data.shape,
+ )
+ if peakipy_data.data.shape[peakipy_data.dims[0]] == 0:
+ print("[red]You have excluded all the data![/red]", peakipy_data.data.shape)
+ exit()
+ return plane_numbers, peakipy_data
+
+
+def check_for_include_column_and_add_if_missing(peakipy_data):
+ # only include peaks with 'include'
+ if "include" in peakipy_data.df.columns:
+ pass
+ else:
+ # for compatibility
+ peakipy_data.df["include"] = peakipy_data.df.apply(lambda _: "yes", axis=1)
+ return peakipy_data
+
+
+def remove_excluded_peaks(peakipy_data):
+ if len(peakipy_data.df[peakipy_data.df.include != "yes"]) > 0:
+ excluded = peakipy_data.df[peakipy_data.df.include != "yes"][column_selection]
+ table = df_to_rich_table(
+ excluded,
+ title="[yellow] Excluded peaks [/yellow]",
+ columns=excluded.columns,
+ styles=["yellow" for i in excluded.columns],
+ )
+ print(table)
+ peakipy_data.df = peakipy_data.df[peakipy_data.df.include == "yes"]
+ return peakipy_data
+
+
+def warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data):
+ if max_cluster_size is None:
+ max_cluster_size = peakipy_data.df.MEMCNT.max()
+ if peakipy_data.df.MEMCNT.max() > 10:
+ print(
+ f"""[red]
+ ##################################################################
+ You have some clusters of as many as {max_cluster_size} peaks.
+ You may want to consider reducing the size of your clusters as the
+ fits will struggle. 
+
+
+def warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data):
+ if max_cluster_size is None:
+ max_cluster_size = peakipy_data.df.MEMCNT.max()
+ if peakipy_data.df.MEMCNT.max() > 10:
+ print(
+ f"""[red]
+ ##################################################################
+ You have some clusters of as many as {max_cluster_size} peaks.
+ You may want to consider reducing the size of your clusters as the
+ fits will struggle.
+
+ Otherwise you can use the --max-cluster-size flag to exclude large
+ clusters
+ ##################################################################
+ [/red]"""
+ )
+ return max_cluster_size
+
+
+def update_linewidths_from_hz_to_points(peakipy_data):
+ """convert linewidths from Hz to points in case they were adjusted when running edit.py"""
+ peakipy_data.df["XW"] = peakipy_data.df.XW_HZ * peakipy_data.pt_per_hz_f2
+ peakipy_data.df["YW"] = peakipy_data.df.YW_HZ * peakipy_data.pt_per_hz_f1
+ return peakipy_data
+
+
+def update_peak_positions_from_ppm_to_points(peakipy_data):
+ # convert peak positions from ppm to points in case they were adjusted running edit.py
+ peakipy_data.df["X_AXIS"] = peakipy_data.df.X_PPM.apply(
+ lambda x: peakipy_data.uc_f2(x, "PPM")
+ )
+ peakipy_data.df["Y_AXIS"] = peakipy_data.df.Y_PPM.apply(
+ lambda x: peakipy_data.uc_f1(x, "PPM")
+ )
+ peakipy_data.df["X_AXISf"] = peakipy_data.df.X_PPM.apply(
+ lambda x: peakipy_data.uc_f2.f(x, "PPM")
+ )
+ peakipy_data.df["Y_AXISf"] = peakipy_data.df.Y_PPM.apply(
+ lambda x: peakipy_data.uc_f1.f(x, "PPM")
+ )
+ return peakipy_data
+
+
+def unpack_xy_bounds(xy_bounds, peakipy_data):
+ match xy_bounds:
+ case (0, 0):
+ xy_bounds = None
+ case (x, y):
+ # convert ppm to points
+ xy_bounds = list(xy_bounds)
+ xy_bounds[0] = xy_bounds[0] * peakipy_data.pt_per_ppm_f2
+ xy_bounds[1] = xy_bounds[1] * peakipy_data.pt_per_ppm_f1
+ return xy_bounds
+
+
+def save_data(df, output_name):
+ suffix = output_name.suffix
+ if suffix == ".csv":
+ df.to_csv(output_name, float_format="%.4f", index=False)
+
+ elif suffix == ".tab":
+ df.to_csv(output_name, sep="\t", float_format="%.4f", index=False)
+
+ else:
+ df.to_pickle(output_name)
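+
+
+# Illustrative example of unpack_xy_bounds (values assumed): --xy-bounds 0.05 0.5
+# arrives as the ppm tuple (0.05, 0.5) and is rescaled to points, while the
+# default (0, 0) disables positional bounds altogether:
+#
+#     unpack_xy_bounds((0, 0), peakipy_data)  # -> None
+#     unpack_xy_bounds((0.05, 0.5), peakipy_data)
+#     # -> [0.05 * peakipy_data.pt_per_ppm_f2, 0.5 * peakipy_data.pt_per_ppm_f1]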
+
+
reference_plane_index_help = (
"Select planes to use for initial estimation of lineshape parameters"
)
@@ -396,9 +618,7 @@ def fit(
] = [],
initial_fit_threshold: Optional[float] = None,
mp: bool = True,
- plot: Optional[Path] = None,
- show: bool = False,
- verb: bool = False,
+ verbose: bool = False,
):
"""Fit NMR data to lineshape models and deconvolute overlapping peaks
@@ -436,13 +656,6 @@ def fit(
- intensities above this threshold will be included in the intial fit of summed planes.
+ intensities above this threshold will be included in the initial fit of summed planes.
mp : bool
Use multiprocessing [default: True]
- plot : Optional[Path]
- Whether to plot wireframe fits for each peak
- (saved into Path provided) [default: None]
- show : bool
- Whether to show (using plt.show()) wireframe
- fits for each peak. Only works if Path is provided to the plot
- argument
- verb : bool
+ verbose : bool
Print what's going on
"""
@@ -455,153 +668,36 @@ def fit(
args, config = read_config(args)
dims = config.get("dims", [0, 1, 2])
peakipy_data = LoadData(peaklist_path, data_path, dims=dims)
+ peakipy_data = check_for_include_column_and_add_if_missing(peakipy_data)
+ peakipy_data = remove_excluded_peaks(peakipy_data)
+ max_cluster_size = warn_if_trying_to_fit_large_clusters(
+ max_cluster_size, peakipy_data
+ )
-
- # only include peaks with 'include'
- if "include" in peakipy_data.df.columns:
- pass
- else:
- # for compatibility
- peakipy_data.df["include"] = peakipy_data.df.apply(lambda _: "yes", axis=1)
-
- if len(peakipy_data.df[peakipy_data.df.include != "yes"]) > 0:
- excluded = peakipy_data.df[peakipy_data.df.include != "yes"][column_selection]
- table = df_to_rich_table(
- excluded,
- title="[yellow] Excluded peaks [/yellow]",
- columns=excluded.columns,
- styles=["yellow" for i in excluded.columns],
- )
- print(table)
- peakipy_data.df = peakipy_data.df[peakipy_data.df.include == "yes"]
-
- # filter list based on cluster size
- if max_cluster_size is None:
- max_cluster_size = peakipy_data.df.MEMCNT.max()
- if peakipy_data.df.MEMCNT.max() > 10:
- print(
- f"""[red]
- ##################################################################
- You have some clusters of as many as {max_cluster_size} peaks.
- You may want to consider reducing the size of your clusters as the
- fits will struggle.
-
- Otherwise you can use the --max-cluster-size flag to exclude large
- clusters
- ##################################################################
- [/red]"""
- )
- else:
- max_cluster_size = max_cluster_size
 args["max_cluster_size"] = max_cluster_size
 args["to_fix"] = fix
- args["verb"] = verb
- args["show"] = show
+ args["verbose"] = verbose
 args["mp"] = mp
 args["initial_fit_threshold"] = initial_fit_threshold
 args["reference_plane_indices"] = reference_plane_index
- # read vclist
- if vclist is None:
- vclist = False
- elif vclist.exists():
- vclist_data = np.genfromtxt(vclist)
- args["vclist_data"] = vclist_data
- vclist = True
- else:
- raise Exception("vclist not found...")
-
- args["vclist"] = vclist
-
+ args = get_vclist(vclist, args)
- # plot results or not
+ # open log file
 log_file = open(tmp_path / log_path, "w")
- if plot:
- plot.mkdir(parents=True, exist_ok=True)
-
- args["plot"] = plot
 uc_dics = {"f1": peakipy_data.uc_f1, "f2": peakipy_data.uc_f2}
 args["uc_dics"] = uc_dics
- # check data shape is consistent with dims
- if len(peakipy_data.dims) != len(peakipy_data.data.shape):
- print(
- f"Dims are {peakipy_data.dims} while data shape is {peakipy_data.data.shape}?"
- ) - exit() - - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) - # only fit specified planes - if plane: - inds = [i for i in plane] - data_inds = [ - (i in inds) for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) - ] - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ - data_inds - ] - peakipy_data.data = peakipy_data.data[data_inds] - print( - "[yellow]Using only planes {plane} data now has the following shape[/yellow]", - peakipy_data.data.shape, - ) - if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: - print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) - exit() - - # do not fit these planes - if exclude_plane: - inds = [i for i in exclude_plane] - data_inds = [ - (i not in inds) - for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) - ] - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ - data_inds - ] - peakipy_data.data = peakipy_data.data[data_inds] - print( - f"[yellow]Excluding planes {exclude_plane} data now has the following shape[/yellow]", - peakipy_data.data.shape, - ) - if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: - print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) - exit() - - # setting noise for calculation of chi square - # if noise is None: + check_data_shape_is_consistent_with_dims(peakipy_data) + plane_numbers, peakipy_data = select_specified_planes(plane, peakipy_data) + plane_numbers, peakipy_data = exclude_specified_planes(exclude_plane, peakipy_data) noise = abs(threshold_otsu(peakipy_data.data)) args["noise"] = noise args["lineshape"] = lineshape - - match xy_bounds: - case (0, 0): - xy_bounds = None - case (x, y): - # convert ppm to points - xy_bounds = list(xy_bounds) - xy_bounds[0] = xy_bounds[0] * peakipy_data.pt_per_ppm_f2 - xy_bounds[1] = xy_bounds[1] * peakipy_data.pt_per_ppm_f1 - + xy_bounds = unpack_xy_bounds(xy_bounds, peakipy_data) args["xy_bounds"] = xy_bounds - # args, config = read_config(args) - # convert linewidths from Hz to points in case they were adjusted when running edit.py - peakipy_data.df["XW"] = peakipy_data.df.XW_HZ * peakipy_data.pt_per_hz_f2 - peakipy_data.df["YW"] = peakipy_data.df.YW_HZ * peakipy_data.pt_per_hz_f1 - - # convert peak positions from ppm to points in case they were adjusted running edit.py - peakipy_data.df["X_AXIS"] = peakipy_data.df.X_PPM.apply( - lambda x: peakipy_data.uc_f2(x, "PPM") - ) - peakipy_data.df["Y_AXIS"] = peakipy_data.df.Y_PPM.apply( - lambda x: peakipy_data.uc_f1(x, "PPM") - ) - peakipy_data.df["X_AXISf"] = peakipy_data.df.X_PPM.apply( - lambda x: peakipy_data.uc_f2.f(x, "PPM") - ) - peakipy_data.df["Y_AXISf"] = peakipy_data.df.Y_PPM.apply( - lambda x: peakipy_data.uc_f1.f(x, "PPM") - ) - # start fitting data + peakipy_data = update_linewidths_from_hz_to_points(peakipy_data) + peakipy_data = update_peak_positions_from_ppm_to_points(peakipy_data) # prepare data for multiprocessing nclusters = peakipy_data.df.CLUSTID.nunique() npeaks = peakipy_data.df.shape[0] @@ -610,9 +706,9 @@ def fit( f"[green]Using multiprocessing to fit {npeaks} peaks in {nclusters} clusters [/green]" + "\n" ) - # split peak lists - # tmp_dir = split_peaklist(peakipy_data.df, n_cpu) - fit_peaks_args = FitPeaksInput(args, peakipy_data.data, config, plane_numbers) + fit_peaks_args = FitPeaksInput( + FitPeaksArgs(**args), peakipy_data.data, config, plane_numbers + ) with ( Pool(processes=n_cpu) as pool, tqdm( @@ -621,11 +717,9 @@ def fit( colour="green", ) as pbar, ): - # 
result = pool.map(fit_peaks, peaklists) - # result = pool.starmap(fit_peaks, zip(peaklists, args_list)) result = [ pool.apply_async( - fit_peaks, + fit_peak_clusters, args=( peaklist, fit_peaks_args, @@ -636,13 +730,14 @@ def fit( ] df = pd.concat([i.df for i in result], ignore_index=True) for num, i in enumerate(result): - # i.df.to_csv(tmp_dir / Path(f"peaks_{num}_fit.csv"), index=False) log_file.write(i.log + "\n") else: print("[green]Not using multiprocessing[green]") - result = fit_peaks( + result = fit_peak_clusters( peakipy_data.df, - FitPeaksInput(args, peakipy_data.data, config, plane_numbers), + FitPeaksInput( + FitPeaksArgs(**args), peakipy_data.data, config, plane_numbers + ), ) df = result.df log_file.write(result.log) @@ -652,159 +747,11 @@ def fit( # close log file log_file.close() output = Path(output_path) - suffix = output.suffix - #  convert sigmas to fwhm - match lineshape: - case lineshape.V: - # calculate peak height - df["height"] = df.apply( - lambda x: voigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - gamma_x=x.gamma_x, - gamma_y=x.gamma_y, - amplitude=x.amp, - ), - axis=1, - ) - print("VOIGT") - print(df[["amp", "amp_err", "height"]]) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp) if x.amp_err != None else 0.0, - axis=1, - ) - df["fwhm_g_x"] = df.sigma_x.apply( - lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0)) - ) # fwhm of gaussian - df["fwhm_g_y"] = df.sigma_y.apply( - lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0)) - ) - df["fwhm_l_x"] = df.gamma_x.apply(lambda x: 2.0 * x) # fwhm of lorentzian - df["fwhm_l_y"] = df.gamma_y.apply(lambda x: 2.0 * x) - df["fwhm_x"] = df.apply( - lambda x: 0.5346 * x.fwhm_l_x - + np.sqrt(0.2166 * x.fwhm_l_x**2.0 + x.fwhm_g_x**2.0), - axis=1, - ) - df["fwhm_y"] = df.apply( - lambda x: 0.5346 * x.fwhm_l_y - + np.sqrt(0.2166 * x.fwhm_l_y**2.0 + x.fwhm_g_y**2.0), - axis=1, - ) - # df["fwhm_y"] = df.apply(lambda x: x.gamma_y + np.sqrt(x.gamma_y**2.0 + 4 * x.sigma_y**2.0 * 2.0 * np.log(2.)), axis=1) - # df["fwhm_x"] = df.apply(lambda x: x.gamma_x + np.sqrt(x.gamma_x**2.0 + 4 * x.sigma_x**2.0 * 2.0 * np.log(2.)), axis=1) - # df["fwhm_y"] = df.apply(lambda x: x.gamma_y + np.sqrt(x.gamma_y**2.0 + 4 * x.sigma_y**2.0 * 2.0 * np.log(2.)), axis=1) - - case lineshape.PV: - # calculate peak height - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=x.fraction, - ), - axis=1, - ) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp), axis=1 - ) - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) + df = calculate_lineshape_specific_height_and_fwhm(lineshape, df) + df = calculate_peak_centers_in_ppm(df, peakipy_data) + df = calculate_peak_linewidths_in_hz(df, peakipy_data) - case lineshape.G: - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=0.0, # gaussian - ), - axis=1, - ) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp), axis=1 - ) - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) - - case lineshape.L: - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=1.0, # lorentzian - ), - 
axis=1, - ) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp), axis=1 - ) - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) - - case lineshape.PV_PV: - # calculate peak height - df["height"] = df.apply( - lambda x: pv_pv( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction_x=x.fraction_x, - fraction_y=x.fraction_y, - ), - axis=1, - ) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp), axis=1 - ) - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) - - case _: - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) - #  convert values to ppm - df["center_x_ppm"] = df.center_x.apply(lambda x: peakipy_data.uc_f2.ppm(x)) - df["center_y_ppm"] = df.center_y.apply(lambda x: peakipy_data.uc_f1.ppm(x)) - df["init_center_x_ppm"] = df.init_center_x.apply( - lambda x: peakipy_data.uc_f2.ppm(x) - ) - df["init_center_y_ppm"] = df.init_center_y.apply( - lambda x: peakipy_data.uc_f1.ppm(x) - ) - df["sigma_x_ppm"] = df.sigma_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) - df["sigma_y_ppm"] = df.sigma_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) - df["fwhm_x_ppm"] = df.fwhm_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) - df["fwhm_y_ppm"] = df.fwhm_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) - df["fwhm_x_hz"] = df.fwhm_x.apply(lambda x: x * peakipy_data.hz_per_pt_f2) - df["fwhm_y_hz"] = df.fwhm_y.apply(lambda x: x * peakipy_data.hz_per_pt_f1) - - # save data - if suffix == ".csv": - df.to_csv(output, float_format="%.4f", index=False) - - elif suffix == ".tab": - df.to_csv(output, sep="\t", float_format="%.4f", index=False) - - else: - df.to_pickle(output) + save_data(df, output) print( """[green] diff --git a/peakipy/core.py b/peakipy/core.py index e40ac687..e14c0b40 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -38,7 +38,7 @@ from numpy import sqrt, log, pi, exp, finfo -from lmfit import Model +from lmfit import Model, Parameters from lmfit.model import ModelResult from lmfit.models import LinearModel from scipy.special import wofz @@ -878,397 +878,6 @@ def make_meshgrid(data_shape): return XY -def fit_first_plane( - group, - data, - uc_dics, - lineshape: Lineshape.PV, - xy_bounds=None, - verbose=False, - log=None, - noise=1.0, - fit_method="leastsq", - reference_plane_indices: List[int] = [], - threshold: Optional[float] = None, -): - r"""Deconvolute group of peaks - - :param group: pandas data from containing group of peaks using groupby("CLUSTID") - :type group: pandas.core.groupby.generic.DataFrameGroupBy - - :param data: NMR data - :type data: numpy.array - - :param uc_dics: nmrglue unit conversion dics {"f1":uc_f1,"f2":uc_f2} - :type uc_dics: dict - - :param lineshape: lineshape to fit (PV, G, L, V, G_L, PV_L, PV_G or PV_PV) - :type lineshape: str - - :param xy_bounds: set bounds on x y positions. 
None or (x_bound, y_bound) - :type xy_bounds: tuple - - :param plot: dir to save wireframe plots - :type plot: str - - :param show: interactive matplotlib plot - :type show: bool - - :param verbose: print what is happening to terminal - :type verbose: bool - - :param log: file - :type log: str - - :param noise: estimate of spectral noise for calculation of :math:`\chi^2` and :math:`\chi^2_{red}` - :type noise: float - - :param fit_method: method used by lmfit - :type fit_method: str - - :return: FitResult - :rtype: FitResult - - """ - lineshape_function = get_lineshape_function(lineshape) - - first_plane_data = data[0] - mask, peak = make_mask_from_peak_cluster(group, first_plane_data) - - x_radius = group.X_RADIUS.max() - y_radius = group.Y_RADIUS.max() - - max_x, min_x = get_limits_for_axis_in_points( - group_axis_points=group.X_AXISf, mask_radius_in_points=x_radius - ) - max_y, min_y = get_limits_for_axis_in_points( - group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius - ) - max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( - data.shape, max_x, min_x, max_y, min_y - ) - selected_data = select_reference_planes_using_indices( - data, reference_plane_indices - ).sum(axis=0) - mod, p_guess = make_models( - lineshape_function, - group, - selected_data, - lineshape=lineshape, - xy_bounds=xy_bounds, - ) - peak_slices = slice_peaks_from_data_using_mask(data, mask) - peak_slices = select_reference_planes_using_indices( - peak_slices, reference_plane_indices - ) - peak_slices = select_planes_above_threshold_from_masked_data(peak_slices, threshold) - peak_slices = peak_slices.sum(axis=0) - - XY = make_meshgrid(data.shape) - X, Y = XY - - XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) - - weights = 1.0 / np.array([noise] * len(np.ravel(peak_slices))) - - out = mod.fit( - peak_slices, XY=XY_slices, params=p_guess, weights=weights, method=fit_method - ) - - if verbose: - console.print(out.fit_report(), style="bold") - - z_sim = mod.eval(XY=XY, params=out.params) - z_sim[~mask] = np.nan - z_plot = first_plane_data.copy() - z_plot[~mask] = np.nan - #  also if peak position changed significantly from start then add warning - - _z_plot = z_plot[~np.isnan(z_plot)] - _z_sim = z_sim[~np.isnan(z_sim)] - - linmod = LinearModel() - linpars = linmod.guess(_z_sim, x=_z_plot) - linfit = linmod.fit(_z_sim, x=_z_plot, params=linpars) - slope = linfit.params["slope"].value - #  number of peaks in cluster - n_peaks = len(group) - - chi2 = out.chisqr - redchi = out.redchi - - fit_str = f""" - Cluster {peak.CLUSTID} containing {n_peaks} peaks - slope={slope:.3f} - - chi^2 = {chi2:.5f} - redchi = {redchi:.5f} - - """ - if (slope > 1.05) or (slope < 0.95): - fit_str += """ - 🧐 NEEDS CHECKING 🧐 - """ - # console.print(fit_str, style="bold yellow") - else: - # console.print(fit_str, style="green") - pass - - if log is not None: - log.write("".join("#" for _ in range(60)) + "\n\n") - log.write(fit_str + "\n\n") - # pass - else: - pass - - return FitResult( - out=out, - mask=mask, - fit_str=fit_str, - log=log, - group=group, - uc_dics=uc_dics, - min_x=min_x, - min_y=min_y, - max_x=max_x, - max_y=max_y, - X=X, - Y=Y, - Z=z_plot, - Z_sim=z_sim, - peak_slices=peak_slices, - XY_slices=XY_slices, - weights=weights, - mod=mod, - ) - - -class FitResult: - """Data structure for storing fit results""" - - def __init__( - self, - out: ModelResult, - mask: np.array, - fit_str: str, - log: str, - group: pd.core.groupby.generic.DataFrameGroupBy, - uc_dics: dict, - min_x: float, - min_y: float, - max_x: 
float, - max_y: float, - X: np.array, - Y: np.array, - Z: np.array, - Z_sim: np.array, - peak_slices: np.array, - XY_slices: np.array, - weights: np.array, - mod: Model, - ): - """Store output of fit_first_plane function""" - self.out = out - self.mask = mask - self.fit_str = fit_str - self.log = log - self.group = group - self.uc_dics = uc_dics - self.min_x = min_x - self.min_y = min_y - self.max_x = max_x - self.max_y = max_y - self.X = X - self.Y = Y - self.Z = Z - self.Z_sim = Z_sim - self.peak_slices = peak_slices - self.XY_slices = XY_slices - self.weights = weights - self.mod = mod - - def check_shifts(self): - """Calculate difference between initial peak positions - and check whether they moved too much from original - position - - """ - pass - - def jackknife(self): - """perform jackknife sampling to estimate fitting errors""" - jk_results = [] - for i in range(len(self.peak_slices)): - peak_slices = np.delete(self.peak_slices, i, None) - X = np.delete(self.XY_slices[0], i, None) - Y = np.delete(self.XY_slices[1], i, None) - weights = np.delete(self.weights, i, None) - jk_results.append( - self.mod.fit( - peak_slices, XY=[X, Y], params=self.out.params, weights=weights - ) - ) - - # print(jk_results) - amps = [] - sigma_xs = [] - sigma_ys = [] - names = [] - with open("test_jackknife", "w") as f: - for i in jk_results: - f.write(i.fit_report()) - amp, amp_err, name = get_params(i.params, "amp") - sigma_x, sigma_x_err, name_x = get_params(i.params, "sigma_x") - sigma_y, sigma_y_err, name_y = get_params(i.params, "sigma_y") - f.write(f"{amp},{amp_err},{name_y}\n") - amps.extend(amp) - names.extend(name_y) - sigma_xs.extend(sigma_x) - sigma_ys.extend(sigma_y) - - df = pd.DataFrame( - {"amp": amps, "name": names, "sigma_x": sigma_xs, "sigma_y": sigma_ys} - ) - grouped = df.groupby("name") - mean_amps = grouped.amp.mean() - std_amps = grouped.amp.std() - mean_sigma_x = grouped.sigma_x.mean() - std_sigma_x = grouped.sigma_x.std() - mean_sigma_y = grouped.sigma_y.mean() - std_sigma_y = grouped.sigma_y.std() - f.write("#####################################\n") - f.write( - f"{mean_amps}, {std_amps}, {mean_sigma_x}, {std_sigma_x}, {mean_sigma_y}, {std_sigma_y} " - ) - f.write(self.out.fit_report()) - f.write("#####################################\n") - # print(amps) - # mean = np.mean(amps) - # std = np.std(amps) - return JackKnifeResult(mean=mean_amps, std=std_amps) - - def plot(self, plot_path=None, show=False, mp=False): - """Matplotlib interactive plot of the fits""" - - if plot_path != None: - plot_path = Path(plot_path) - plot_path.mkdir(parents=True, exist_ok=True) - # plotting - fig = plt.figure(figsize=(8, 6)) - ax = fig.add_subplot(111, projection="3d") - # slice out plot area - x_plot = self.uc_dics["f2"].ppm( - self.X[self.min_y : self.max_y, self.min_x : self.max_x] - ) - y_plot = self.uc_dics["f1"].ppm( - self.Y[self.min_y : self.max_y, self.min_x : self.max_x] - ) - z_plot = self.Z[self.min_y : self.max_y, self.min_x : self.max_x] - - z_sim = self.Z_sim[self.min_y : self.max_y, self.min_x : self.max_x] - - ax.set_title( - r"$\chi^2$=" - + f"{self.out.chisqr:.3f}, " - + r"$\chi_{red}^2$=" - + f"{self.out.redchi:.4f}" - ) - - residual = z_plot - z_sim - cset = ax.contourf( - x_plot, - y_plot, - residual, - zdir="z", - offset=np.nanmin(z_plot) * 1.1, - alpha=0.5, - cmap=cm.coolwarm, - ) - cbl = fig.colorbar(cset, ax=ax, shrink=0.5, format="%.2e") - cbl.ax.set_title("Residual") - # plot raw data - ax.plot_wireframe(x_plot, y_plot, z_plot, color="#03353E", label="data") - - 
ax.set_xlabel("F2 ppm") - ax.set_ylabel("F1 ppm") - ax.plot_wireframe( - x_plot, y_plot, z_sim, color="#C1403D", linestyle="--", label="fit" - ) - - # axes will appear inverted - ax.view_init(30, 120) - - # Annotate plots - labs = [] - Z_lab = [] - Y_lab = [] - X_lab = [] - for k, v in self.out.params.valuesdict().items(): - if "amplitude" in k: - Z_lab.append(v) - # get prefix - labs.append(" ".join(k.split("_")[:-1])) - elif "center_x" in k: - X_lab.append(self.uc_dics["f2"].ppm(v)) - elif "center_y" in k: - Y_lab.append(self.uc_dics["f1"].ppm(v)) - #  this is dumb as !£$@ - # Z_lab = [ - # self.Z[ - # int(round(self.uc_dics["f1"](y, "ppm"))), - # int(round(self.uc_dics["f2"](x, "ppm"))), - # ] - # for x, y in zip(X_lab, Y_lab) - # ] - z_max = np.nanmax(z_plot.ravel()) - Z_lab = np.array(Z_lab) - z_max = z_max * (Z_lab / max(Z_lab)) - for l, x, y, z in zip(labs, X_lab, Y_lab, z_max): - # print(l, x, y, z) - # ax.text(x, y, z * 1.2, l, None) - z = z * 1.2 - ax.text(x, y, z, l, None) - ax.plot([x, x], [y, y], [0, z], linestyle="dotted", c="k", alpha=0.5) - - # plt.colorbar(contf) - plt.legend(bbox_to_anchor=(1.2, 1.1)) - - name = self.group.CLUSTID.iloc[0] - if show and not mp: - plt.savefig(plot_path / f"{name}.png", dpi=300) - - def exit_program(event): - exit() - - def next_plot(event): - plt.close() - - axexit = plt.axes([0.81, 0.05, 0.1, 0.075]) - bnexit = Button(axexit, "Exit") - bnexit.on_clicked(exit_program) - - axnext = plt.axes([0.71, 0.05, 0.1, 0.075]) - bnnext = Button(axnext, "Next") - bnnext.on_clicked(next_plot) - - plt.show() - else: - print( - "[red]Cannot use interactive matplotlib in multiprocess mode. Use --no-mp flag.[/red]" - ) - plt.savefig(plot_path / f"{name}.png", dpi=300) - # print(p_guess) - # close plot - plt.close() - else: - pass - - -class JackKnifeResult: - def __init__(self, mean, std): - self.mean = mean - self.std = std - - class Pseudo3D: """Read dic, data from NMRGlue and dims from input to create a Pseudo3D dataset @@ -2233,3 +1842,142 @@ def read_config(args, config_path="peakipy.config"): args["colors"] = colors return args, config + + +def calculate_height_for_voigt_lineshape(df): + df["height"] = df.apply( + lambda x: voigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + gamma_x=x.gamma_x, + gamma_y=x.gamma_y, + amplitude=x.amp, + ), + axis=1, + ) + df["height_err"] = df.apply( + lambda x: x.amp_err * (x.height / x.amp) if x.amp_err != None else 0.0, + axis=1, + ) + return df + + +def calculate_fwhm_for_voigt_lineshape(df): + df["fwhm_g_x"] = df.sigma_x.apply( + lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0)) + ) # fwhm of gaussian + df["fwhm_g_y"] = df.sigma_y.apply(lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0))) + df["fwhm_l_x"] = df.gamma_x.apply(lambda x: 2.0 * x) # fwhm of lorentzian + df["fwhm_l_y"] = df.gamma_y.apply(lambda x: 2.0 * x) + df["fwhm_x"] = df.apply( + lambda x: 0.5346 * x.fwhm_l_x + + np.sqrt(0.2166 * x.fwhm_l_x**2.0 + x.fwhm_g_x**2.0), + axis=1, + ) + df["fwhm_y"] = df.apply( + lambda x: 0.5346 * x.fwhm_l_y + + np.sqrt(0.2166 * x.fwhm_l_y**2.0 + x.fwhm_g_y**2.0), + axis=1, + ) + return df + + +def calculate_height_for_pseudo_voigt_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=x.fraction, + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_fwhm_for_pseudo_voigt_lineshape(df): 
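+ # in this parameterisation sigma_x/sigma_y act as half-widths, so the FWHM
+ # in points is simply 2 * sigma (the Voigt case above instead combines the
+ # Gaussian and Lorentzian widths via the Olivero-Longbothum approximation)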
+ df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) + df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) + return df + + +def calculate_height_for_gaussian_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=0.0, # gaussian + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_height_for_lorentzian_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=1.0, # lorentzian + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_height_for_pv_pv_lineshape(df): + df["height"] = df.apply( + lambda x: pv_pv( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction_x=x.fraction_x, + fraction_y=x.fraction_y, + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_peak_centers_in_ppm(df, peakipy_data): + #  convert values to ppm + df["center_x_ppm"] = df.center_x.apply(lambda x: peakipy_data.uc_f2.ppm(x)) + df["center_y_ppm"] = df.center_y.apply(lambda x: peakipy_data.uc_f1.ppm(x)) + df["init_center_x_ppm"] = df.init_center_x.apply( + lambda x: peakipy_data.uc_f2.ppm(x) + ) + df["init_center_y_ppm"] = df.init_center_y.apply( + lambda x: peakipy_data.uc_f1.ppm(x) + ) + return df + + +def calculate_peak_linewidths_in_hz(df, peakipy_data): + df["sigma_x_ppm"] = df.sigma_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) + df["sigma_y_ppm"] = df.sigma_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) + df["fwhm_x_ppm"] = df.fwhm_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) + df["fwhm_y_ppm"] = df.fwhm_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) + df["fwhm_x_hz"] = df.fwhm_x.apply(lambda x: x * peakipy_data.hz_per_pt_f2) + df["fwhm_y_hz"] = df.fwhm_y.apply(lambda x: x * peakipy_data.hz_per_pt_f1) + return df diff --git a/pyproject.toml b/pyproject.toml index ad532fa7..52c1d01e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ pytest = "^7.4.2" mkdocs-material = "^8.5.7" ipython = "^8.15.0" pre-commit = "^3.6.0" +coverage = "^7.4.2" [build-system] requires = ["poetry>=0.12"] diff --git a/test/test_main.py b/test/test_main.py new file mode 100644 index 00000000..ea40fe02 --- /dev/null +++ b/test/test_main.py @@ -0,0 +1,78 @@ +import tempfile +from dataclasses import dataclass, field + +import numpy as np +import pandas as pd +from pytest import fixture + +from peakipy.cli.main import ( + get_vclist, + check_for_include_column_and_add_if_missing, + select_specified_planes, + exclude_specified_planes, +) + + +@fixture +def actual_vclist(): + with tempfile.TemporaryFile() as fp: + fp.write(b"1\n2\n3\n") + fp.seek(0) + vclist = np.genfromtxt(fp) + return vclist + + +@dataclass +class PeakipyData: + df: pd.DataFrame = pd.DataFrame() + data: np.array = np.zeros((4, 10, 20)) + dims: list = field(default_factory=lambda: [0, 1, 2]) + + +def test_get_vclist(actual_vclist): + expected_vclist = np.array([1.0, 2.0, 3.0]) + np.testing.assert_array_equal(actual_vclist, expected_vclist) + + +def test_get_vclist_none(): + vclist = None + args = {} + args = get_vclist(vclist, args) + expected_args = dict(vclist=False) + assert args == expected_args + + +def 
test_check_for_include_column(): + peakipy_data = PeakipyData(pd.DataFrame()) + peakipy_data = check_for_include_column_and_add_if_missing(peakipy_data) + assert "include" in peakipy_data.df.columns + + +def test_select_specified_planes(): + plane = None + expected_plane_numbers = np.arange(4) + actual_plane_numbers, peakipy_data = select_specified_planes(plane, PeakipyData()) + np.testing.assert_array_equal(expected_plane_numbers, actual_plane_numbers) + + +def test_select_specified_planes_2(): + plane = [1, 2] + expected_plane_numbers = np.array([1, 2]) + actual_plane_numbers, peakipy_data = select_specified_planes(plane, PeakipyData()) + np.testing.assert_array_equal(expected_plane_numbers, actual_plane_numbers) + assert peakipy_data.data.shape == (2, 10, 20) + + +def test_exclude_specified_planes(): + plane = None + expected_plane_numbers = np.arange(4) + actual_plane_numbers, peakipy_data = exclude_specified_planes(plane, PeakipyData()) + np.testing.assert_array_equal(expected_plane_numbers, actual_plane_numbers) + + +def test_exclude_specified_planes_2(): + plane = [1, 2] + expected_plane_numbers = np.array([0, 3]) + actual_plane_numbers, peakipy_data = exclude_specified_planes(plane, PeakipyData()) + np.testing.assert_array_equal(expected_plane_numbers, actual_plane_numbers) + assert peakipy_data.data.shape == (2, 10, 20) From 1c4bafcc33b77b211a87b2f64850642313f33e11 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 25 Feb 2024 21:43:13 -0500 Subject: [PATCH 20/37] more re-factoring added jackknife error --- peakipy/cli/fit.py | 228 ++++++++++++++++++++------------------------ peakipy/cli/main.py | 2 + 2 files changed, 107 insertions(+), 123 deletions(-) diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 4930c813..854d9301 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -59,12 +59,33 @@ class FitPeaksArgs: exclude_plane: Optional[List[int]] = (None,) reference_plane_indices: List[int] = ([],) initial_fit_threshold: Optional[float] = (None,) + jack_knife_sample_errors: bool = False mp: bool = (True,) verbose: bool = (False,) @dataclass -class FirstPlaneFitInput: +class Config: + fit_method: str = "leastsq" + + +@dataclass +class FitPeaksInput: + """input data for the fit_peaks function""" + + args: FitPeaksArgs + data: np.array + config: Config + plane_numbers: list + + +@dataclass +class FitPeakClusterInput: + args: FitPeaksArgs + data: np.array + config: Config + plane_numbers: list + clustid: int group: pd.DataFrame last_peak: pd.DataFrame mask: np.array @@ -82,16 +103,10 @@ class FirstPlaneFitInput: weights: np.array fit_method: str = "leastsq" verbose: bool = False + masked_plane_data: np.array = field(init=False) - -@dataclass -class FitPeaksInput: - """input data for the fit_peaks function""" - - args: FitPeaksArgs - data: np.array - config: dict - plane_numbers: list + def __post_init__(self): + self.masked_plane_data = np.array([d[self.mask] for d in self.data]) @dataclass @@ -167,6 +182,7 @@ class FitPeaksResultDfRow(BaseModel): fwhm_y_ppm: float fwhm_x_hz: float fwhm_y_hz: float + jack_knife_sample_index: Optional[int] class FitPeaksResultRowGLPV(FitPeaksResultDfRow): @@ -285,26 +301,26 @@ def unpack_fitted_parameters_for_lineshape( def perform_initial_lineshape_fit_on_cluster_of_peaks( - first_plane_fit_input: FirstPlaneFitInput, + fit_peak_cluster_input: FitPeakClusterInput, ) -> FitResult: - mod = first_plane_fit_input.mod - peak_slices = first_plane_fit_input.peak_slices - XY_slices = first_plane_fit_input.XY_slices - p_guess = 
first_plane_fit_input.p_guess - weights = first_plane_fit_input.weights - fit_method = first_plane_fit_input.fit_method - mask = first_plane_fit_input.mask - XY = first_plane_fit_input.XY + mod = fit_peak_cluster_input.mod + peak_slices = fit_peak_cluster_input.peak_slices + XY_slices = fit_peak_cluster_input.XY_slices + p_guess = fit_peak_cluster_input.p_guess + weights = fit_peak_cluster_input.weights + fit_method = fit_peak_cluster_input.fit_method + mask = fit_peak_cluster_input.mask + XY = fit_peak_cluster_input.XY X, Y = XY - first_plane_data = first_plane_fit_input.first_plane_data - peak = first_plane_fit_input.last_peak - group = first_plane_fit_input.group - min_x = first_plane_fit_input.min_x - min_y = first_plane_fit_input.min_y - max_x = first_plane_fit_input.max_x - max_y = first_plane_fit_input.max_y - verbose = first_plane_fit_input.verbose - uc_dics = first_plane_fit_input.uc_dics + first_plane_data = fit_peak_cluster_input.first_plane_data + peak = fit_peak_cluster_input.last_peak + group = fit_peak_cluster_input.group + min_x = fit_peak_cluster_input.min_x + min_y = fit_peak_cluster_input.min_y + max_x = fit_peak_cluster_input.max_x + max_y = fit_peak_cluster_input.max_y + verbose = fit_peak_cluster_input.verbose + uc_dics = fit_peak_cluster_input.uc_dics out = mod.fit( peak_slices, XY=XY_slices, params=p_guess, weights=weights, method=fit_method @@ -342,13 +358,14 @@ def perform_initial_lineshape_fit_on_cluster_of_peaks( ) -def refit_peaks_with_constraints(fit_input: FitPeaksInput, fit_result: FitPeaksResult): +def refit_peak_cluster_with_constraints( + fit_input: FitPeakClusterInput, fit_result: FitPeaksResult +): fit_results = [] - for num, d in enumerate(fit_input.data): + for num, d in enumerate(fit_input.masked_plane_data): plane_number = fit_input.plane_numbers[num] - masked_data = d[fit_result.mask] fit_result.out.fit( - data=masked_data, + data=d, params=fit_result.out.params, weights=fit_result.weights, ) @@ -402,12 +419,10 @@ def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): return df -def prepare_group_of_peaks_for_fitting( - group, data, fit_peaks_input_args: FitPeaksArgs, fit_method="leastsq" -): - lineshape_function = get_lineshape_function(fit_peaks_input_args.lineshape) +def prepare_group_of_peaks_for_fitting(clustid, group, fit_peaks_input: FitPeaksInput): + lineshape_function = get_lineshape_function(fit_peaks_input.args.lineshape) - first_plane_data = data[0] + first_plane_data = fit_peaks_input.data[0] mask, peak = make_mask_from_peak_cluster(group, first_plane_data) x_radius = group.X_RADIUS.max() @@ -420,34 +435,39 @@ def prepare_group_of_peaks_for_fitting( group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius ) max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( - data.shape, max_x, min_x, max_y, min_y + fit_peaks_input.data.shape, max_x, min_x, max_y, min_y ) selected_data = select_reference_planes_using_indices( - data, fit_peaks_input_args.reference_plane_indices + fit_peaks_input.data, fit_peaks_input.args.reference_plane_indices ).sum(axis=0) mod, p_guess = make_models( lineshape_function, group, selected_data, - lineshape=fit_peaks_input_args.lineshape, - xy_bounds=fit_peaks_input_args.xy_bounds, + lineshape=fit_peaks_input.args.lineshape, + xy_bounds=fit_peaks_input.args.xy_bounds, ) - peak_slices = slice_peaks_from_data_using_mask(data, mask) + peak_slices = slice_peaks_from_data_using_mask(fit_peaks_input.data, mask) peak_slices = select_reference_planes_using_indices( - peak_slices, 
fit_peaks_input_args.reference_plane_indices + peak_slices, fit_peaks_input.args.reference_plane_indices ) peak_slices = select_planes_above_threshold_from_masked_data( - peak_slices, fit_peaks_input_args.initial_fit_threshold + peak_slices, fit_peaks_input.args.initial_fit_threshold ) peak_slices = peak_slices.sum(axis=0) - XY = make_meshgrid(data.shape) + XY = make_meshgrid(fit_peaks_input.data.shape) X, Y = XY XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) - weights = 1.0 / np.array([fit_peaks_input_args.noise] * len(np.ravel(peak_slices))) + weights = 1.0 / np.array([fit_peaks_input.args.noise] * len(np.ravel(peak_slices))) # weights = 1.0 / np.ravel(peak_slices) - return FirstPlaneFitInput( + return FitPeakClusterInput( + args=fit_peaks_input.args, + data=fit_peaks_input.data, + config=fit_peaks_input.config, + plane_numbers=fit_peaks_input.plane_numbers, + clustid=clustid, group=group, last_peak=peak, mask=mask, @@ -457,35 +477,29 @@ def prepare_group_of_peaks_for_fitting( peak_slices=peak_slices, XY_slices=XY_slices, weights=weights, - fit_method=fit_method, + fit_method=Config.fit_method, first_plane_data=first_plane_data, - uc_dics=fit_peaks_input_args.uc_dics, + uc_dics=fit_peaks_input.args.uc_dics, min_x=min_x, min_y=min_y, max_x=max_x, max_y=max_y, - verbose=fit_peaks_input_args.verbose, + verbose=fit_peaks_input.args.verbose, ) -def fit_cluster_of_peaks( - clustid: int, peak_cluster: pd.DataFrame, fit_input: FitPeaksInput -) -> pd.DataFrame: - data_for_fitting = prepare_group_of_peaks_for_fitting( - peak_cluster, - fit_input.data, - fit_input.args, - fit_method=fit_input.config.get("fit_method", "leastsq"), - ) +def fit_cluster_of_peaks(data_for_fitting: FitPeakClusterInput) -> pd.DataFrame: fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks(data_for_fitting) fit_result.out.params, float_str = set_parameters_to_fix_during_fit( - fit_result.out.params, fit_input.args.to_fix + fit_result.out.params, data_for_fitting.args.to_fix ) - fit_results = refit_peaks_with_constraints(fit_input, fit_result) + fit_results = refit_peak_cluster_with_constraints(data_for_fitting, fit_result) cluster_df = pd.DataFrame(fit_results) cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out) - cluster_df["clustid"] = clustid - cluster_df = merge_unpacked_parameters_with_metadata(cluster_df, peak_cluster) + cluster_df["clustid"] = data_for_fitting.clustid + cluster_df = merge_unpacked_parameters_with_metadata( + cluster_df, data_for_fitting.group + ) return cluster_df @@ -509,9 +523,15 @@ def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaks out_str = "" cluster_dfs = [] for clustid, peak_cluster in peak_clusters: - cluster_df = fit_cluster_of_peaks( - clustid=clustid, peak_cluster=peak_cluster, fit_input=fit_input + data_for_fitting = prepare_group_of_peaks_for_fitting( + clustid, + peak_cluster, + fit_input, ) + if fit_input.args.jack_knife_sample_errors: + cluster_df = jack_knife_sample_errors(data_for_fitting) + else: + cluster_df = fit_cluster_of_peaks(data_for_fitting) cluster_dfs.append(cluster_df) df = pd.concat(cluster_dfs, ignore_index=True) df["lineshape"] = fit_input.args.lineshape.value @@ -522,62 +542,24 @@ def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaks return FitPeaksResult(df=df, log=out_str) -@dataclass -class JackKnifeResult: - mean: float - std: float - - -def jack_knife_sample_errors( - peaks: pd.DataFrame, fit_input: FirstPlaneFitInput -) -> JackKnifeResult: - peak_slices = 
fit_input.peak_slices - XY_slices = fit_input.XY_slices - weights = fit_input.weights +def jack_knife_sample_errors(fit_input: FitPeakClusterInput) -> pd.DataFrame: + peak_slices = fit_input.peak_slices.copy() + XY_slices = fit_input.XY_slices.copy() + weights = fit_input.weights.copy() + masked_plane_data = fit_input.masked_plane_data.copy() jk_results = [] - for i in range(len(peak_slices)): - peak_slices = np.delete(peak_slices, i, None) - X = np.delete(XY_slices[0], i, None) - Y = np.delete(XY_slices[1], i, None) - weights = np.delete(weights, i, None) - jk_results.append( - mod.fit(peak_slices, XY=[X, Y], params=out.params, weights=weights) - ) - - # print(jk_results) - amps = [] - sigma_xs = [] - sigma_ys = [] - names = [] - with open("test_jackknife", "w") as f: - for i in jk_results: - f.write(i.fit_report()) - amp, amp_err, name = get_params(i.params, "amp") - sigma_x, sigma_x_err, name_x = get_params(i.params, "sigma_x") - sigma_y, sigma_y_err, name_y = get_params(i.params, "sigma_y") - f.write(f"{amp},{amp_err},{name_y}\n") - amps.extend(amp) - names.extend(name_y) - sigma_xs.extend(sigma_x) - sigma_ys.extend(sigma_y) - - df = pd.DataFrame( - {"amp": amps, "name": names, "sigma_x": sigma_xs, "sigma_y": sigma_ys} - ) - grouped = df.groupby("name") - mean_amps = grouped.amp.mean() - std_amps = grouped.amp.std() - mean_sigma_x = grouped.sigma_x.mean() - std_sigma_x = grouped.sigma_x.std() - mean_sigma_y = grouped.sigma_y.mean() - std_sigma_y = grouped.sigma_y.std() - f.write("#####################################\n") - f.write( - f"{mean_amps}, {std_amps}, {mean_sigma_x}, {std_sigma_x}, {mean_sigma_y}, {std_sigma_y} " - ) - f.write(self.out.fit_report()) - f.write("#####################################\n") - # print(amps) - # mean = np.mean(amps) - # std = np.std(amps) - return JackKnifeResult(mean=mean_amps, std=std_amps) + # first fit without jackknife + jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input) + jk_result["jack_knife_sample_index"] = 0 + jk_results.append(jk_result) + for i in np.arange(0, len(peak_slices), 10, dtype=int): + fit_input.peak_slices = np.delete(peak_slices, i, None) + XY_slices_0 = np.delete(XY_slices[0], i, None) + XY_slices_1 = np.delete(XY_slices[1], i, None) + fit_input.XY_slices = np.array([XY_slices_0, XY_slices_1]) + fit_input.weights = np.delete(weights, i, None) + fit_input.masked_plane_data = np.delete(masked_plane_data, i, axis=1) + jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input) + jk_result["jack_knife_sample_index"] = i + 1 + jk_results.append(jk_result) + return pd.concat(jk_results, ignore_index=True) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 4d4f44b0..a7687395 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -617,6 +617,7 @@ def fit( List[int], typer.Option(help=reference_plane_index_help) ] = [], initial_fit_threshold: Optional[float] = None, + jack_knife_sample_errors: bool = False, mp: bool = True, verbose: bool = False, ): @@ -680,6 +681,7 @@ def fit( args["mp"] = mp args["initial_fit_threshold"] = initial_fit_threshold args["reference_plane_indices"] = reference_plane_index + args["jack_knife_sample_errors"] = jack_knife_sample_errors args = get_vclist(vclist, args) # plot results or not From 933810c205771d2db36f62c97d71e1dea110fdf8 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 26 Feb 2024 22:25:21 -0500 Subject: [PATCH 21/37] prototype edit and check panel --- peakipy/cli/check_panel.py | 12 +++++++++--- peakipy/cli/edit.py | 17 ++++++++++++++--- peakipy/cli/edit_panel.py 
| 38 ++++++++++++++++++++++++++++++++++++++
 peakipy/cli/main.py | 12 ++++++------
 4 files changed, 67 insertions(+), 12 deletions(-)
 create mode 100644 peakipy/cli/edit_panel.py

diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py
index aba815cc..3df7f2c9 100644
--- a/peakipy/cli/check_panel.py
+++ b/peakipy/cli/check_panel.py
@@ -75,8 +75,11 @@ def create_plotly_pane(cluster, plane):


 @app.command()
-def check_panel(
- fits_path: Path, data_path: Path, config_path: Path = Path("./peakipy.config")
+def create_check_panel(
+ fits_path: Path,
+ data_path: Path,
+ config_path: Path = Path("./peakipy.config"),
+ edit_panel: bool = False,
 ):
 data = data_singleton()
 data.fits_path = fits_path
@@ -106,7 +109,10 @@ def check_panel(
 pn.Row(interactive_plotly_pane, interactive_cluster_pane),
 title="Peakipy check",
 )
- check_pane.show()
+ if edit_panel:
+ return check_pane
+ else:
+ check_pane.show()


 if __name__ == "__main__":
diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py
index 172dab4f..1acb5943 100644
--- a/peakipy/cli/edit.py
+++ b/peakipy/cli/edit.py
@@ -14,6 +14,9 @@
 from skimage.filters import threshold_otsu
 from rich import print
+
+import panel as pn
+
 from bokeh.io import curdoc
 from bokeh.events import ButtonClick, DoubleTap
 from bokeh.layouts import row, column, grid
@@ -39,6 +42,7 @@
 from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256, YlOrRd9
 from peakipy.core import LoadData, read_config, StrucEl
+from check_panel import create_check_panel
 log_style = "overflow:scroll;"
 log_div = """<div style=%s>%s</div>"""
@@ -69,6 +73,7 @@ def __init__(self, peaklist_path: Path, data_path: Path):
 self.setup_initial_fit_threshold()
 self.setup_quit_button()
 self.setup_plot()
+ self.check_pane = ""

 def init(self, doc):
 """initialise the bokeh app"""
@@ -714,13 +719,19 @@ def fit_selected(self, event):
 if self.checkbox_group.active == []:
 fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}"
 # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}"
- plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"
+ self.check_pane = create_check_panel(
+ self.TEMP_OUT_CSV, self.data_path, edit_panel=True
+ )
+ # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"
 else:
 plane_index = self.select_plane.value
 print(f"[yellow]Only fitting plane {plane_index}[/yellow]")
 fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}"
+ self.check_pane = create_check_panel(
+ self.TEMP_OUT_CSV, self.data_path, edit_panel=True
+ )
 # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index} --show"
- plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"
+ # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"

 print(f"[blue]{fit_command}[/blue]")
 self.fit_reports += fit_command + "<br>"
" @@ -730,7 +741,7 @@ def fit_selected(self, event): self.fit_reports = self.fit_reports.replace("\n", "
") self.fit_reports_div.text = log_div % (log_style, self.fit_reports) # plot data - os.system(plot_command) + # os.system(plot_command) def save_peaks(self, event): if self.savefilename.value: diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py new file mode 100644 index 00000000..b1b601f1 --- /dev/null +++ b/peakipy/cli/edit_panel.py @@ -0,0 +1,38 @@ +from pathlib import Path +import panel as pn + +from edit import BokehScript +from check_panel import create_check_panel + +pn.extension("plotly") +pn.config.theme = "dark" + +bs = BokehScript( + peaklist_path=Path("./edited_peaks.csv"), data_path=Path("./test1.ft2") +) +bokeh_pane = pn.pane.Bokeh(bs.p) +table_pane = pn.pane.Bokeh(bs.data_table) +spectrum_view_settings = pn.WidgetBox( + "# View settings", bs.pos_neg_contour_radiobutton, bs.contour_start +) +button = pn.widgets.Button(name="Click me", button_type="primary") +fit_controls = pn.WidgetBox( + "# Fit controls", bs.select_lineshape_radiobuttons, bs.fit_button, button +) + + +def b(event): + check_app.loading = True + check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True) + check_app.objects = check_panel.objects + check_app.loading = False + + +button.on_click(b) +fit_app = pn.Card( + pn.Column(bokeh_pane, fit_controls, spectrum_view_settings, table_pane), + title="Peakipy fit", +) +check_app = pn.Card(title="Peakipy check") +app = pn.Column(fit_app, check_app) +server = app.show(threaded=True) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index a7687395..a497afd3 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -85,7 +85,6 @@ FitPeaksInput, FitPeaksArgs, ) -from .edit import BokehScript from .spec import yaml_file app = typer.Typer() @@ -1475,15 +1474,15 @@ def check( if ccpn_flag: plt = PlotterWidget() - else: - plt = matplotlib.pyplot - create_matplotlib_figure( - plot_data, pdf, individual, label, ccpn_flag, show - ) # fig = create_plotly_figure(plot_data) if plotly: fig = create_plotly_figure(plot_data) return fig + else: + plt = matplotlib.pyplot + create_matplotlib_figure( + plot_data, pdf, individual, label, ccpn_flag, show + ) # surf = pn.pane.plotly.Plotly(fig) # app = pn.Column(surf) # app.show(threaded=True) @@ -1501,6 +1500,7 @@ def edit( ): from bokeh.util.browser import view from bokeh.server.server import Server + from .edit import BokehScript run_log() bs = BokehScript(peaklist_path=peaklist_path, data_path=data_path) From ed1e58e88325bd37376dc5fffda766ee4cfa40c0 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 26 Feb 2024 22:32:26 -0500 Subject: [PATCH 22/37] prototype edit and check panel --- peakipy/cli/edit.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 1acb5943..1d50cf98 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -42,7 +42,6 @@ from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256, YlOrRd9 from peakipy.core import LoadData, read_config, StrucEl -from check_panel import create_check_panel log_style = "overflow:scroll;" log_div = """
@@ -719,17 +718,17 @@ def fit_selected(self, event):
 if self.checkbox_group.active == []:
 fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}"
 # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}"
- self.check_pane = create_check_panel(
- self.TEMP_OUT_CSV, self.data_path, edit_panel=True
- )
+ # self.check_pane = create_check_panel(
+ # self.TEMP_OUT_CSV, self.data_path, edit_panel=True
+ # )
 # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"
 else:
 plane_index = self.select_plane.value
 print(f"[yellow]Only fitting plane {plane_index}[/yellow]")
 fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}"
+ # self.check_pane = create_check_panel(
+ # self.TEMP_OUT_CSV, self.data_path, edit_panel=True
+ # )
 # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index} --show"
 # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}"
From 66f1d1119055be9e9297943b814dd4bad0342c46 Mon Sep 17 00:00:00 2001
From: Jacob Brady
Date: Wed, 28 Feb 2024 22:22:20 -0500
Subject: [PATCH 23/37] upgrading fit and check panels

---
 peakipy/cli/check_panel.py | 5 +-
 peakipy/cli/edit.py | 81 ++++---
 peakipy/cli/edit_panel.py | 142 +++++++++---
 poetry.lock | 450 +++++++++++++++++++++----------------
 pyproject.toml | 1 +
 5 files changed, 416 insertions(+), 263 deletions(-)

diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py
index 3df7f2c9..2bd2ceb4 100644
--- a/peakipy/cli/check_panel.py
+++ b/peakipy/cli/check_panel.py
@@ -8,7 +8,7 @@
 from peakipy.cli.main import check, validate_fit_dataframe

 pn.extension()
-pn.config.theme = "dark"
+# pn.config.theme = "dark"


 @dataclass
@@ -104,8 +104,7 @@ def create_check_panel(
 )
 check_pane = pn.Card(
 info_pane,
- select_cluster,
- select_plane,
+ pn.Row(select_cluster, select_plane),
 pn.Row(interactive_plotly_pane, interactive_cluster_pane),
 title="Peakipy check",
 )
diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py
index 1d50cf98..d0ff2e6c 100644
--- a/peakipy/cli/edit.py
+++ b/peakipy/cli/edit.py
@@ -85,7 +85,7 @@ def init(self, doc):
 )
 )
 doc.title = "peakipy: Edit Fits"
- doc.theme = "dark_minimal"
+ # doc.theme = "dark_minimal"

 @property
 def args(self):
@@ -496,45 +496,52 @@ def setup_plot(self):
 editable=True,
 width=1200,
 )
+
 self.table_style = InlineStyleSheet(
 css="""
- .slick-header-columns {
- background-color: #00296b !important;
- font-family: arial;
- font-weight: bold;
- font-size: 12pt;
- color: #FFFFFF;
- text-align: right;
- }
- .slick-header-column:hover {
- background: none repeat scroll 0 0 #fdc500;
- }
- .slick-row {
- font-size: 12pt;
- font-family: arial;
- text-align: left;
- }
- .slick-row:hover{
- background: none repeat scroll 0 0 #7c7c7c;
- }
- .slick-cell {
- header-font-weight: 500;
- border-width: 1px 1px 1px 1px;
- border-color: #d4d4d4;
- background-color: #00509D;
- color: #FFFFFF;
- }
- .slick-cell.selected {
- header-font-weight: 500;
- border-width: 1px 1px 1px 1px;
- border-color: #00509D;
- 
background-color: #FDC500; - color: black; - } - - - """ + .slick-row.even { background: #263140; } + .slick-row.odd { background: #505c6d; } + .slick-cell.l0 {background: #1f2937;} + """ ) + # self.table_style = InlineStyleSheet( + # css=""" + # .slick-header-columns { + # background-color: #00296b !important; + # font-family: arial; + # font-weight: bold; + # font-size: 12pt; + # color: #FFFFFF; + # text-align: right; + # } + # .slick-header-column:hover { + # background: none repeat scroll 0 0 #fdc500; + # } + # .slick-row { + # font-size: 12pt; + # font-family: arial; + # text-align: left; + # } + # .slick-row:hover{ + # background: none repeat scroll 0 0 #7c7c7c; + # } + # .slick-cell { + # header-font-weight: 500; + # border-width: 1px 1px 1px 1px; + # border-color: #d4d4d4; + # background-color: #00509D; + # color: #FFFFFF; + # } + # .slick-cell.selected { + # header-font-weight: 500; + # border-width: 1px 1px 1px 1px; + # border-color: #00509D; + # background-color: #FDC500; + # color: black; + # } + + # """ + # ) self.data_table.stylesheets = [self.table_style] diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py index b1b601f1..ddadcc40 100644 --- a/peakipy/cli/edit_panel.py +++ b/peakipy/cli/edit_panel.py @@ -1,38 +1,118 @@ from pathlib import Path +from dataclasses import dataclass, field +from functools import lru_cache + import panel as pn +from typer import Typer + +from peakipy.cli.edit import BokehScript +from peakipy.cli.check_panel import create_check_panel -from edit import BokehScript -from check_panel import create_check_panel +app = Typer() pn.extension("plotly") pn.config.theme = "dark" -bs = BokehScript( - peaklist_path=Path("./edited_peaks.csv"), data_path=Path("./test1.ft2") -) -bokeh_pane = pn.pane.Bokeh(bs.p) -table_pane = pn.pane.Bokeh(bs.data_table) -spectrum_view_settings = pn.WidgetBox( - "# View settings", bs.pos_neg_contour_radiobutton, bs.contour_start -) -button = pn.widgets.Button(name="Click me", button_type="primary") -fit_controls = pn.WidgetBox( - "# Fit controls", bs.select_lineshape_radiobuttons, bs.fit_button, button -) - - -def b(event): - check_app.loading = True - check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True) - check_app.objects = check_panel.objects - check_app.loading = False - - -button.on_click(b) -fit_app = pn.Card( - pn.Column(bokeh_pane, fit_controls, spectrum_view_settings, table_pane), - title="Peakipy fit", -) -check_app = pn.Card(title="Peakipy check") -app = pn.Column(fit_app, check_app) -server = app.show(threaded=True) + +@dataclass +class Data: + peaklist_path: Path = Path("./test.csv") + data_path: Path = Path("./test.ft2") + _bs: BokehScript = field(init=False) + + def load_data(self): + self._bs = BokehScript(self.peaklist_path, self.data_path) + + @property + def bs(self): + return self._bs + + +@lru_cache(maxsize=1) +def data_singleton(): + return Data() + + +def panel_app(): + data = data_singleton() + bs = data.bs + bokeh_pane = pn.pane.Bokeh(bs.p) + # table_pane = pn.pane.Bokeh(bs.data_table) + table_pane = pn.widgets.Tabulator( + bs.peakipy_data.df[ + [ + "ASS", + "CLUSTID", + "X_PPM", + "Y_PPM", + "X_RADIUS_PPM", + "Y_RADIUS_PPM", + "XW_HZ", + "YW_HZ", + "VOL", + "include", + "MEMCNT", + ] + ] + ) + + spectrum_view_settings = pn.WidgetBox( + "# Contour settings", bs.pos_neg_contour_radiobutton, bs.contour_start + ) + button = pn.widgets.Button(name="Fit selected cluster(s)", button_type="primary") + fit_controls = pn.WidgetBox( + "# Fit controls", + bs.select_plane, + 
+        bs.checkbox_group,
+        pn.layout.Divider(),
+        bs.select_reference_planes_help,
+        bs.select_reference_planes,
+        pn.layout.Divider(),
+        bs.set_initial_fit_threshold_help,
+        bs.set_initial_fit_threshold,
+        pn.layout.Divider(),
+        bs.select_fixed_parameters_help,
+        bs.select_fixed_parameters,
+        pn.layout.Divider(),
+        bs.select_lineshape_radiobuttons_help,
+        bs.select_lineshape_radiobuttons,
+        pn.layout.Divider(),
+        button,
+    )
+
+    mask_adjustment_controls = pn.WidgetBox(
+        "# Fitting mask adjustment", bs.slider_X_RADIUS, bs.slider_Y_RADIUS
+    )
+
+    def b(event):
+        check_app.loading = True
+        bs.fit_selected(None)
+        check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True)
+        check_app.objects = check_panel.objects
+        check_app.loading = False
+
+    button.on_click(b)
+    template = pn.template.BootstrapTemplate(
+        title="Peakipy",
+        sidebar=[mask_adjustment_controls, fit_controls],
+    )
+    spectrum = pn.Card(
+        pn.Column(pn.Row(bokeh_pane, spectrum_view_settings), table_pane),
+        title="Peakipy fit",
+    )
+    check_app = pn.Card(title="Peakipy check")
+    template.main.append(pn.Row(spectrum, check_app))
+    template.show()
+
+
+@app.command()
+def main(peaklist_path: Path, data_path: Path):
+    data = data_singleton()
+    data.peaklist_path = peaklist_path
+    data.data_path = data_path
+    data.load_data()
+    panel_app()
+
+
+if __name__ == "__main__":
+    app()
diff --git a/poetry.lock b/poetry.lock
index ad256b94..c86b7abc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -355,6 +355,70 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.6.1)", "types-Pill
 test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
 test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"]
 
+[[package]]
+name = "coverage"
+version = "7.4.3"
+description = "Code coverage measurement for Python"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"},
+    {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"},
+    {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"},
+    {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"},
+    {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"},
+    {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"},
+    {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"},
+    {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"},
+    {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"},
+    {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"},
+    {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash =
"sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, + {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, + {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, + {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, + {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, + {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, + {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, + {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, + {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, + {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, + {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, + {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, + {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, + {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, + {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, + {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, + {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, + {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, + {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, + {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, + {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, + {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, + {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cycler" version = "0.12.1" @@ -438,53 +502,53 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "fonttools" -version = "4.48.1" +version = "4.49.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.48.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:702ae93058c81f46461dc4b2c79f11d3c3d8fd7296eaf8f75b4ba5bbf813cd5f"}, - {file = "fonttools-4.48.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:97f0a49fa6aa2d6205c6f72f4f98b74ef4b9bfdcb06fd78e6fe6c7af4989b63e"}, - {file = "fonttools-4.48.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3260db55f1843e57115256e91247ad9f68cb02a434b51262fe0019e95a98738"}, - {file = "fonttools-4.48.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e740a7602c2bb71e1091269b5dbe89549749a8817dc294b34628ffd8b2bf7124"}, - {file = "fonttools-4.48.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4108b1d247953dd7c90ec8f457a2dec5fceb373485973cc852b14200118a51ee"}, - {file = "fonttools-4.48.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56339ec557f0c342bddd7c175f5e41c45fc21282bee58a86bd9aa322bec715f2"}, - {file = "fonttools-4.48.1-cp310-cp310-win32.whl", hash = "sha256:bff5b38d0e76eb18e0b8abbf35d384e60b3371be92f7be36128ee3e67483b3ec"}, - {file = "fonttools-4.48.1-cp310-cp310-win_amd64.whl", hash = "sha256:f7449493886da6a17472004d3818cc050ba3f4a0aa03fb47972e4fa5578e6703"}, - {file = "fonttools-4.48.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18b35fd1a850ed7233a99bbd6774485271756f717dac8b594958224b54118b61"}, - {file = "fonttools-4.48.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cad5cfd044ea2e306fda44482b3dd32ee47830fa82dfa4679374b41baa294f5f"}, - {file = "fonttools-4.48.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f30e605c7565d0da6f0aec75a30ec372072d016957cd8fc4469721a36ea59b7"}, - {file = "fonttools-4.48.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee76fd81a8571c68841d6ef0da750d5ff08ff2c5f025576473016f16ac3bcf7"}, - {file = "fonttools-4.48.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5057ade278e67923000041e2b195c9ea53e87f227690d499b6a4edd3702f7f01"}, - {file = "fonttools-4.48.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b10633aafc5932995a391ec07eba5e79f52af0003a1735b2306b3dab8a056d48"}, - {file = "fonttools-4.48.1-cp311-cp311-win32.whl", hash = "sha256:0d533f89819f9b3ee2dbedf0fed3825c425850e32bdda24c558563c71be0064e"}, - {file = "fonttools-4.48.1-cp311-cp311-win_amd64.whl", hash = "sha256:d20588466367f05025bb1efdf4e5d498ca6d14bde07b6928b79199c588800f0a"}, - {file = "fonttools-4.48.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0a2417547462e468edf35b32e3dd06a6215ac26aa6316b41e03b8eeaf9f079ea"}, - {file = "fonttools-4.48.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cf5a0cd974f85a80b74785db2d5c3c1fd6cc09a2ba3c837359b2b5da629ee1b0"}, - {file = "fonttools-4.48.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0452fcbfbce752ba596737a7c5ec5cf76bc5f83847ce1781f4f90eab14ece252"}, - {file = "fonttools-4.48.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:578c00f93868f64a4102ecc5aa600a03b49162c654676c3fadc33de2ddb88a81"}, - {file = "fonttools-4.48.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:63dc592a16cd08388d8c4c7502b59ac74190b23e16dfc863c69fe1ea74605b68"}, - {file = "fonttools-4.48.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9b58638d8a85e3a1b32ec0a91d9f8171a877b4b81c408d4cb3257d0dee63e092"}, - {file = "fonttools-4.48.1-cp312-cp312-win32.whl", hash = "sha256:d10979ef14a8beaaa32f613bb698743f7241d92f437a3b5e32356dfb9769c65d"}, - {file = "fonttools-4.48.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdfd7557d1bd294a200bd211aa665ca3b02998dcc18f8211a5532da5b8fad5c5"}, - {file = "fonttools-4.48.1-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:3cdb9a92521b81bf717ebccf592bd0292e853244d84115bfb4db0c426de58348"}, - {file = "fonttools-4.48.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b4ec6d42a7555f5ae35f3b805482f0aad0f1baeeef54859492ea3b782959d4a"}, - {file = "fonttools-4.48.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:902e9c4e9928301912f34a6638741b8ae0b64824112b42aaf240e06b735774b1"}, - {file = "fonttools-4.48.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8c8b54bd1420c184a995f980f1a8076f87363e2bb24239ef8c171a369d85a31"}, - {file = "fonttools-4.48.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:12ee86abca46193359ea69216b3a724e90c66ab05ab220d39e3fc068c1eb72ac"}, - {file = "fonttools-4.48.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6978bade7b6c0335095bdd0bd97f8f3d590d2877b370f17e03e0865241694eb5"}, - {file = "fonttools-4.48.1-cp38-cp38-win32.whl", hash = "sha256:bcd77f89fc1a6b18428e7a55dde8ef56dae95640293bfb8f4e929929eba5e2a2"}, - {file = "fonttools-4.48.1-cp38-cp38-win_amd64.whl", hash = "sha256:f40441437b039930428e04fb05ac3a132e77458fb57666c808d74a556779e784"}, - {file = "fonttools-4.48.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0d2b01428f7da26f229a5656defc824427b741e454b4e210ad2b25ed6ea2aed4"}, - {file = "fonttools-4.48.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:df48798f9a4fc4c315ab46e17873436c8746f5df6eddd02fad91299b2af7af95"}, - {file = "fonttools-4.48.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2eb4167bde04e172a93cf22c875d8b0cff76a2491f67f5eb069566215302d45d"}, - {file = "fonttools-4.48.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c900508c46274d32d308ae8e82335117f11aaee1f7d369ac16502c9a78930b0a"}, - {file = "fonttools-4.48.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:594206b31c95fcfa65f484385171fabb4ec69f7d2d7f56d27f17db26b7a31814"}, - {file = "fonttools-4.48.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:292922dc356d7f11f5063b4111a8b719efb8faea92a2a88ed296408d449d8c2e"}, - {file = "fonttools-4.48.1-cp39-cp39-win32.whl", hash = "sha256:4709c5bf123ba10eac210d2d5c9027d3f472591d9f1a04262122710fa3d23199"}, - {file = "fonttools-4.48.1-cp39-cp39-win_amd64.whl", hash = "sha256:63c73b9dd56a94a3cbd2f90544b5fca83666948a9e03370888994143b8d7c070"}, - {file = "fonttools-4.48.1-py3-none-any.whl", hash = "sha256:e3e33862fc5261d46d9aae3544acb36203b1a337d00bdb5d3753aae50dac860e"}, - {file = "fonttools-4.48.1.tar.gz", hash = "sha256:8b8a45254218679c7f1127812761e7854ed5c8e34349aebf581e8c9204e7495a"}, + {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d970ecca0aac90d399e458f0b7a8a597e08f95de021f17785fb68e2dc0b99717"}, + {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac9a745b7609f489faa65e1dc842168c18530874a5f5b742ac3dd79e26bca8bc"}, + {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ba0e00620ca28d4ca11fc700806fd69144b463aa3275e1b36e56c7c09915559"}, + {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdee3ab220283057e7840d5fb768ad4c2ebe65bdba6f75d5d7bf47f4e0ed7d29"}, + {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ce7033cb61f2bb65d8849658d3786188afd80f53dad8366a7232654804529532"}, + {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:07bc5ea02bb7bc3aa40a1eb0481ce20e8d9b9642a9536cde0218290dd6085828"}, + {file = 
"fonttools-4.49.0-cp310-cp310-win32.whl", hash = "sha256:86eef6aab7fd7c6c8545f3ebd00fd1d6729ca1f63b0cb4d621bccb7d1d1c852b"}, + {file = "fonttools-4.49.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fac1b7eebfce75ea663e860e7c5b4a8831b858c17acd68263bc156125201abf"}, + {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:edc0cce355984bb3c1d1e89d6a661934d39586bb32191ebff98c600f8957c63e"}, + {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:83a0d9336de2cba86d886507dd6e0153df333ac787377325a39a2797ec529814"}, + {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36c8865bdb5cfeec88f5028e7e592370a0657b676c6f1d84a2108e0564f90e22"}, + {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33037d9e56e2562c710c8954d0f20d25b8386b397250d65581e544edc9d6b942"}, + {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8fb022d799b96df3eaa27263e9eea306bd3d437cc9aa981820850281a02b6c9a"}, + {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33c584c0ef7dc54f5dd4f84082eabd8d09d1871a3d8ca2986b0c0c98165f8e86"}, + {file = "fonttools-4.49.0-cp311-cp311-win32.whl", hash = "sha256:cbe61b158deb09cffdd8540dc4a948d6e8f4d5b4f3bf5cd7db09bd6a61fee64e"}, + {file = "fonttools-4.49.0-cp311-cp311-win_amd64.whl", hash = "sha256:fc11e5114f3f978d0cea7e9853627935b30d451742eeb4239a81a677bdee6bf6"}, + {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d647a0e697e5daa98c87993726da8281c7233d9d4ffe410812a4896c7c57c075"}, + {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f3bbe672df03563d1f3a691ae531f2e31f84061724c319652039e5a70927167e"}, + {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bebd91041dda0d511b0d303180ed36e31f4f54b106b1259b69fade68413aa7ff"}, + {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4145f91531fd43c50f9eb893faa08399816bb0b13c425667c48475c9f3a2b9b5"}, + {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea329dafb9670ffbdf4dbc3b0e5c264104abcd8441d56de77f06967f032943cb"}, + {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c076a9e548521ecc13d944b1d261ff3d7825048c338722a4bd126d22316087b7"}, + {file = "fonttools-4.49.0-cp312-cp312-win32.whl", hash = "sha256:b607ea1e96768d13be26d2b400d10d3ebd1456343eb5eaddd2f47d1c4bd00880"}, + {file = "fonttools-4.49.0-cp312-cp312-win_amd64.whl", hash = "sha256:a974c49a981e187381b9cc2c07c6b902d0079b88ff01aed34695ec5360767034"}, + {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b85ec0bdd7bdaa5c1946398cbb541e90a6dfc51df76dfa88e0aaa41b335940cb"}, + {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:af20acbe198a8a790618ee42db192eb128afcdcc4e96d99993aca0b60d1faeb4"}, + {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d418b1fee41a1d14931f7ab4b92dc0bc323b490e41d7a333eec82c9f1780c75"}, + {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b44a52b8e6244b6548851b03b2b377a9702b88ddc21dcaf56a15a0393d425cb9"}, + {file = "fonttools-4.49.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7c7125068e04a70739dad11857a4d47626f2b0bd54de39e8622e89701836eabd"}, + {file = 
"fonttools-4.49.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29e89d0e1a7f18bc30f197cfadcbef5a13d99806447c7e245f5667579a808036"}, + {file = "fonttools-4.49.0-cp38-cp38-win32.whl", hash = "sha256:9d95fa0d22bf4f12d2fb7b07a46070cdfc19ef5a7b1c98bc172bfab5bf0d6844"}, + {file = "fonttools-4.49.0-cp38-cp38-win_amd64.whl", hash = "sha256:768947008b4dc552d02772e5ebd49e71430a466e2373008ce905f953afea755a"}, + {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:08877e355d3dde1c11973bb58d4acad1981e6d1140711230a4bfb40b2b937ccc"}, + {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fdb54b076f25d6b0f0298dc706acee5052de20c83530fa165b60d1f2e9cbe3cb"}, + {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0af65c720520710cc01c293f9c70bd69684365c6015cc3671db2b7d807fe51f2"}, + {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f255ce8ed7556658f6d23f6afd22a6d9bbc3edb9b96c96682124dc487e1bf42"}, + {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d00af0884c0e65f60dfaf9340e26658836b935052fdd0439952ae42e44fdd2be"}, + {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:263832fae27481d48dfafcc43174644b6706639661e242902ceb30553557e16c"}, + {file = "fonttools-4.49.0-cp39-cp39-win32.whl", hash = "sha256:0404faea044577a01bb82d47a8fa4bc7a54067fa7e324785dd65d200d6dd1133"}, + {file = "fonttools-4.49.0-cp39-cp39-win_amd64.whl", hash = "sha256:b050d362df50fc6e38ae3954d8c29bf2da52be384649ee8245fdb5186b620836"}, + {file = "fonttools-4.49.0-py3-none-any.whl", hash = "sha256:af281525e5dd7fa0b39fb1667b8d5ca0e2a9079967e14c4bfe90fd1cd13e0f18"}, + {file = "fonttools-4.49.0.tar.gz", hash = "sha256:ebf46e7f01b7af7861310417d7c49591a85d99146fc23a5ba82fdb28af156321"}, ] [package.extras] @@ -503,12 +567,13 @@ woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "future" -version = "0.18.3" +version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, + {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, + {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, ] [[package]] @@ -530,13 +595,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "identify" -version = "2.5.34" +version = "2.5.35" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.34-py2.py3-none-any.whl", hash = "sha256:a4316013779e433d08b96e5eabb7f641e6c7942e4ab5d4c509ebd2e7a8994aed"}, - {file = "identify-2.5.34.tar.gz", hash = "sha256:ee17bc9d499899bc9eaec1ac7bf2dc9eedd480db9d88b96d123d3b64a9d34f5d"}, + {file = "identify-2.5.35-py2.py3-none-any.whl", hash = "sha256:c4de0081837b211594f8e877a6b4fad7ca32bbfc1a9307fdd61c28bfe923f13e"}, + {file = "identify-2.5.35.tar.gz", hash = "sha256:10a7ca245cfcd756a554a7288159f72ff105ad233c7c4b9c6f0f4d108f5f6791"}, ] [package.extras] @@ -598,13 +663,13 @@ files = [ [[package]] name = "ipython" -version = "8.21.0" +version = "8.22.1" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = 
"ipython-8.21.0-py3-none-any.whl", hash = "sha256:1050a3ab8473488d7eee163796b02e511d0735cf43a04ba2a8348bd0f2eaf8a5"}, - {file = "ipython-8.21.0.tar.gz", hash = "sha256:48fbc236fbe0e138b88773fa0437751f14c3645fb483f1d4c5dee58b37e5ce73"}, + {file = "ipython-8.22.1-py3-none-any.whl", hash = "sha256:869335e8cded62ffb6fac8928e5287a05433d6462e3ebaac25f4216474dd6bc4"}, + {file = "ipython-8.22.1.tar.gz", hash = "sha256:39c6f9efc079fb19bfb0f17eee903978fe9a290b1b82d68196c641cecb76ea22"}, ] [package.dependencies] @@ -613,16 +678,16 @@ decorator = "*" exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} jedi = ">=0.16" matplotlib-inline = "*" -pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} prompt-toolkit = ">=3.0.41,<3.1.0" pygments = ">=2.4.0" stack-data = "*" -traitlets = ">=5" +traitlets = ">=5.13.0" [package.extras] -all = ["black", "curio", "docrepr", "exceptiongroup", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +all = ["ipython[black,doc,kernel,nbconvert,nbformat,notebook,parallel,qtconsole,terminal]", "ipython[test,test-extra]"] black = ["black"] -doc = ["docrepr", "exceptiongroup", "ipykernel", "matplotlib", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -630,7 +695,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "pickleshare", "pytest (<8)", "pytest-asyncio (<0.22)", "testpath", "trio"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] [[package]] name = "jedi" @@ -1248,40 +1313,40 @@ files = [ [[package]] name = "pandas" -version = "2.2.0" +version = "2.2.1" description = "Powerful data structures for data analysis, time series, and statistics" optional = false python-versions = ">=3.9" files = [ - {file = "pandas-2.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8108ee1712bb4fa2c16981fba7e68b3f6ea330277f5ca34fa8d557e986a11670"}, - {file = "pandas-2.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:736da9ad4033aeab51d067fc3bd69a0ba36f5a60f66a527b3d72e2030e63280a"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e0b4fc3ddceb56ec8a287313bc22abe17ab0eb184069f08fc6a9352a769b18"}, - {file = "pandas-2.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20404d2adefe92aed3b38da41d0847a143a09be982a31b85bc7dd565bdba0f4e"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ea3ee3f125032bfcade3a4cf85131ed064b4f8dd23e5ce6fa16473e48ebcaf5"}, - {file = "pandas-2.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", 
hash = "sha256:f9670b3ac00a387620489dfc1bca66db47a787f4e55911f1293063a78b108df1"}, - {file = "pandas-2.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a946f210383c7e6d16312d30b238fd508d80d927014f3b33fb5b15c2f895430"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a1b438fa26b208005c997e78672f1aa8138f67002e833312e6230f3e57fa87d5"}, - {file = "pandas-2.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8ce2fbc8d9bf303ce54a476116165220a1fedf15985b09656b4b4275300e920b"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2707514a7bec41a4ab81f2ccce8b382961a29fbe9492eab1305bb075b2b1ff4f"}, - {file = "pandas-2.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85793cbdc2d5bc32620dc8ffa715423f0c680dacacf55056ba13454a5be5de88"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cfd6c2491dc821b10c716ad6776e7ab311f7df5d16038d0b7458bc0b67dc10f3"}, - {file = "pandas-2.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a146b9dcacc3123aa2b399df1a284de5f46287a4ab4fbfc237eac98a92ebcb71"}, - {file = "pandas-2.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbc1b53c0e1fdf16388c33c3cca160f798d38aea2978004dd3f4d3dec56454c9"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a41d06f308a024981dcaa6c41f2f2be46a6b186b902c94c2674e8cb5c42985bc"}, - {file = "pandas-2.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:159205c99d7a5ce89ecfc37cb08ed179de7783737cea403b295b5eda8e9c56d1"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1e1f3861ea9132b32f2133788f3b14911b68102d562715d71bd0013bc45440"}, - {file = "pandas-2.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:761cb99b42a69005dec2b08854fb1d4888fdf7b05db23a8c5a099e4b886a2106"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a20628faaf444da122b2a64b1e5360cde100ee6283ae8effa0d8745153809a2e"}, - {file = "pandas-2.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5be5d03ea2073627e7111f61b9f1f0d9625dc3c4d8dda72cc827b0c58a1d042"}, - {file = "pandas-2.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:a626795722d893ed6aacb64d2401d017ddc8a2341b49e0384ab9bf7112bdec30"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f66419d4a41132eb7e9a73dcec9486cf5019f52d90dd35547af11bc58f8637d"}, - {file = "pandas-2.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57abcaeda83fb80d447f28ab0cc7b32b13978f6f733875ebd1ed14f8fbc0f4ab"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60f1f7dba3c2d5ca159e18c46a34e7ca7247a73b5dd1a22b6d59707ed6b899a"}, - {file = "pandas-2.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb61dc8567b798b969bcc1fc964788f5a68214d333cade8319c7ab33e2b5d88a"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:52826b5f4ed658fa2b729264d63f6732b8b29949c7fd234510d57c61dbeadfcd"}, - {file = "pandas-2.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bde2bc699dbd80d7bc7f9cab1e23a95c4375de615860ca089f34e7c64f4a8de7"}, - {file = "pandas-2.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:3de918a754bbf2da2381e8a3dcc45eede8cd7775b047b923f9006d5f876802ae"}, - {file = "pandas-2.2.0.tar.gz", hash = "sha256:30b83f7c3eb217fb4d1b494a57a2fda5444f17834f5df2de6b2ffff68dc3c8e2"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:8df8612be9cd1c7797c93e1c5df861b2ddda0b48b08f2c3eaa0702cf88fb5f88"}, + {file = "pandas-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0f573ab277252ed9aaf38240f3b54cfc90fff8e5cab70411ee1d03f5d51f3944"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f02a3a6c83df4026e55b63c1f06476c9aa3ed6af3d89b4f04ea656ccdaaaa359"}, + {file = "pandas-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c38ce92cb22a4bea4e3929429aa1067a454dcc9c335799af93ba9be21b6beb51"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c2ce852e1cf2509a69e98358e8458775f89599566ac3775e70419b98615f4b06"}, + {file = "pandas-2.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53680dc9b2519cbf609c62db3ed7c0b499077c7fefda564e330286e619ff0dd9"}, + {file = "pandas-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:94e714a1cca63e4f5939cdce5f29ba8d415d85166be3441165edd427dc9f6bc0"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f821213d48f4ab353d20ebc24e4faf94ba40d76680642fb7ce2ea31a3ad94f9b"}, + {file = "pandas-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c70e00c2d894cb230e5c15e4b1e1e6b2b478e09cf27cc593a11ef955b9ecc81a"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97fbb5387c69209f134893abc788a6486dbf2f9e511070ca05eed4b930b1b02"}, + {file = "pandas-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101d0eb9c5361aa0146f500773395a03839a5e6ecde4d4b6ced88b7e5a1a6403"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7d2ed41c319c9fb4fd454fe25372028dfa417aacb9790f68171b2e3f06eae8cd"}, + {file = "pandas-2.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5d3c00557d657c8773ef9ee702c61dd13b9d7426794c9dfeb1dc4a0bf0ebc7"}, + {file = "pandas-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:06cf591dbaefb6da9de8472535b185cba556d0ce2e6ed28e21d919704fef1a9e"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:88ecb5c01bb9ca927ebc4098136038519aa5d66b44671861ffab754cae75102c"}, + {file = "pandas-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:04f6ec3baec203c13e3f8b139fb0f9f86cd8c0b94603ae3ae8ce9a422e9f5bee"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a935a90a76c44fe170d01e90a3594beef9e9a6220021acfb26053d01426f7dc2"}, + {file = "pandas-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c391f594aae2fd9f679d419e9a4d5ba4bce5bb13f6a989195656e7dc4b95c8f0"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9d1265545f579edf3f8f0cb6f89f234f5e44ba725a34d86535b1a1d38decbccc"}, + {file = "pandas-2.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:11940e9e3056576ac3244baef2fedade891977bcc1cb7e5cc8f8cc7d603edc89"}, + {file = "pandas-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acf681325ee1c7f950d058b05a820441075b0dd9a2adf5c4835b9bc056bf4fb"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9bd8a40f47080825af4317d0340c656744f2bfdb6819f818e6ba3cd24c0e1397"}, + {file = "pandas-2.2.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:df0c37ebd19e11d089ceba66eba59a168242fc6b7155cba4ffffa6eccdfb8f16"}, + {file = "pandas-2.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:739cc70eaf17d57608639e74d63387b0d8594ce02f69e7a0b046f117974b3019"}, + {file = 
"pandas-2.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9d3558d263073ed95e46f4650becff0c5e1ffe0fc3a015de3c79283dfbdb3df"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4aa1d8707812a658debf03824016bf5ea0d516afdea29b7dc14cf687bc4d4ec6"}, + {file = "pandas-2.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:76f27a809cda87e07f192f001d11adc2b930e93a2b0c4a236fde5429527423be"}, + {file = "pandas-2.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:1ba21b1d5c0e43416218db63037dbe1a01fc101dc6e6024bcad08123e48004ab"}, + {file = "pandas-2.2.1.tar.gz", hash = "sha256:0ab90f87093c13f3e8fa45b48ba9f39181046e8f3317d3aadb2fffbb1b978572"}, ] [package.dependencies] @@ -1313,6 +1378,7 @@ parquet = ["pyarrow (>=10.0.1)"] performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] plot = ["matplotlib (>=3.6.3)"] postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] spss = ["pyreadstat (>=1.2.0)"] sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] @@ -1536,13 +1602,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- [[package]] name = "plotly" -version = "5.18.0" +version = "5.19.0" description = "An open-source, interactive data visualization library for Python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "plotly-5.18.0-py3-none-any.whl", hash = "sha256:23aa8ea2f4fb364a20d34ad38235524bd9d691bf5299e800bca608c31e8db8de"}, - {file = "plotly-5.18.0.tar.gz", hash = "sha256:360a31e6fbb49d12b007036eb6929521343d6bee2236f8459915821baefa2cbb"}, + {file = "plotly-5.19.0-py3-none-any.whl", hash = "sha256:906abcc5f15945765328c5d47edaa884bc99f5985fbc61e8cd4dc361f4ff8f5a"}, + {file = "plotly-5.19.0.tar.gz", hash = "sha256:5ea91a56571292ade3e3bc9bf712eba0b95a1fb0a941375d978cc79432e055f4"}, ] [package.dependencies] @@ -1566,13 +1632,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.6.1" +version = "3.6.2" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" files = [ - {file = "pre_commit-3.6.1-py2.py3-none-any.whl", hash = "sha256:9fe989afcf095d2c4796ce7c553cf28d4d4a9b9346de3cda079bcf40748454a4"}, - {file = "pre_commit-3.6.1.tar.gz", hash = "sha256:c90961d8aa706f75d60935aba09469a6b0bcb8345f127c3fbee4bdc5f114cf4b"}, + {file = "pre_commit-3.6.2-py2.py3-none-any.whl", hash = "sha256:ba637c2d7a670c10daedc059f5c49b5bd0aadbccfcd7ec15592cf9665117532c"}, + {file = "pre_commit-3.6.2.tar.gz", hash = "sha256:c3ef34f463045c88658c5b99f38c1e297abdcc0ff13f98d3370055fbbfabc67e"}, ] [package.dependencies] @@ -1623,18 +1689,18 @@ tests = ["pytest"] [[package]] name = "pydantic" -version = "2.6.1" +version = "2.6.3" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, - {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, + {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, + {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.2" +pydantic-core = "2.16.3" typing-extensions = ">=4.6.1" [package.extras] @@ -1642,90 +1708,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.2" +version = "2.16.3" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, - {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, - {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, - {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, - {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, - {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = 
"sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, - {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, - {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, - {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, - {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, - {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, - {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, - {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, - {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, - {file = 
"pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, - {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, - {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, - {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, - {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, - {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, - {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, - {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, - {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, - {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, - {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, - {file = 
"pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, - {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, - {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, - {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, - {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, - {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, - {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, - {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"}, + {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"}, + {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"}, + {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"}, + {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"}, + {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"}, + {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"}, + {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"}, + {file = 
"pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"}, + {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"}, + {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"}, + {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"}, + {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"}, + {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"}, + {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"}, + {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"}, + {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"}, + {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"}, + {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"}, + {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"}, + {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"}, + {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"}, + {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"}, + {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"}, + {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"}, + {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"}, + {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"}, + {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"}, + {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"}, + {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"}, + {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"}, + {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"}, ] [package.dependencies] @@ -2087,19 +2153,19 @@ test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", [[package]] name = "setuptools" -version = "69.1.0" +version = "69.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, - {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, + {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, + {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", 
"sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -2300,13 +2366,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.10.0-py3-none-any.whl", hash = "sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475"}, + {file = "typing_extensions-4.10.0.tar.gz", hash = "sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb"}, ] [[package]] @@ -2356,13 +2422,13 @@ tests = ["nose", "numpy"] [[package]] name = "urllib3" -version = "2.2.0" +version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, - {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, ] [package.extras] @@ -2373,13 +2439,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.25.0" +version = "20.25.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, - {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, + {file = "virtualenv-20.25.1-py3-none-any.whl", hash = "sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a"}, + {file = "virtualenv-20.25.1.tar.gz", hash = "sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197"}, ] [package.dependencies] @@ -2468,4 +2534,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "d9b8100aa0d54d420fd3bc0ae5649b6c7e5b2ed784db1f8a4df1693da1e4bf28" +content-hash = "410a3b6a5275c389c4434d7ece93bfe1379baf7ae6f91146cf5d91a83a92dc2b" diff --git a/pyproject.toml b/pyproject.toml index 52c1d01e..3bd0edde 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ pydantic = "^2.6.1" [tool.poetry.scripts] peakipy = 'peakipy.cli.main:app' +peakipy-edit = 'peakipy.cli.edit_panel:app' peakipy-check = 'peakipy.cli.check_panel:app' [tool.poetry.group.dev.dependencies] From e024885d9003711859ba73d401f2ad1e412b2257 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 4 Mar 2024 21:38:13 -0500 Subject: [PATCH 24/37] remove clusters larger than max_cluster_size --- peakipy/cli/main.py | 2 ++ peakipy/core.py | 6 ++---- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index a497afd3..af17bf38 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -673,6 +673,8 @@ def fit( max_cluster_size = warn_if_trying_to_fit_large_clusters( max_cluster_size, peakipy_data ) + # remove peak clusters larger than max_cluster_size + peakipy_data.df = peakipy_data.df[peakipy_data.df.MEMCNT <= max_cluster_size] args["max_cluster_size"] = max_cluster_size args["to_fix"] = fix diff --git a/peakipy/core.py b/peakipy/core.py index e14c0b40..7504f309 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -1757,10 +1757,8 @@ def update_df(self): self.df["X_AXISf"] = self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) # in case of missing values (should estimate though) - # self.df.XW_HZ.replace("None", "20.0", inplace=True) - # self.df.YW_HZ.replace("None", "20.0", inplace=True) - self.df.XW_HZ.replace(np.NaN, "20.0", inplace=True) - self.df.YW_HZ.replace(np.NaN, "20.0", inplace=True) + self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") # convert linewidths to float self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) From 
305c18d0ac2ffbdecab81fa4014aeee5320c9667 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 17 Mar 2024 23:04:39 -0400 Subject: [PATCH 25/37] trailing white space --- peakipy/cli/check_panel.py | 20 ++- peakipy/cli/edit.py | 349 +++++++------------------------------ peakipy/cli/edit_panel.py | 74 +++++--- peakipy/cli/main.py | 49 +++--- peakipy/core.py | 44 +---- test/test_cli.py | 14 +- test/test_main.py | 119 +++++++++++++ 7 files changed, 275 insertions(+), 394 deletions(-) diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index 2bd2ceb4..b60de1d7 100644 --- a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -8,7 +8,6 @@ from peakipy.cli.main import check, validate_fit_dataframe pn.extension() -# pn.config.theme = "dark" @dataclass @@ -35,7 +34,7 @@ def get_cluster(cluster): data = data_singleton() cluster_groups = data.df.groupby("clustid") cluster_group = cluster_groups.get_group(cluster) - df_pane = pn.pane.DataFrame( + df_pane = pn.widgets.Tabulator( cluster_group[ [ "assignment", @@ -47,10 +46,12 @@ def get_cluster(cluster): "center_x_ppm", "center_y_ppm", "fwhm_x_hz", - "fwhm_x_hz", + "fwhm_y_hz", "lineshape", ] - ] + ], + selectable=False, + disabled=True, ) return df_pane @@ -61,7 +62,7 @@ def create_plotly_pane(cluster, plane): fits=data.fits_path, data_path=data.data_path, clusters=[cluster], - plane=plane, + plane=[plane], config_path=data.config_path, plotly=True, ) @@ -105,7 +106,14 @@ def create_check_panel( check_pane = pn.Card( info_pane, pn.Row(select_cluster, select_plane), - pn.Row(interactive_plotly_pane, interactive_cluster_pane), + pn.Row( + pn.Column( + pn.Card(interactive_plotly_pane, title="Fitted cluster"), + pn.Card( + interactive_cluster_pane, title="Fitted parameters for cluster" + ), + ) + ), title="Peakipy check", ) if edit_panel: diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index d0ff2e6c..0d85bd96 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -52,8 +52,6 @@ def __init__(self, peaklist_path: Path, data_path: Path): self._path = peaklist_path self._data_path = data_path args, config = read_config({}) - # self.args = args - # self.config = config self._dims = config.get("dims", [0, 1, 2]) self.thres = config.get("thres", 1e6) self._peakipy_data = LoadData( @@ -64,6 +62,7 @@ def __init__(self, peaklist_path: Path, data_path: Path): # make temporary paths self.make_temp_files() self.make_data_source() + self.make_tabulator_widget() self.setup_radii_sliders() self.setup_save_buttons() self.setup_set_fixed_parameters() @@ -120,6 +119,34 @@ def make_data_source(self): self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) return self.source + @property + def tabulator_columns(self): + columns = [ + "ASS", + "CLUSTID", + "X_PPM", + "Y_PPM", + "X_RADIUS_PPM", + "Y_RADIUS_PPM", + "XW_HZ", + "YW_HZ", + "VOL", + "include", + "MEMCNT", + ] + return columns + + def make_tabulator_widget(self): + self.tablulator_widget = pn.widgets.Tabulator( + self.peakipy_data.df[self.tabulator_columns], + ) + return self.tablulator_widget + + def select_callback(self, attrname, old, new): + for col in self.peakipy_data.df.columns: + self.peakipy_data.df.loc[:, col] = self.source.data[col] + self.update_memcnt() + def setup_radii_sliders(self): # configure sliders for setting radii self.slider_X_RADIUS = Slider( @@ -399,10 +426,15 @@ def setup_plot(self): ) self.clust_div = Div( text="""If you want to adjust how the peaks are automatically clustered then try changing the - width/diameter/height (integer values) 
of the structuring element used during the binary dilation step - (you can also remove it by selecting 'None'). Increasing the size of the structuring element will cause - peaks to be more readily incorporated into clusters. Be sure to save your peak list before doing this as - any manual edits will be lost.""" + width/diameter/height (integer values) of the structuring element used during the binary dilation step. + Increasing the size of the structuring element will cause + peaks to be more readily incorporated into clusters. The mask_method scales the fitting masks based on + the provided floating point value and considers any overlapping masks to be part of a cluster.""", + ) + self.recluster_warning = Div( + text=""" + Be sure to save your peak list before reclustering as + any manual edits to clusters will be lost.""", ) self.intro_div = Div( text="""

<h2>peakipy - interactive fit adjustment</h2>
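# Note for readers: below is a minimal, self-contained sketch of the dilation-based
# clustering idea that the clust_div help text above describes. It is not peakipy's
# actual implementation (the real logic lives in peakipy.core, which also offers the
# mask_method overlap approach); the function and variable names here are illustrative
# assumptions only.
import numpy as np
from scipy import ndimage
from skimage.morphology import binary_dilation, disk

def cluster_peaks_sketch(data2d, peak_rows, peak_cols, threshold, struc_size=3):
    # Threshold the plane, grow the above-threshold regions with a disk-shaped
    # structuring element, then label connected regions: peaks that fall inside the
    # same labeled region share a cluster id. A larger struc_size merges nearby peaks
    # into one cluster, and because ids are regenerated each time, manual cluster
    # edits are lost on re-clustering (hence the recluster_warning above).
    above_threshold = np.abs(data2d) > threshold
    dilated = binary_dilation(above_threshold, disk(struc_size))
    labeled_array, _num_features = ndimage.label(dilated)
    return [labeled_array[row, col] for row, col in zip(peak_rows, peak_cols)]

# usage sketch (hypothetical names): clustids = cluster_peaks_sketch(plane2d, df.Y_AXIS, df.X_AXIS, thres)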

@@ -434,170 +466,17 @@ def setup_plot(self): labels=["fit current plane only"], active=[] ) - #  not sure this is needed - selected_df = self.peakipy_data.df.copy() - self.fit_button.on_event(ButtonClick, self.fit_selected) - columns = [ - TableColumn(field="ASS", title="Assignment", width=500), - TableColumn(field="CLUSTID", title="Cluster", editor=IntEditor()), - TableColumn( - field="X_PPM", - title=f"{self.peakipy_data.f2_label}", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="Y_PPM", - title=f"{self.peakipy_data.f1_label}", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="X_RADIUS_PPM", - title=f"{self.peakipy_data.f2_label} radius (ppm)", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="Y_RADIUS_PPM", - title=f"{self.peakipy_data.f1_label} radius (ppm)", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="XW_HZ", - title=f"{self.peakipy_data.f2_label} LW (Hz)", - editor=NumberEditor(step=0.01), - formatter=NumberFormatter(format="0.00"), - ), - TableColumn( - field="YW_HZ", - title=f"{self.peakipy_data.f1_label} LW (Hz)", - editor=NumberEditor(step=0.01), - formatter=NumberFormatter(format="0.00"), - ), - TableColumn( - field="VOL", title="Volume", formatter=NumberFormatter(format="0.0") - ), - TableColumn( - field="include", - title="Include", - width=7, - editor=SelectEditor(options=["yes", "no"]), - ), - TableColumn(field="MEMCNT", title="MEMCNT", editor=IntEditor()), - ] - - self.data_table = DataTable( - source=self.source, - columns=columns, - editable=True, - width=1200, - ) - - self.table_style = InlineStyleSheet( - css=""" - .slick-row.even { background: #263140; } - .slick-row.odd { background: #505c6d; } - .slick-cell.l0 {background: #1f2937;} - """ - ) - # self.table_style = InlineStyleSheet( - # css=""" - # .slick-header-columns { - # background-color: #00296b !important; - # font-family: arial; - # font-weight: bold; - # font-size: 12pt; - # color: #FFFFFF; - # text-align: right; - # } - # .slick-header-column:hover { - # background: none repeat scroll 0 0 #fdc500; - # } - # .slick-row { - # font-size: 12pt; - # font-family: arial; - # text-align: left; - # } - # .slick-row:hover{ - # background: none repeat scroll 0 0 #7c7c7c; - # } - # .slick-cell { - # header-font-weight: 500; - # border-width: 1px 1px 1px 1px; - # border-color: #d4d4d4; - # background-color: #00509D; - # color: #FFFFFF; - # } - # .slick-cell.selected { - # header-font-weight: 500; - # border-width: 1px 1px 1px 1px; - # border-color: #00509D; - # background-color: #FDC500; - # color: black; - # } - - # """ - # ) - - self.data_table.stylesheets = [self.table_style] - # callback for adding # source.selected.on_change('indices', callback) self.source.selected.on_change("indices", self.select_callback) - # # Document layout - # fitting_controls = column( - # row( - # column(self.slider_X_RADIUS, self.slider_Y_RADIUS), - # column( - # row(column(self.contour_start, self.pos_neg_contour_radiobutton)), - # column(self.fit_button), - # ), - # ), - # row( - # column(column(self.select_lineshape_radiobuttons_help), column(self.select_lineshape_radiobuttons)), - # column(column(self.select_plane), column(self.checkbox_group)), - # column(self.select_fixed_parameters_help, self.select_fixed_parameters), - # column(self.select_reference_planes) - # ), - # max_width=400, 
- # ) - fitting_controls = row( - column( - self.slider_X_RADIUS, - self.slider_Y_RADIUS, - self.contour_start, - self.pos_neg_contour_radiobutton, - self.select_lineshape_radiobuttons_help, - self.select_lineshape_radiobuttons, - max_width=400, - ), - column( - self.select_plane, - self.checkbox_group, - self.select_fixed_parameters_help, - self.select_fixed_parameters, - self.set_xybounds_help, - self.set_xybounds, - self.select_reference_planes_help, - self.select_reference_planes, - self.set_initial_fit_threshold_help, - self.set_initial_fit_threshold, - self.fit_button, - max_width=400, - ), - max_width=800, - ) - # reclustering tab self.struct_el = Select( title="Structuring element:", value=StrucEl.disk.value, - options=[i.value for i in StrucEl] + ["None"], + options=[i.value for i in StrucEl], width=100, ) self.struct_el_size = TextInput( @@ -609,36 +488,6 @@ def setup_plot(self): self.recluster = Button(label="Re-cluster", button_type="warning") self.recluster.on_event(ButtonClick, self.recluster_peaks) - # edit_fits tabs - fitting_layout = fitting_controls - log_layout = self.fit_reports_div - recluster_layout = column( - row( - self.clust_div, - ), - row( - column( - self.contour_start, - self.struct_el, - self.struct_el_size, - self.recluster, - ) - ), - max_width=400, - ) - save_layout = column( - self.savefilename, self.button, self.exit_button, max_width=400 - ) - - fitting_tab = TabPanel(child=fitting_layout, title="Peak fitting") - log_tab = TabPanel(child=log_layout, title="Log") - recluster_tab = TabPanel(child=recluster_layout, title="Re-cluster peaks") - save_tab = TabPanel(child=save_layout, title="Save edited peaklist") - self.tabs = Tabs( - tabs=[fitting_tab, log_tab, recluster_tab, save_tab], - sizing_mode="scale_both", - ) - def recluster_peaks(self, event): if self.struct_el.value == "mask_method": self.struc_size = tuple( @@ -658,7 +507,7 @@ def recluster_peaks(self, event): ) # update data source self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) - + self.tablulator_widget.value = self.peakipy_data.df[self.tabulator_columns] return self.peakipy_data.df def update_memcnt(self): @@ -675,6 +524,7 @@ def update_memcnt(self): self.peakipy_data.df.loc[include_no, "color"] = "ghostwhite" # update source data self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) + self.tablulator_widget.value = self.peakipy_data.df[self.tabulator_columns] return self.peakipy_data.df def unpack_parameters_to_fix(self): @@ -690,12 +540,12 @@ def fit_selected(self, event): selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] - self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( - self.slider_X_RADIUS.value - ) - self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( - self.slider_Y_RADIUS.value - ) + # self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( + # self.slider_X_RADIUS.value + # ) + # self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( + # self.slider_Y_RADIUS.value + # ) self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER_PPM"] = ( current["X_RADIUS_PPM"] * 2.0 @@ -724,20 +574,10 @@ def fit_selected(self, event): print(f"[yellow]Using LS = {lineshape}[/yellow]") if self.checkbox_group.active == []: fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" - # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label 
--individual --show --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')}" - # self.check_pane = create_check_panel( - # self.TEMP_OUT_CSV, self.data_path, edit_panel=True - # ) - # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}" else: plane_index = self.select_plane.value print(f"[yellow]Only fitting plane {plane_index}[/yellow]") fit_command = f"peakipy fit {self.TEMP_INPUT_CSV} {self.data_path} {self.TEMP_OUT_CSV} --lineshape {lineshape} --plane {plane_index}{fix_command}{reference_planes_command}{initial_fit_threshold_command}{xy_bounds_command}" - # self.check_pane = create_check_panel( - # self.TEMP_OUT_CSV, self.data_path, edit_panel=True - # ) - # plot_command = f"peakipy check {self.TEMP_OUT_CSV} {self.data_path} --label --individual --outname {self.TEMP_OUT_PLOT / Path('tmp.pdf')} --plane {plane_index} --show" - # plot_command = f"peakipy-check {self.TEMP_OUT_CSV} {self.data_path}" print(f"[blue]{fit_command}[/blue]") self.fit_reports += fit_command + "<br>" @@ -746,8 +586,6 @@ def fit_selected(self, event): self.fit_reports += stdout.decode() + "<br><hr><br>" self.fit_reports = self.fit_reports.replace("\n", "<br>
") self.fit_reports_div.text = log_div % (log_style, self.fit_reports) - # plot data - # os.system(plot_command) def save_peaks(self, event): if self.savefilename.value: @@ -765,18 +603,6 @@ def save_peaks(self, event): else: self.peakipy_data.df.to_pickle(to_save) - def select_callback(self, attrname, old, new): - # print(Fore.RED + "Calling Select Callback") - # selectionIndex = self.source.selected.indices - # current = self.peakipy_data.df.iloc[selectionIndex] - - for col in self.peakipy_data.df.columns: - self.peakipy_data.df.loc[:, col] = self.source.data[col] - # self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) - # update memcnt - self.update_memcnt() - # print(Fore.YELLOW + "Finished Calling Select Callback") - def peak_pick_callback(self, event): # global so that df is updated globally x_radius_ppm = 0.035 @@ -836,82 +662,33 @@ def peak_pick_callback(self, event): ) self.update_memcnt() - def slider_callback_x(self, attrname, old, new): + def slider_callback(self, dim, channel): selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] - self.peakipy_data.df.loc[selectionIndex, "X_RADIUS"] = ( - self.slider_X_RADIUS.value * self.peakipy_data.pt_per_ppm_f2 - ) - self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( - self.slider_X_RADIUS.value - ) + self.peakipy_data.df.loc[selectionIndex, f"{dim}_RADIUS"] = getattr( + self, f"slider_{dim}_RADIUS" + ).value * getattr(self.peakipy_data, f"pt_per_ppm_{channel}") + self.peakipy_data.df.loc[selectionIndex, f"{dim}_RADIUS_PPM"] = getattr( + self, f"slider_{dim}_RADIUS" + ).value - self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER_PPM"] = ( - current["X_RADIUS_PPM"] * 2.0 + self.peakipy_data.df.loc[selectionIndex, f"{dim}_DIAMETER_PPM"] = ( + current[f"{dim}_RADIUS_PPM"] * 2.0 ) - self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER"] = ( - current["X_RADIUS"] * 2.0 + self.peakipy_data.df.loc[selectionIndex, f"{dim}_DIAMETER"] = ( + current[f"{dim}_RADIUS"] * 2.0 ) # set edited rows to True self.peakipy_data.df.loc[selectionIndex, "Edited"] = True - self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) + self.tablulator_widget.value = self.peakipy_data.df[self.tabulator_columns] - def slider_callback_y(self, attrname, old, new): - selectionIndex = self.source.selected.indices - current = self.peakipy_data.df.iloc[selectionIndex] - self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS"] = ( - self.slider_Y_RADIUS.value * self.peakipy_data.pt_per_ppm_f1 - ) - self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( - self.slider_Y_RADIUS.value - ) - - self.peakipy_data.df.loc[selectionIndex, "Y_DIAMETER_PPM"] = ( - current["Y_RADIUS_PPM"] * 2.0 - ) - self.peakipy_data.df.loc[selectionIndex, "Y_DIAMETER"] = ( - current["Y_RADIUS"] * 2.0 - ) - - # set edited rows to True - self.peakipy_data.df.loc[selectionIndex, "Edited"] = True - - self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) + def slider_callback_x(self, attrname, old, new): + self.slider_callback("X", "f2") - # def slider_callback(self, attrname, old, new, dim="X"): - # - # selectionIndex = self.source.selected.indices - # current = self.peakipy_data.df.iloc[selectionIndex] - # self.peakipy_data.df.loc[selectionIndex, f"{dim}_RADIUS"] = ( - # self.slider_Y_RADIUS.value * self.peakipy_data.pt_per_ppm_f1 - # ) - # self.peakipy_data.df.loc[ - # selectionIndex, f"{dim}_RADIUS_PPM" - # ] = self.slider_Y_RADIUS.value - # - # self.peakipy_data.df.loc[selectionIndex, f"{dim}_DIAMETER_PPM"] = ( - 
# current[f"{dim}_RADIUS_PPM"] * 2.0 - # ) - # self.peakipy_data.df.loc[selectionIndex, f"{dim}_DIAMETER"] = ( - # current[f"{dim}_RADIUS"] * 2.0 - # ) - # - # set edited rows to True - # self.peakipy_data.df.loc[selectionIndex, "Edited"] = True - - # selected_df = df[df.CLUSTID.isin(list(current.CLUSTID))] - # print(list(selected_df)) - # self.source.data = ColumnDataSource.from_df(self.peakipy_data.df) - - # def slider_callback_x(self, attrname, old, new): - # - # self.slider_callback(attrname, old, new, dim="X") - # - # def slider_callback_y(self, attrname, old, new): - # - # self.slider_callback(attrname, old, new, dim="Y") + def slider_callback_y(self, attrname, old, new): + self.slider_callback("Y", "f1") def update_contour(self, attrname, old, new): new_cs = eval(self.contour_start.value) diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py index ddadcc40..de63f8a4 100644 --- a/peakipy/cli/edit_panel.py +++ b/peakipy/cli/edit_panel.py @@ -33,35 +33,44 @@ def data_singleton(): return Data() +def update_peakipy_data_on_edit_of_table(event): + data = data_singleton() + column = event.column + row = event.row + value = event.value + data.bs.peakipy_data.df.loc[row, column] = value + data.bs.update_memcnt() + + def panel_app(): data = data_singleton() bs = data.bs bokeh_pane = pn.pane.Bokeh(bs.p) - # table_pane = pn.pane.Bokeh(bs.data_table) - table_pane = pn.widgets.Tabulator( - bs.peakipy_data.df[ - [ - "ASS", - "CLUSTID", - "X_PPM", - "Y_PPM", - "X_RADIUS_PPM", - "Y_RADIUS_PPM", - "XW_HZ", - "YW_HZ", - "VOL", - "include", - "MEMCNT", - ] - ] - ) - spectrum_view_settings = pn.WidgetBox( "# Contour settings", bs.pos_neg_contour_radiobutton, bs.contour_start ) + save_peaklist_box = pn.WidgetBox( + "# Save your peaklist", + bs.savefilename, + bs.button, + pn.layout.Divider(), + bs.exit_button, + ) + recluster_settings = pn.WidgetBox( + "# Re-cluster your peaks", + bs.clust_div, + bs.struct_el, + bs.struct_el_size, + pn.layout.Divider(), + bs.recluster_warning, + bs.recluster, + sizing_mode="stretch_width", + ) button = pn.widgets.Button(name="Fit selected cluster(s)", button_type="primary") fit_controls = pn.WidgetBox( "# Fit controls", + button, + pn.layout.Divider(), bs.select_plane, bs.checkbox_group, pn.layout.Divider(), @@ -76,32 +85,47 @@ def panel_app(): pn.layout.Divider(), bs.select_lineshape_radiobuttons_help, bs.select_lineshape_radiobuttons, - pn.layout.Divider(), - button, ) mask_adjustment_controls = pn.WidgetBox( "# Fitting mask adjustment", bs.slider_X_RADIUS, bs.slider_Y_RADIUS ) - def b(event): + # bs.source.on_change() + def fit_peaks_button_click(event): check_app.loading = True bs.fit_selected(None) check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True) check_app.objects = check_panel.objects check_app.loading = False - button.on_click(b) + button.on_click(fit_peaks_button_click) + + def update_source_selected_indices(event): + print(event) + print(bs.tablulator_widget.selection) + bs.source.selected.indices = bs.tablulator_widget.selection + + bs.tablulator_widget.on_click(update_source_selected_indices) + bs.tablulator_widget.on_edit(update_peakipy_data_on_edit_of_table) + template = pn.template.BootstrapTemplate( title="Peakipy", sidebar=[mask_adjustment_controls, fit_controls], ) spectrum = pn.Card( - pn.Column(pn.Row(bokeh_pane, spectrum_view_settings), table_pane), + pn.Column( + pn.Row( + bokeh_pane, + pn.Column(spectrum_view_settings, save_peaklist_box), + recluster_settings, + ), + bs.tablulator_widget, + ), 
title="Peakipy fit", ) check_app = pn.Card(title="Peakipy check") - template.main.append(pn.Row(spectrum, check_app)) + template.main.append(pn.Column(check_app, spectrum)) template.show() diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index af17bf38..2ecb064c 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -766,33 +766,30 @@ def fit( def validate_plane_selection(plane, pseudo3D): - if plane > pseudo3D.n_planes: + if (plane == []) or (plane == None): + plane = list(range(pseudo3D.n_planes)) + + elif max(plane) > (pseudo3D.n_planes - 1): raise ValueError( - f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {plane}...[red]" + f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {max(plane)}...[red]" f"plane numbering starts from 0." ) - elif plane < 0: + elif min(plane) < 0: raise ValueError( - f"[red]Plane number can not be negative; you selected --plane {plane}...[/red]" + f"[red]Plane number can not be negative; you selected --plane {min(plane)}...[/red]" ) else: - return plane - + plane = sorted(plane) -def validate_ccount(ccount): - if type(ccount) == int: - ccount = ccount - else: - raise TypeError("ccount should be an integer") - return ccount + return plane -def validate_rcount(rcount): - if type(rcount) == int: - rcount = rcount +def validate_sample_count(sample_count): + if type(sample_count) == int: + sample_count = sample_count else: - raise TypeError("rcount should be an integer") - return rcount + raise TypeError("Sample count (ccount, rcount) should be an integer") + return sample_count def unpack_plotting_colors(colors): @@ -1107,7 +1104,7 @@ def next_plot(event): else: pdf.savefig() - plt.close() + plt.close() def create_plotly_wireframe_lines(plot_data: PlottingDataForPlane): @@ -1309,7 +1306,7 @@ def check( fits: Path, data_path: Path, clusters: Optional[List[int]] = None, - plane: int = 0, + plane: Optional[List[int]] = None, outname: Path = Path("plots.pdf"), first: bool = False, show: bool = False, @@ -1387,17 +1384,15 @@ def check( # first only overrides plane option if first: - plane = 0 + selected_planes = [0] else: - plane = plane - - selected_plane = validate_plane_selection(plane, pseudo3D) - ccount = validate_ccount(ccount) - rcount = validate_rcount(rcount) + selected_planes = validate_plane_selection(plane, pseudo3D) + ccount = validate_sample_count(ccount) + rcount = validate_sample_count(rcount) data_color, fit_color = unpack_plotting_colors(colors) fits = get_fit_data_for_selected_peak_clusters(fits, clusters) - peak_clusters = fits.query(f"plane=={selected_plane}").groupby("clustid") + peak_clusters = fits.query(f"plane in @selected_planes").groupby("clustid") # make plotting meshes x = np.arange(pseudo3D.f2_size) @@ -1430,7 +1425,7 @@ def check( empty_mask_array = np.zeros( (pseudo3D.f1_size, pseudo3D.f2_size), dtype=bool ) - first_plane = peak_cluster[peak_cluster.plane == selected_plane] + first_plane = peak_cluster[peak_cluster.plane == selected_planes[0]] individual_masks, mask = make_masks_from_plane_data( empty_mask_array, first_plane ) diff --git a/peakipy/core.py b/peakipy/core.py index 7504f309..4d4dad95 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -29,7 +29,6 @@ import numpy as np import nmrglue as ng -import matplotlib.pyplot as plt import pandas as pd import textwrap from rich import print @@ -38,15 +37,9 @@ from numpy import sqrt, log, pi, exp, finfo -from lmfit import Model, Parameters -from lmfit.model import ModelResult -from lmfit.models import LinearModel 
+from lmfit import Model from scipy.special import wofz -from matplotlib import cm -from mpl_toolkits.mplot3d import Axes3D -from matplotlib.widgets import Button - from bokeh.palettes import Category20 from scipy import ndimage from skimage.morphology import square, binary_closing, disk, rectangle @@ -1169,30 +1162,12 @@ def __init__( self._analysis_to_pipe_dic = { "#": "INDEX", - # "": "X_AXIS", - # "": "Y_AXIS", - # "": "DX", - # "": "DY", "Position F1": "X_PPM", "Position F2": "Y_PPM", - # "": "X_HZ", - # "": "Y_HZ", - # "": "XW", - # "": "YW", "Line Width F1 (Hz)": "XW_HZ", "Line Width F2 (Hz)": "YW_HZ", - # "": "X1", - # "": "X3", - # "": "Y1", - # "": "Y3", "Height": "HEIGHT", - # "Height": "DHEIGHT", "Volume": "VOL", - # "": "PCHI2", - # "": "TYPE", - # "": "ASS", - # "": "CLUSTID", - # "": "MEMCNT" } self._assign_to_pipe_dic = { "#": "INDEX", @@ -1206,30 +1181,13 @@ def __init__( self._sparky_to_pipe_dic = { "index": "INDEX", - # "": "X_AXIS", - # "": "Y_AXIS", - # "": "DX", - # "": "DY", "w1": "X_PPM", "w2": "Y_PPM", - # "": "X_HZ", - # "": "Y_HZ", - # "": "XW", - # "": "YW", "lw1 (hz)": "XW_HZ", "lw2 (hz)": "YW_HZ", - # "": "X1", - # "": "X3", - # "": "Y1", - # "": "Y3", "Height": "HEIGHT", - # "Height": "DHEIGHT", "Volume": "VOL", - # "": "PCHI2", - # "": "TYPE", "Assignment": "ASS", - # "": "CLUSTID", - # "": "MEMCNT" } self._analysis_to_pipe_dic[posF1] = "Y_PPM" diff --git a/test/test_cli.py b/test/test_cli.py index 191f2fa3..4b4b2062 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -156,13 +156,13 @@ def test_check_main_with_pv_pv(protein_L): peakipy.cli.main.check(**args) -def test_edit_with_default(protein_L): - args = dict( - peaklist_path=protein_L / Path("peaks.csv"), - data_path=protein_L / Path("test1.ft2"), - test=True, - ) - peakipy.cli.main.edit(**args) +# def test_edit_with_default(protein_L): +# args = dict( +# peaklist_path=protein_L / Path("peaks.csv"), +# data_path=protein_L / Path("test1.ft2"), +# test=True, +# ) +# peakipy.cli.main.edit(**args) # if __name__ == "__main__": diff --git a/test/test_main.py b/test/test_main.py index ea40fe02..366f934b 100644 --- a/test/test_main.py +++ b/test/test_main.py @@ -3,6 +3,7 @@ import numpy as np import pandas as pd +import pytest from pytest import fixture from peakipy.cli.main import ( @@ -10,6 +11,10 @@ check_for_include_column_and_add_if_missing, select_specified_planes, exclude_specified_planes, + validate_plane_selection, + validate_sample_count, + unpack_plotting_colors, + get_fit_data_for_selected_peak_clusters, ) @@ -76,3 +81,117 @@ def test_exclude_specified_planes_2(): actual_plane_numbers, peakipy_data = exclude_specified_planes(plane, PeakipyData()) np.testing.assert_array_equal(expected_plane_numbers, actual_plane_numbers) assert peakipy_data.data.shape == (2, 10, 20) + + +class MockPseudo3D: + def __init__(self, n_planes): + self.n_planes = n_planes + + +def test_empty_plane_selection(): + pseudo3D = MockPseudo3D(n_planes=5) + assert validate_plane_selection([], pseudo3D) == [0, 1, 2, 3, 4] + + +def test_plane_selection_none(): + pseudo3D = MockPseudo3D(n_planes=5) + assert validate_plane_selection(None, pseudo3D) == [0, 1, 2, 3, 4] + + +def test_valid_plane_selection(): + pseudo3D = MockPseudo3D(n_planes=5) + assert validate_plane_selection([0, 1, 2], pseudo3D) == [0, 1, 2] + + +def test_invalid_plane_selection_negative(): + pseudo3D = MockPseudo3D(n_planes=5) + with pytest.raises(ValueError): + validate_plane_selection([-1], pseudo3D) + + +def test_invalid_plane_selection_too_high(): + pseudo3D 
= MockPseudo3D(n_planes=5) + with pytest.raises(ValueError): + validate_plane_selection([5], pseudo3D) + + +def test_invalid_plane_selection_mix(): + pseudo3D = MockPseudo3D(n_planes=5) + with pytest.raises(ValueError): + validate_plane_selection([-1, 3, 5], pseudo3D) + + +def test_valid_sample_count(): + assert validate_sample_count(10) == 10 + + +def test_invalid_sample_count_type(): + with pytest.raises(TypeError): + validate_sample_count("10") + + +def test_invalid_sample_count_float(): + with pytest.raises(TypeError): + validate_sample_count(10.5) + + +def test_invalid_sample_count_list(): + with pytest.raises(TypeError): + validate_sample_count([10]) + + +def test_invalid_sample_count_dict(): + with pytest.raises(TypeError): + validate_sample_count({"count": 10}) + + +def test_invalid_sample_count_none(): + with pytest.raises(TypeError): + validate_sample_count(None) + + +def test_valid_colors(): + assert unpack_plotting_colors(("red", "black")) == ("red", "black") + + +def test_default_colors(): + assert unpack_plotting_colors(()) == ("green", "blue") + + +def test_invalid_colors_type(): + assert unpack_plotting_colors("red") == ("green", "blue") + + +def test_invalid_colors_single(): + assert unpack_plotting_colors(("red",)) == ("green", "blue") + + +def test_invalid_colors_length(): + assert unpack_plotting_colors(("red", "black", "green")) == ("green", "blue") + + +def test_no_clusters(): + fits = pd.DataFrame({"clustid": [1, 2, 3]}) + assert get_fit_data_for_selected_peak_clusters(fits, None).equals(fits) + + +def test_empty_clusters(): + fits = pd.DataFrame({"clustid": [1, 2, 3]}) + assert get_fit_data_for_selected_peak_clusters(fits, []).equals(fits) + + +def test_valid_clusters(): + fits = pd.DataFrame({"clustid": [1, 2, 3]}) + selected_clusters = [1, 3] + expected_result = pd.DataFrame({"clustid": [1, 3]}) + assert ( + get_fit_data_for_selected_peak_clusters(fits, selected_clusters) + .reset_index(drop=True) + .equals(expected_result) + ) + + +def test_invalid_clusters(): + fits = pd.DataFrame({"clustid": [1, 2, 3]}) + with pytest.raises(SystemExit): + get_fit_data_for_selected_peak_clusters(fits, [4, 5, 6]) From a31da53f710919fc7c78e20458ff489023b6ebc7 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 18 Mar 2024 22:47:57 -0400 Subject: [PATCH 26/37] added residual plot and cleaned up a little --- Makefile | 7 + peakipy/cli/check_panel.py | 24 ++-- peakipy/cli/edit.py | 27 ++-- peakipy/cli/edit_panel.py | 4 +- peakipy/cli/main.py | 62 +++------ peakipy/core.py | 24 +--- poetry.lock | 264 ++++++++++++++++++------------------- test/test_cli.py | 19 --- 8 files changed, 187 insertions(+), 244 deletions(-) create mode 100644 Makefile diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..8cb28ad3 --- /dev/null +++ b/Makefile @@ -0,0 +1,7 @@ +.PHONY: test + +test: + pytest test/test_core.py + pytest test/test_main.py + pytest test/test_fit.py + pytest test/test_cli.py diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index b60de1d7..44bc00d4 100644 --- a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -56,9 +56,15 @@ def get_cluster(cluster): return df_pane +@dataclass +class PlotContainer: + main_figure: pn.pane.Plotly + residual_figure: pn.pane.Plotly + + def create_plotly_pane(cluster, plane): data = data_singleton() - fig = check( + fig, residual_fig = check( fits=data.fits_path, data_path=data.data_path, clusters=[cluster], @@ -69,7 +75,8 @@ def create_plotly_pane(cluster, plane): fig["layout"].update(height=800, 
width=800) fig = fig.to_dict() - return pn.pane.Plotly(fig) + residual_fig = residual_fig.to_dict() + return pn.Column(pn.pane.Plotly(fig), pn.pane.Plotly(residual_fig)) app = typer.Typer() @@ -96,7 +103,7 @@ def create_check_panel( select_plane = pn.widgets.Select( name="Plane", options={f"{plane}": plane for plane in data.df.plane.unique()} ) - interactive_cluster_pane = pn.bind(get_cluster, select_cluster) + result_table_pane = pn.bind(get_cluster, select_cluster) interactive_plotly_pane = pn.bind( create_plotly_pane, cluster=select_cluster, plane=select_plane ) @@ -104,14 +111,15 @@ def create_check_panel( "Select a cluster and plane to look at from the dropdown menus" ) check_pane = pn.Card( - info_pane, - pn.Row(select_cluster, select_plane), + # info_pane, + # pn.Row(select_cluster, select_plane), pn.Row( pn.Column( - pn.Card(interactive_plotly_pane, title="Fitted cluster"), - pn.Card( - interactive_cluster_pane, title="Fitted parameters for cluster" + pn.Row( + pn.Card(interactive_plotly_pane, title="Fitted cluster"), + pn.Column(info_pane, select_cluster, select_plane), ), + pn.Card(result_table_pane, title="Fitted parameters for cluster"), ) ), title="Peakipy check", diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 0d85bd96..47fbe766 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -17,21 +17,14 @@ import panel as pn -from bokeh.io import curdoc from bokeh.events import ButtonClick, DoubleTap -from bokeh.layouts import row, column, grid -from bokeh.models import ColumnDataSource, Tabs, TabPanel, InlineStyleSheet +from bokeh.layouts import row, column +from bokeh.models import ColumnDataSource from bokeh.models.tools import HoverTool from bokeh.models.widgets import ( Slider, Select, Button, - DataTable, - TableColumn, - NumberFormatter, - NumberEditor, - IntEditor, - SelectEditor, TextInput, RadioButtonGroup, CheckboxGroup, @@ -136,9 +129,15 @@ def tabulator_columns(self): ] return columns + @property + def tabulator_non_editable_columns(self): + editors = {"X_RADIUS_PPM": None, "Y_RADIUS_PPM": None} + return editors + def make_tabulator_widget(self): self.tablulator_widget = pn.widgets.Tabulator( self.peakipy_data.df[self.tabulator_columns], + editors=self.tabulator_non_editable_columns, ) return self.tablulator_widget @@ -321,10 +320,7 @@ def setup_plot(self): self.contour_start = TextInput( value="%.2e" % self.thres, title="Contour level:", width=100 ) - # contour_factor = Slider(title="contour factor", value=1.20, start=1., end=2.,step=0.05) self.contour_start.on_change("value", self.update_contour) - # for w in [contour_num,contour_start,contour_factor]: - # w.on_change("value",update_contour) #  plot mask outlines el = self.p.ellipse( @@ -540,13 +536,6 @@ def fit_selected(self, event): selectionIndex = self.source.selected.indices current = self.peakipy_data.df.iloc[selectionIndex] - # self.peakipy_data.df.loc[selectionIndex, "X_RADIUS_PPM"] = ( - # self.slider_X_RADIUS.value - # ) - # self.peakipy_data.df.loc[selectionIndex, "Y_RADIUS_PPM"] = ( - # self.slider_Y_RADIUS.value - # ) - self.peakipy_data.df.loc[selectionIndex, "X_DIAMETER_PPM"] = ( current["X_RADIUS_PPM"] * 2.0 ) diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py index de63f8a4..04b853d0 100644 --- a/peakipy/cli/edit_panel.py +++ b/peakipy/cli/edit_panel.py @@ -102,8 +102,8 @@ def fit_peaks_button_click(event): button.on_click(fit_peaks_button_click) def update_source_selected_indices(event): - print(event) - print(bs.tablulator_widget.selection) + # print(event) + # 
print(bs.tablulator_widget.selection) bs.source.selected.indices = bs.tablulator_widget.selection bs.tablulator_widget.on_click(update_source_selected_indices) diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 2ecb064c..849ab610 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -215,10 +215,6 @@ def read( """ - # verbose_mode = args.get("--verb") - # if verbose_mode: - # print("Using arguments:", args) - clust_args = { "struc_el": struc_el, "struc_size": struc_size, @@ -241,7 +237,6 @@ def read( posF1=y_ppm_column_name, posF2=x_ppm_column_name, ) - # peaks.adaptive_clusters(block_size=151,offset=0) case peaklist_format.a3: peaks = Peaklist( @@ -292,9 +287,6 @@ def read( if fuda: peaks.to_fuda() - # if verbose_mode: - # print(data.head()) - match outfmt.value: case "csv": outname = outname.with_suffix(".csv") @@ -1053,12 +1045,10 @@ def create_matplotlib_figure( # axes will appear inverted ax.view_init(30, 120) - # names = ",".join(plane.assignment) title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane_lineshape_parameters.clustid.iloc[0]}" plt.title(title) print(f"[green]Plotting: {title}[/green]") out_str = "Volumes (Heights)\n===========\n" - # chi2s = [] for _, row in plot_data.plane_lineshape_parameters.iterrows(): out_str += f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n" if label: @@ -1199,10 +1189,23 @@ def create_plotly_surfaces(plot_data: PlottingDataForPlane): def create_residual_contours(plot_data: PlottingDataForPlane): - data = [] - contours = go.Contour(x=plot_data.x_plot, y=plot_data.y_plot, z=plot_data.residual) - data.append(contours) - return data + contours = go.Contour( + x=plot_data.x_plot[0], y=plot_data.y_plot.T[0], z=plot_data.residual + ) + return contours + + +def create_residual_figure(plot_data: PlottingDataForPlane): + data = create_residual_contours(plot_data) + fig = go.Figure(data=data) + fig.update_layout( + title="Fit residuals", + xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", + yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", + xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), + yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), + ) + return fig def create_plotly_figure(plot_data: PlottingDataForPlane): @@ -1210,9 +1213,6 @@ def create_plotly_figure(plot_data: PlottingDataForPlane): surfaces = create_plotly_surfaces(plot_data) # residuals = create_residual_contours(plot_data) fig = go.Figure(data=lines + surfaces) - # layout = go.Layout(showlegend=True) - # fig.update_layout(layout) - # fig.update_traces(showlegend=True) fig = update_axis_ranges(fig, plot_data) return fig @@ -1222,6 +1222,8 @@ def update_axis_ranges(fig, plot_data: PlottingDataForPlane): scene=dict( xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), + xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", + yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", annotations=make_annotations(plot_data), ) ) @@ -1474,41 +1476,19 @@ def check( # fig = create_plotly_figure(plot_data) if plotly: fig = create_plotly_figure(plot_data) - return fig + residual_fig = create_residual_figure(plot_data) + return fig, residual_fig else: plt = matplotlib.pyplot create_matplotlib_figure( plot_data, pdf, individual, label, ccpn_flag, show ) - # surf = pn.pane.plotly.Plotly(fig) - # app = pn.Column(surf) - # app.show(threaded=True) if first: break run_log() -@app.command(help="Interactive Bokeh dashboard for configuring fitting parameters") -def edit( - 
peaklist_path: Path, - data_path: Path, - test: bool = False, -): - from bokeh.util.browser import view - from bokeh.server.server import Server - from .edit import BokehScript - - run_log() - bs = BokehScript(peaklist_path=peaklist_path, data_path=data_path) - if not test: - server = Server({"/edit": bs.init}) - server.start() - print("[green]Opening peakipy: Edit fits on http://localhost:5006/edit[/green]") - server.io_loop.add_callback(server.show, "/edit") - server.io_loop.start() - - def make_yaml_file(name, yaml_file=yaml_file): if os.path.exists(name): print(f"Copying {name} to {name}.bak") diff --git a/peakipy/core.py b/peakipy/core.py index 4d4dad95..f5b2084f 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -23,7 +23,7 @@ import json from datetime import datetime from pathlib import Path -from typing import List, Optional +from typing import List from enum import Enum from dataclasses import dataclass, field @@ -1515,28 +1515,6 @@ def clusters( ) return ClustersResult(labeled_array, num_features, closed_data, peaks) - # def adaptive_clusters(self, block_size, offset, l_struc=None): - - # self.thresh = threshold_otsu(self.data[0]) - - # peaks = [[y, x] for y, x in zip(self.df.Y_AXIS, self.df.X_AXIS)] - - # binary_adaptive = threshold_adaptive( - # self.data[0], block_size=block_size, offset=offset - # ) - - # labeled_array, num_features = ndimage.label(binary_adaptive, l_struc) - # # print(labeled_array, num_features) - - # self.df["CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] - - # #  renumber "0" clusters - # max_clustid = self.df["CLUSTID"].max() - # n_of_zeros = len(self.df[self.df["CLUSTID"] == 0]["CLUSTID"]) - # self.df.loc[self.df[self.df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( - # max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int - # ) - def mask_method(self, overlap=1.0, l_struc=None): """connect clusters based on overlap of fitting masks diff --git a/poetry.lock b/poetry.lock index c86b7abc..2057e006 100644 --- a/poetry.lock +++ b/poetry.lock @@ -13,13 +13,13 @@ files = [ [[package]] name = "asteval" -version = "0.9.31" +version = "0.9.32" description = "Safe, minimalistic evaluator of python expression using ast module" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "asteval-0.9.31-py3-none-any.whl", hash = "sha256:2761750c184d97707c292b62df3b10e330a809a2201721acc435a2b89a114263"}, - {file = "asteval-0.9.31.tar.gz", hash = "sha256:a2da066b6696dba9835c5f7dec63e0ffb5bd2b4e3bb5f0b9a604aeafb17d833d"}, + {file = "asteval-0.9.32-py3-none-any.whl", hash = "sha256:4d0da45a15f15eeb88bb53cf4c352591ccb00f00f81f74649fd7084519adc3fe"}, + {file = "asteval-0.9.32.tar.gz", hash = "sha256:3bef25a973d378fda21c83a38c6292c4d0d94773f49f42073e69dbb19932bb74"}, ] [package.extras] @@ -357,63 +357,63 @@ test-no-images = ["pytest", "pytest-cov", "pytest-xdist", "wurlitzer"] [[package]] name = "coverage" -version = "7.4.3" +version = "7.4.4" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8580b827d4746d47294c0e0b92854c85a92c2227927433998f0d3320ae8a71b6"}, - {file = "coverage-7.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:718187eeb9849fc6cc23e0d9b092bc2348821c5e1a901c9f8975df0bc785bfd4"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:767b35c3a246bcb55b8044fd3a43b8cd553dd1f9f2c1eeb87a302b1f8daa0524"}, - {file = 
"coverage-7.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae7f19afe0cce50039e2c782bff379c7e347cba335429678450b8fe81c4ef96d"}, - {file = "coverage-7.4.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba3a8aaed13770e970b3df46980cb068d1c24af1a1968b7818b69af8c4347efb"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ee866acc0861caebb4f2ab79f0b94dbfbdbfadc19f82e6e9c93930f74e11d7a0"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:506edb1dd49e13a2d4cac6a5173317b82a23c9d6e8df63efb4f0380de0fbccbc"}, - {file = "coverage-7.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd6545d97c98a192c5ac995d21c894b581f1fd14cf389be90724d21808b657e2"}, - {file = "coverage-7.4.3-cp310-cp310-win32.whl", hash = "sha256:f6a09b360d67e589236a44f0c39218a8efba2593b6abdccc300a8862cffc2f94"}, - {file = "coverage-7.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:18d90523ce7553dd0b7e23cbb28865db23cddfd683a38fb224115f7826de78d0"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbbe5e739d45a52f3200a771c6d2c7acf89eb2524890a4a3aa1a7fa0695d2a47"}, - {file = "coverage-7.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:489763b2d037b164846ebac0cbd368b8a4ca56385c4090807ff9fad817de4113"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:451f433ad901b3bb00184d83fd83d135fb682d780b38af7944c9faeecb1e0bfe"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcc66e222cf4c719fe7722a403888b1f5e1682d1679bd780e2b26c18bb648cdc"}, - {file = "coverage-7.4.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ec74cfef2d985e145baae90d9b1b32f85e1741b04cd967aaf9cfa84c1334f3"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:abbbd8093c5229c72d4c2926afaee0e6e3140de69d5dcd918b2921f2f0c8baba"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:35eb581efdacf7b7422af677b92170da4ef34500467381e805944a3201df2079"}, - {file = "coverage-7.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8249b1c7334be8f8c3abcaaa996e1e4927b0e5a23b65f5bf6cfe3180d8ca7840"}, - {file = "coverage-7.4.3-cp311-cp311-win32.whl", hash = "sha256:cf30900aa1ba595312ae41978b95e256e419d8a823af79ce670835409fc02ad3"}, - {file = "coverage-7.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:18c7320695c949de11a351742ee001849912fd57e62a706d83dfc1581897fa2e"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b51bfc348925e92a9bd9b2e48dad13431b57011fd1038f08316e6bf1df107d10"}, - {file = "coverage-7.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d6cdecaedea1ea9e033d8adf6a0ab11107b49571bbb9737175444cea6eb72328"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b2eccb883368f9e972e216c7b4c7c06cabda925b5f06dde0650281cb7666a30"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c00cdc8fa4e50e1cc1f941a7f2e3e0f26cb2a1233c9696f26963ff58445bac7"}, - {file = "coverage-7.4.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b9a4a8dd3dcf4cbd3165737358e4d7dfbd9d59902ad11e3b15eebb6393b0446e"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:062b0a75d9261e2f9c6d071753f7eef0fc9caf3a2c82d36d76667ba7b6470003"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:ebe7c9e67a2d15fa97b77ea6571ce5e1e1f6b0db71d1d5e96f8d2bf134303c1d"}, - {file = "coverage-7.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c0a120238dd71c68484f02562f6d446d736adcc6ca0993712289b102705a9a3a"}, - {file = "coverage-7.4.3-cp312-cp312-win32.whl", hash = "sha256:37389611ba54fd6d278fde86eb2c013c8e50232e38f5c68235d09d0a3f8aa352"}, - {file = "coverage-7.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:d25b937a5d9ffa857d41be042b4238dd61db888533b53bc76dc082cb5a15e914"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:28ca2098939eabab044ad68850aac8f8db6bf0b29bc7f2887d05889b17346454"}, - {file = "coverage-7.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:280459f0a03cecbe8800786cdc23067a8fc64c0bd51dc614008d9c36e1659d7e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0cdedd3500e0511eac1517bf560149764b7d8e65cb800d8bf1c63ebf39edd2"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9babb9466fe1da12417a4aed923e90124a534736de6201794a3aea9d98484e"}, - {file = "coverage-7.4.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dec9de46a33cf2dd87a5254af095a409ea3bf952d85ad339751e7de6d962cde6"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:16bae383a9cc5abab9bb05c10a3e5a52e0a788325dc9ba8499e821885928968c"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2c854ce44e1ee31bda4e318af1dbcfc929026d12c5ed030095ad98197eeeaed0"}, - {file = "coverage-7.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ce8c50520f57ec57aa21a63ea4f325c7b657386b3f02ccaedeccf9ebe27686e1"}, - {file = "coverage-7.4.3-cp38-cp38-win32.whl", hash = "sha256:708a3369dcf055c00ddeeaa2b20f0dd1ce664eeabde6623e516c5228b753654f"}, - {file = "coverage-7.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:1bf25fbca0c8d121a3e92a2a0555c7e5bc981aee5c3fdaf4bb7809f410f696b9"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b253094dbe1b431d3a4ac2f053b6d7ede2664ac559705a704f621742e034f1f"}, - {file = "coverage-7.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77fbfc5720cceac9c200054b9fab50cb2a7d79660609200ab83f5db96162d20c"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6679060424faa9c11808598504c3ab472de4531c571ab2befa32f4971835788e"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4af154d617c875b52651dd8dd17a31270c495082f3d55f6128e7629658d63765"}, - {file = "coverage-7.4.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8640f1fde5e1b8e3439fe482cdc2b0bb6c329f4bb161927c28d2e8879c6029ee"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:69b9f6f66c0af29642e73a520b6fed25ff9fd69a25975ebe6acb297234eda501"}, - {file = "coverage-7.4.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0842571634f39016a6c03e9d4aba502be652a6e4455fadb73cd3a3a49173e38f"}, - {file = 
"coverage-7.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ed23b08e8ab524551f52953a8a05d61c3a760781762aac49f8de6eede8c45"}, - {file = "coverage-7.4.3-cp39-cp39-win32.whl", hash = "sha256:c0524de3ff096e15fcbfe8f056fdb4ea0bf497d584454f344d59fce069d3e6e9"}, - {file = "coverage-7.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:0209a6369ccce576b43bb227dc8322d8ef9e323d089c6f3f26a597b09cb4d2aa"}, - {file = "coverage-7.4.3-pp38.pp39.pp310-none-any.whl", hash = "sha256:7cbde573904625509a3f37b6fecea974e363460b556a627c60dc2f47e2fffa51"}, - {file = "coverage-7.4.3.tar.gz", hash = "sha256:276f6077a5c61447a48d133ed13e759c09e62aff0dc84274a68dc18660104d52"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0be5efd5127542ef31f165de269f77560d6cdef525fffa446de6f7e9186cfb2"}, + {file = "coverage-7.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ccd341521be3d1b3daeb41960ae94a5e87abe2f46f17224ba5d6f2b8398016cf"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fa497a8ab37784fbb20ab699c246053ac294d13fc7eb40ec007a5043ec91f8"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b1a93009cb80730c9bca5d6d4665494b725b6e8e157c1cb7f2db5b4b122ea562"}, + {file = "coverage-7.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:690db6517f09336559dc0b5f55342df62370a48f5469fabf502db2c6d1cffcd2"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:09c3255458533cb76ef55da8cc49ffab9e33f083739c8bd4f58e79fecfe288f7"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8ce1415194b4a6bd0cdcc3a1dfbf58b63f910dcb7330fe15bdff542c56949f87"}, + {file = "coverage-7.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b91cbc4b195444e7e258ba27ac33769c41b94967919f10037e6355e998af255c"}, + {file = "coverage-7.4.4-cp310-cp310-win32.whl", hash = "sha256:598825b51b81c808cb6f078dcb972f96af96b078faa47af7dfcdf282835baa8d"}, + {file = "coverage-7.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:09ef9199ed6653989ebbcaacc9b62b514bb63ea2f90256e71fea3ed74bd8ff6f"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f9f50e7ef2a71e2fae92774c99170eb8304e3fdf9c8c3c7ae9bab3e7229c5cf"}, + {file = "coverage-7.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:623512f8ba53c422fcfb2ce68362c97945095b864cda94a92edbaf5994201083"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0513b9508b93da4e1716744ef6ebc507aff016ba115ffe8ecff744d1322a7b63"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40209e141059b9370a2657c9b15607815359ab3ef9918f0196b6fccce8d3230f"}, + {file = "coverage-7.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a2b2b78c78293782fd3767d53e6474582f62443d0504b1554370bde86cc8227"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:73bfb9c09951125d06ee473bed216e2c3742f530fc5acc1383883125de76d9cd"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f384c3cc76aeedce208643697fb3e8437604b512255de6d18dae3f27655a384"}, + {file = "coverage-7.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:54eb8d1bf7cacfbf2a3186019bcf01d11c666bd495ed18717162f7eb1e9dd00b"}, + {file = "coverage-7.4.4-cp311-cp311-win32.whl", hash = "sha256:cac99918c7bba15302a2d81f0312c08054a3359eaa1929c7e4b26ebe41e9b286"}, + {file = "coverage-7.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:b14706df8b2de49869ae03a5ccbc211f4041750cd4a66f698df89d44f4bd30ec"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:201bef2eea65e0e9c56343115ba3814e896afe6d36ffd37bab783261db430f76"}, + {file = "coverage-7.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41c9c5f3de16b903b610d09650e5e27adbfa7f500302718c9ffd1c12cf9d6818"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d898fe162d26929b5960e4e138651f7427048e72c853607f2b200909794ed978"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ea79bb50e805cd6ac058dfa3b5c8f6c040cb87fe83de10845857f5535d1db70"}, + {file = "coverage-7.4.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce4b94265ca988c3f8e479e741693d143026632672e3ff924f25fab50518dd51"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:00838a35b882694afda09f85e469c96367daa3f3f2b097d846a7216993d37f4c"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:fdfafb32984684eb03c2d83e1e51f64f0906b11e64482df3c5db936ce3839d48"}, + {file = "coverage-7.4.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:69eb372f7e2ece89f14751fbcbe470295d73ed41ecd37ca36ed2eb47512a6ab9"}, + {file = "coverage-7.4.4-cp312-cp312-win32.whl", hash = "sha256:137eb07173141545e07403cca94ab625cc1cc6bc4c1e97b6e3846270e7e1fea0"}, + {file = "coverage-7.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:d71eec7d83298f1af3326ce0ff1d0ea83c7cb98f72b577097f9083b20bdaf05e"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d5ae728ff3b5401cc320d792866987e7e7e880e6ebd24433b70a33b643bb0384"}, + {file = "coverage-7.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cc4f1358cb0c78edef3ed237ef2c86056206bb8d9140e73b6b89fbcfcbdd40e1"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8130a2aa2acb8788e0b56938786c33c7c98562697bf9f4c7d6e8e5e3a0501e4a"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf271892d13e43bc2b51e6908ec9a6a5094a4df1d8af0bfc360088ee6c684409"}, + {file = "coverage-7.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4cdc86d54b5da0df6d3d3a2f0b710949286094c3a6700c21e9015932b81447e"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae71e7ddb7a413dd60052e90528f2f65270aad4b509563af6d03d53e979feafd"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:38dd60d7bf242c4ed5b38e094baf6401faa114fc09e9e6632374388a404f98e7"}, + {file = "coverage-7.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa5b1c1bfc28384f1f53b69a023d789f72b2e0ab1b3787aae16992a7ca21056c"}, + {file = "coverage-7.4.4-cp38-cp38-win32.whl", hash = "sha256:dfa8fe35a0bb90382837b238fff375de15f0dcdb9ae68ff85f7a63649c98527e"}, + {file = "coverage-7.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:b2991665420a803495e0b90a79233c1433d6ed77ef282e8e152a324bbbc5e0c8"}, + {file = 
"coverage-7.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3b799445b9f7ee8bf299cfaed6f5b226c0037b74886a4e11515e569b36fe310d"}, + {file = "coverage-7.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b4d33f418f46362995f1e9d4f3a35a1b6322cb959c31d88ae56b0298e1c22357"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aadacf9a2f407a4688d700e4ebab33a7e2e408f2ca04dbf4aef17585389eff3e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c95949560050d04d46b919301826525597f07b33beba6187d04fa64d47ac82e"}, + {file = "coverage-7.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff7687ca3d7028d8a5f0ebae95a6e4827c5616b31a4ee1192bdfde697db110d4"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fc1de20b2d4a061b3df27ab9b7c7111e9a710f10dc2b84d33a4ab25065994ec"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c74880fc64d4958159fbd537a091d2a585448a8f8508bf248d72112723974cbd"}, + {file = "coverage-7.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:742a76a12aa45b44d236815d282b03cfb1de3b4323f3e4ec933acfae08e54ade"}, + {file = "coverage-7.4.4-cp39-cp39-win32.whl", hash = "sha256:d89d7b2974cae412400e88f35d86af72208e1ede1a541954af5d944a8ba46c57"}, + {file = "coverage-7.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:9ca28a302acb19b6af89e90f33ee3e1906961f94b54ea37de6737b7ca9d8827c"}, + {file = "coverage-7.4.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b2c5edc4ac10a7ef6605a966c58929ec6c1bd0917fb8c15cb3363f65aa40e677"}, + {file = "coverage-7.4.4.tar.gz", hash = "sha256:c901df83d097649e257e803be22592aedfd5182f07b3cc87d640bbb9afd50f49"}, ] [package.extras] @@ -502,53 +502,53 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "fonttools" -version = "4.49.0" +version = "4.50.0" description = "Tools to manipulate font files" optional = false python-versions = ">=3.8" files = [ - {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d970ecca0aac90d399e458f0b7a8a597e08f95de021f17785fb68e2dc0b99717"}, - {file = "fonttools-4.49.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac9a745b7609f489faa65e1dc842168c18530874a5f5b742ac3dd79e26bca8bc"}, - {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ba0e00620ca28d4ca11fc700806fd69144b463aa3275e1b36e56c7c09915559"}, - {file = "fonttools-4.49.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdee3ab220283057e7840d5fb768ad4c2ebe65bdba6f75d5d7bf47f4e0ed7d29"}, - {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ce7033cb61f2bb65d8849658d3786188afd80f53dad8366a7232654804529532"}, - {file = "fonttools-4.49.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:07bc5ea02bb7bc3aa40a1eb0481ce20e8d9b9642a9536cde0218290dd6085828"}, - {file = "fonttools-4.49.0-cp310-cp310-win32.whl", hash = "sha256:86eef6aab7fd7c6c8545f3ebd00fd1d6729ca1f63b0cb4d621bccb7d1d1c852b"}, - {file = "fonttools-4.49.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fac1b7eebfce75ea663e860e7c5b4a8831b858c17acd68263bc156125201abf"}, - {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:edc0cce355984bb3c1d1e89d6a661934d39586bb32191ebff98c600f8957c63e"}, - {file = "fonttools-4.49.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:83a0d9336de2cba86d886507dd6e0153df333ac787377325a39a2797ec529814"}, - {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36c8865bdb5cfeec88f5028e7e592370a0657b676c6f1d84a2108e0564f90e22"}, - {file = "fonttools-4.49.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33037d9e56e2562c710c8954d0f20d25b8386b397250d65581e544edc9d6b942"}, - {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8fb022d799b96df3eaa27263e9eea306bd3d437cc9aa981820850281a02b6c9a"}, - {file = "fonttools-4.49.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33c584c0ef7dc54f5dd4f84082eabd8d09d1871a3d8ca2986b0c0c98165f8e86"}, - {file = "fonttools-4.49.0-cp311-cp311-win32.whl", hash = "sha256:cbe61b158deb09cffdd8540dc4a948d6e8f4d5b4f3bf5cd7db09bd6a61fee64e"}, - {file = "fonttools-4.49.0-cp311-cp311-win_amd64.whl", hash = "sha256:fc11e5114f3f978d0cea7e9853627935b30d451742eeb4239a81a677bdee6bf6"}, - {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d647a0e697e5daa98c87993726da8281c7233d9d4ffe410812a4896c7c57c075"}, - {file = "fonttools-4.49.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f3bbe672df03563d1f3a691ae531f2e31f84061724c319652039e5a70927167e"}, - {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bebd91041dda0d511b0d303180ed36e31f4f54b106b1259b69fade68413aa7ff"}, - {file = "fonttools-4.49.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4145f91531fd43c50f9eb893faa08399816bb0b13c425667c48475c9f3a2b9b5"}, - {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ea329dafb9670ffbdf4dbc3b0e5c264104abcd8441d56de77f06967f032943cb"}, - {file = "fonttools-4.49.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c076a9e548521ecc13d944b1d261ff3d7825048c338722a4bd126d22316087b7"}, - {file = "fonttools-4.49.0-cp312-cp312-win32.whl", hash = "sha256:b607ea1e96768d13be26d2b400d10d3ebd1456343eb5eaddd2f47d1c4bd00880"}, - {file = "fonttools-4.49.0-cp312-cp312-win_amd64.whl", hash = "sha256:a974c49a981e187381b9cc2c07c6b902d0079b88ff01aed34695ec5360767034"}, - {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b85ec0bdd7bdaa5c1946398cbb541e90a6dfc51df76dfa88e0aaa41b335940cb"}, - {file = "fonttools-4.49.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:af20acbe198a8a790618ee42db192eb128afcdcc4e96d99993aca0b60d1faeb4"}, - {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d418b1fee41a1d14931f7ab4b92dc0bc323b490e41d7a333eec82c9f1780c75"}, - {file = "fonttools-4.49.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b44a52b8e6244b6548851b03b2b377a9702b88ddc21dcaf56a15a0393d425cb9"}, - {file = "fonttools-4.49.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7c7125068e04a70739dad11857a4d47626f2b0bd54de39e8622e89701836eabd"}, - {file = "fonttools-4.49.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29e89d0e1a7f18bc30f197cfadcbef5a13d99806447c7e245f5667579a808036"}, - {file = "fonttools-4.49.0-cp38-cp38-win32.whl", hash = "sha256:9d95fa0d22bf4f12d2fb7b07a46070cdfc19ef5a7b1c98bc172bfab5bf0d6844"}, - {file = "fonttools-4.49.0-cp38-cp38-win_amd64.whl", hash = "sha256:768947008b4dc552d02772e5ebd49e71430a466e2373008ce905f953afea755a"}, - {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:08877e355d3dde1c11973bb58d4acad1981e6d1140711230a4bfb40b2b937ccc"}, - {file = "fonttools-4.49.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fdb54b076f25d6b0f0298dc706acee5052de20c83530fa165b60d1f2e9cbe3cb"}, - {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0af65c720520710cc01c293f9c70bd69684365c6015cc3671db2b7d807fe51f2"}, - {file = "fonttools-4.49.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f255ce8ed7556658f6d23f6afd22a6d9bbc3edb9b96c96682124dc487e1bf42"}, - {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d00af0884c0e65f60dfaf9340e26658836b935052fdd0439952ae42e44fdd2be"}, - {file = "fonttools-4.49.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:263832fae27481d48dfafcc43174644b6706639661e242902ceb30553557e16c"}, - {file = "fonttools-4.49.0-cp39-cp39-win32.whl", hash = "sha256:0404faea044577a01bb82d47a8fa4bc7a54067fa7e324785dd65d200d6dd1133"}, - {file = "fonttools-4.49.0-cp39-cp39-win_amd64.whl", hash = "sha256:b050d362df50fc6e38ae3954d8c29bf2da52be384649ee8245fdb5186b620836"}, - {file = "fonttools-4.49.0-py3-none-any.whl", hash = "sha256:af281525e5dd7fa0b39fb1667b8d5ca0e2a9079967e14c4bfe90fd1cd13e0f18"}, - {file = "fonttools-4.49.0.tar.gz", hash = "sha256:ebf46e7f01b7af7861310417d7c49591a85d99146fc23a5ba82fdb28af156321"}, + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effd303fb422f8ce06543a36ca69148471144c534cc25f30e5be752bc4f46736"}, + {file = "fonttools-4.50.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7913992ab836f621d06aabac118fc258b9947a775a607e1a737eb3a91c360335"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0a1c5bd2f63da4043b63888534b52c5a1fd7ae187c8ffc64cbb7ae475b9dab"}, + {file = "fonttools-4.50.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40fc98540fa5360e7ecf2c56ddf3c6e7dd04929543618fd7b5cc76e66390562"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fff65fbb7afe137bac3113827855e0204482727bddd00a806034ab0d3951d0d"}, + {file = "fonttools-4.50.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1aeae3dd2ee719074a9372c89ad94f7c581903306d76befdaca2a559f802472"}, + {file = "fonttools-4.50.0-cp310-cp310-win32.whl", hash = "sha256:e9623afa319405da33b43c85cceb0585a6f5d3a1d7c604daf4f7e1dd55c03d1f"}, + {file = "fonttools-4.50.0-cp310-cp310-win_amd64.whl", hash = "sha256:778c5f43e7e654ef7fe0605e80894930bc3a7772e2f496238e57218610140f54"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3dfb102e7f63b78c832e4539969167ffcc0375b013080e6472350965a5fe8048"}, + {file = "fonttools-4.50.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e58fe34cb379ba3d01d5d319d67dd3ce7ca9a47ad044ea2b22635cd2d1247fc"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c673ab40d15a442a4e6eb09bf007c1dda47c84ac1e2eecbdf359adacb799c24"}, + {file = "fonttools-4.50.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b3ac35cdcd1a4c90c23a5200212c1bb74fa05833cc7c14291d7043a52ca2aaa"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8844e7a2c5f7ecf977e82eb6b3014f025c8b454e046d941ece05b768be5847ae"}, + {file = "fonttools-4.50.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f849bd3c5c2249b49c98eca5aaebb920d2bfd92b3c69e84ca9bddf133e9f83f0"}, + 
{file = "fonttools-4.50.0-cp311-cp311-win32.whl", hash = "sha256:39293ff231b36b035575e81c14626dfc14407a20de5262f9596c2cbb199c3625"}, + {file = "fonttools-4.50.0-cp311-cp311-win_amd64.whl", hash = "sha256:c33d5023523b44d3481624f840c8646656a1def7630ca562f222eb3ead16c438"}, + {file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b4a886a6dbe60100ba1cd24de962f8cd18139bd32808da80de1fa9f9f27bf1dc"}, + {file = "fonttools-4.50.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b2ca1837bfbe5eafa11313dbc7edada79052709a1fffa10cea691210af4aa1fa"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0493dd97ac8977e48ffc1476b932b37c847cbb87fd68673dee5182004906828"}, + {file = "fonttools-4.50.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77844e2f1b0889120b6c222fc49b2b75c3d88b930615e98893b899b9352a27ea"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3566bfb8c55ed9100afe1ba6f0f12265cd63a1387b9661eb6031a1578a28bad1"}, + {file = "fonttools-4.50.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:35e10ddbc129cf61775d58a14f2d44121178d89874d32cae1eac722e687d9019"}, + {file = "fonttools-4.50.0-cp312-cp312-win32.whl", hash = "sha256:cc8140baf9fa8f9b903f2b393a6c413a220fa990264b215bf48484f3d0bf8710"}, + {file = "fonttools-4.50.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ccc85fd96373ab73c59833b824d7a73846670a0cb1f3afbaee2b2c426a8f931"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e270a406219af37581d96c810172001ec536e29e5593aa40d4c01cca3e145aa6"}, + {file = "fonttools-4.50.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac2463de667233372e9e1c7e9de3d914b708437ef52a3199fdbf5a60184f190c"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47abd6669195abe87c22750dbcd366dc3a0648f1b7c93c2baa97429c4dc1506e"}, + {file = "fonttools-4.50.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:074841375e2e3d559aecc86e1224caf78e8b8417bb391e7d2506412538f21adc"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0743fd2191ad7ab43d78cd747215b12033ddee24fa1e088605a3efe80d6984de"}, + {file = "fonttools-4.50.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3d7080cce7be5ed65bee3496f09f79a82865a514863197ff4d4d177389e981b0"}, + {file = "fonttools-4.50.0-cp38-cp38-win32.whl", hash = "sha256:a467ba4e2eadc1d5cc1a11d355abb945f680473fbe30d15617e104c81f483045"}, + {file = "fonttools-4.50.0-cp38-cp38-win_amd64.whl", hash = "sha256:f77e048f805e00870659d6318fd89ef28ca4ee16a22b4c5e1905b735495fc422"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b6245eafd553c4e9a0708e93be51392bd2288c773523892fbd616d33fd2fda59"}, + {file = "fonttools-4.50.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a4062cc7e8de26f1603323ef3ae2171c9d29c8a9f5e067d555a2813cd5c7a7e0"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34692850dfd64ba06af61e5791a441f664cb7d21e7b544e8f385718430e8f8e4"}, + {file = "fonttools-4.50.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678dd95f26a67e02c50dcb5bf250f95231d455642afbc65a3b0bcdacd4e4dd38"}, + {file = "fonttools-4.50.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f2ce7b0b295fe64ac0a85aef46a0f2614995774bd7bc643b85679c0283287f9"}, + {file = 
"fonttools-4.50.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d346f4dc2221bfb7ab652d1e37d327578434ce559baf7113b0f55768437fe6a0"}, + {file = "fonttools-4.50.0-cp39-cp39-win32.whl", hash = "sha256:a51eeaf52ba3afd70bf489be20e52fdfafe6c03d652b02477c6ce23c995222f4"}, + {file = "fonttools-4.50.0-cp39-cp39-win_amd64.whl", hash = "sha256:8639be40d583e5d9da67795aa3eeeda0488fb577a1d42ae11a5036f18fb16d93"}, + {file = "fonttools-4.50.0-py3-none-any.whl", hash = "sha256:48fa36da06247aa8282766cfd63efff1bb24e55f020f29a335939ed3844d20d3"}, + {file = "fonttools-4.50.0.tar.gz", hash = "sha256:fa5cf61058c7dbb104c2ac4e782bf1b2016a8cf2f69de6e4dd6a865d2c969bb5"}, ] [package.extras] @@ -663,13 +663,13 @@ files = [ [[package]] name = "ipython" -version = "8.22.1" +version = "8.22.2" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.10" files = [ - {file = "ipython-8.22.1-py3-none-any.whl", hash = "sha256:869335e8cded62ffb6fac8928e5287a05433d6462e3ebaac25f4216474dd6bc4"}, - {file = "ipython-8.22.1.tar.gz", hash = "sha256:39c6f9efc079fb19bfb0f17eee903978fe9a290b1b82d68196c641cecb76ea22"}, + {file = "ipython-8.22.2-py3-none-any.whl", hash = "sha256:3c86f284c8f3d8f2b6c662f885c4889a91df7cd52056fd02b7d8d6195d7f56e9"}, + {file = "ipython-8.22.2.tar.gz", hash = "sha256:2dcaad9049f9056f1fef63514f176c7d41f930daa78d05b82a176202818f2c14"}, ] [package.dependencies] @@ -906,13 +906,13 @@ test = ["coverage", "flaky", "pytest", "pytest-cov"] [[package]] name = "markdown" -version = "3.5.2" +version = "3.6" description = "Python implementation of John Gruber's Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd"}, - {file = "Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8"}, + {file = "Markdown-3.6-py3-none-any.whl", hash = "sha256:48f276f4d8cfb8ce6527c8f79e2ee29708508bf4d40aa410fbc3b4ee832c850f"}, + {file = "Markdown-3.6.tar.gz", hash = "sha256:ed4f41f6daecbeeb96e576ce414c41d2d876daa9a16cb35fa8ed8c2ddfad0224"}, ] [package.extras] @@ -1302,13 +1302,13 @@ files = [ [[package]] name = "packaging" -version = "23.2" +version = "24.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] @@ -1602,13 +1602,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- [[package]] name = "plotly" -version = "5.19.0" +version = "5.20.0" description = "An open-source, interactive data visualization library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "plotly-5.19.0-py3-none-any.whl", hash = "sha256:906abcc5f15945765328c5d47edaa884bc99f5985fbc61e8cd4dc361f4ff8f5a"}, - {file = "plotly-5.19.0.tar.gz", hash = "sha256:5ea91a56571292ade3e3bc9bf712eba0b95a1fb0a941375d978cc79432e055f4"}, + {file = "plotly-5.20.0-py3-none-any.whl", hash = 
"sha256:837a9c8aa90f2c0a2f0d747b82544d014dc2a2bdde967b5bb1da25b53932d1a9"}, + {file = "plotly-5.20.0.tar.gz", hash = "sha256:bf901c805d22032cfa534b2ff7c5aa6b0659e037f19ec1e0cca7f585918b5c89"}, ] [package.dependencies] @@ -1689,13 +1689,13 @@ tests = ["pytest"] [[package]] name = "pydantic" -version = "2.6.3" +version = "2.6.4" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.3-py3-none-any.whl", hash = "sha256:72c6034df47f46ccdf81869fddb81aade68056003900a8724a4f160700016a2a"}, - {file = "pydantic-2.6.3.tar.gz", hash = "sha256:e07805c4c7f5c6826e33a1d4c9d47950d7eaf34868e2690f8594d2e30241f11f"}, + {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"}, + {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"}, ] [package.dependencies] @@ -1814,13 +1814,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.7" +version = "10.7.1" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.7-py3-none-any.whl", hash = "sha256:6ca215bc57bc12bf32b414887a68b810637d039124ed9b2e5bd3325cbb2c050c"}, - {file = "pymdown_extensions-10.7.tar.gz", hash = "sha256:c0d64d5cf62566f59e6b2b690a4095c931107c250a8c8e1351c1de5f6b036deb"}, + {file = "pymdown_extensions-10.7.1-py3-none-any.whl", hash = "sha256:f5cc7000d7ff0d1ce9395d216017fa4df3dde800afb1fb72d1c7d3fd35e710f4"}, + {file = "pymdown_extensions-10.7.1.tar.gz", hash = "sha256:c70e146bdd83c744ffc766b4671999796aba18842b268510a329f7f64700d584"}, ] [package.dependencies] @@ -1832,13 +1832,13 @@ extra = ["pygments (>=2.12)"] [[package]] name = "pyparsing" -version = "3.1.1" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, - {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -1868,13 +1868,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -2153,18 +2153,18 @@ test = ["asv", "gmpy2", "hypothesis", "mpmath", "pooch", "pytest", "pytest-cov", 
[[package]] name = "setuptools" -version = "69.1.1" +version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.1.1-py3-none-any.whl", hash = "sha256:02fa291a0471b3a18b2b2481ed902af520c69e8ae0919c13da936542754b4c56"}, - {file = "setuptools-69.1.1.tar.gz", hash = "sha256:5c0806c7d9af348e6dd3777b4f4dbb42c7ad85b190104837488eab9a7c945cf8"}, + {file = "setuptools-69.2.0-py3-none-any.whl", hash = "sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c"}, + {file = "setuptools-69.2.0.tar.gz", hash = "sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2330,18 +2330,18 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.14.1" +version = "5.14.2" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" files = [ - {file = "traitlets-5.14.1-py3-none-any.whl", hash = "sha256:2e5a030e6eff91737c643231bfcf04a65b0132078dad75e4936700b213652e74"}, - {file = "traitlets-5.14.1.tar.gz", hash = "sha256:8585105b371a04b8316a43d5ce29c098575c2e477850b62b848b964f1444527e"}, + {file = "traitlets-5.14.2-py3-none-any.whl", hash = "sha256:fcdf85684a772ddeba87db2f398ce00b40ff550d1528c03c14dbf6a02003cd80"}, + {file = "traitlets-5.14.2.tar.gz", hash = "sha256:8cdd83c040dab7d1dee822678e5f5d100b514f7b72b01615b26fc5718916fdf9"}, ] [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.1)", "pytest-mock", "pytest-mypy-testing"] [[package]] name = "typer" diff --git a/test/test_cli.py b/test/test_cli.py index 4b4b2062..95aec4b4 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -154,22 +154,3 @@ def test_check_main_with_pv_pv(protein_L): individual=True, ) 
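# A minimal usage sketch of the config helpers introduced by the refactoring
# commit below (load_config / write_config / update_config_file); the path and
# values here are hypothetical, not taken from the patch.
from pathlib import Path

from peakipy.core import load_config, update_config_file, write_config

config_path = Path("peakipy.config")
write_config(config_path, {"dims": [0, 1, 2], "noise": 8.5e3})
# update_config_file merges new key/value pairs into any existing config,
# so repeated `peakipy read` runs preserve earlier settings
update_config_file(config_path, {"thres": 1e6})
assert load_config(config_path)["noise"] == 8.5e3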
peakipy.cli.main.check(**args) - - -# def test_edit_with_default(protein_L): -# args = dict( -# peaklist_path=protein_L / Path("peaks.csv"), -# data_path=protein_L / Path("test1.ft2"), -# test=True, -# ) -# peakipy.cli.main.edit(**args) - - -# if __name__ == "__main__": - -# unittest.TestLoader.sortTestMethodsUsing = None -# unittest.main(verbosity=2) -# to_clean = ["test.csv", "peakipy.config", "run_log.txt", "fits.csv"] -# for i in to_clean: -# print(f"Deleting: {i}") -# shutil.rmtree(i) From e5f02201b1cf0a7495ab81572487747b7cdf3d9f Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Wed, 20 Mar 2024 22:50:39 -0400 Subject: [PATCH 27/37] refactored config code and added tests --- peakipy/cli/edit.py | 4 +-- peakipy/cli/fit.py | 9 ++---- peakipy/cli/main.py | 39 ++++++++++--------------- peakipy/core.py | 42 ++++++++++++++++++++------- test/test_core.py | 71 +++++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 122 insertions(+), 43 deletions(-) diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 47fbe766..080f5b02 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -34,7 +34,7 @@ from bokeh.plotting.contour import contour_data from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256, YlOrRd9 -from peakipy.core import LoadData, read_config, StrucEl +from peakipy.core import LoadData, update_args_with_values_from_config_file, StrucEl log_style = "overflow:scroll;" log_div = """
<div style=%s>%s</div>
""" @@ -44,7 +44,7 @@ class BokehScript: def __init__(self, peaklist_path: Path, data_path: Path): self._path = peaklist_path self._data_path = data_path - args, config = read_config({}) + args, config = update_args_with_values_from_config_file({}) self._dims = config.get("dims", [0, 1, 2]) self.thres = config.get("thres", 1e6) self._peakipy_data = LoadData( diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 854d9301..8d370898 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -33,15 +33,12 @@ ) console = Console() -# some constants π = np.pi sqrt2 = np.sqrt(2.0) -# temp and log paths + tmp_path = Path("tmp") tmp_path.mkdir(exist_ok=True) log_path = Path("log.txt") -# for printing dataframes -column_selection = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] @dataclass @@ -376,8 +373,6 @@ def refit_peak_cluster_with_constraints( plane_number, ) ) - # fit_report = fit_result.out.fit_report() - # log.write( return fit_results @@ -461,7 +456,6 @@ def prepare_group_of_peaks_for_fitting(clustid, group, fit_peaks_input: FitPeaks XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) weights = 1.0 / np.array([fit_peaks_input.args.noise] * len(np.ravel(peak_slices))) - # weights = 1.0 / np.ravel(peak_slices) return FitPeakClusterInput( args=fit_peaks_input.args, data=fit_peaks_input.data, @@ -534,6 +528,7 @@ def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaks cluster_df = fit_cluster_of_peaks(data_for_fitting) cluster_dfs.append(cluster_df) df = pd.concat(cluster_dfs, ignore_index=True) + df["lineshape"] = fit_input.args.lineshape.value if fit_input.args.vclist: diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 849ab610..46186429 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -54,7 +54,6 @@ Peaklist, run_log, LoadData, - read_config, pv_pv, pvoigt2d, voigt2d, @@ -68,6 +67,9 @@ PeaklistFormat, Lineshape, OutFmt, + write_config, + update_config_file, + update_args_with_values_from_config_file, get_limits_for_axis_in_points, deal_with_peaks_on_edge_of_spectrum, calculate_fwhm_for_voigt_lineshape, @@ -92,7 +94,7 @@ tmp_path.mkdir(exist_ok=True) log_path = Path("log.txt") # for printing dataframes -column_selection = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] +peaklist_columns_for_printing = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] bad_column_selection = [ "clustid", "amp", @@ -111,6 +113,8 @@ "red", "magenta", ] + + peaklist_path_help = "Path to peaklist" data_path_help = "Path to 2D or pseudo3D processed NMRPipe data (e.g. .ft2 or .ft3)" peaklist_format_help = "The format of your peaklist. This can be a2 for CCPN Analysis version 2 style, a3 for CCPN Analysis version 3, sparky, pipe for NMRPipe, or peakipy if you want to use a previously .csv peaklist from peakipy" @@ -296,7 +300,7 @@ def read( data.to_pickle(outname) # write config file - config_path = data_path.parent / Path("peakipy.config") + config_path = peaklist_path.parent / Path("peakipy.config") config_kvs = [ ("dims", dims), ("data_path", str(data_path)), @@ -306,28 +310,16 @@ def read( ("fit_method", "leastsq"), ] try: - if config_path.exists(): - with open(config_path) as opened_config: - config_dic = json.load(opened_config) - # update values in dict - config_dic.update(dict(config_kvs)) - - else: - # make a new config - config_dic = dict(config_kvs) + update_config_file(config_path, config_kvs) except json.decoder.JSONDecodeError: print( - f"Your {config_path} may be corrupted. 
Making new one (old one moved to {config_path}.bak)" + "\n" + + f"[yellow]Your {config_path} may be corrupted. Making new one (old one moved to {config_path}.bak)[/yellow]" ) shutil.copy(f"{config_path}", f"{config_path}.bak") config_dic = dict(config_kvs) - - with open(config_path, "w") as config: - # write json - # print(config_dic) - config.write(json.dumps(config_dic, sort_keys=True, indent=4)) - # json.dump(config_dic, fp=config, sort_keys=True, indent=4) + write_config(config_path, config_dic) run_log() @@ -506,7 +498,9 @@ def check_for_include_column_and_add_if_missing(peakipy_data): def remove_excluded_peaks(peakipy_data): if len(peakipy_data.df[peakipy_data.df.include != "yes"]) > 0: - excluded = peakipy_data.df[peakipy_data.df.include != "yes"][column_selection] + excluded = peakipy_data.df[peakipy_data.df.include != "yes"][ + peaklist_columns_for_printing + ] table = df_to_rich_table( excluded, title="[yellow] Excluded peaks [/yellow]", @@ -657,7 +651,7 @@ def fit( # read NMR data args = {} config = {} - args, config = read_config(args) + args, config = update_args_with_values_from_config_file(args) dims = config.get("dims", [0, 1, 2]) peakipy_data = LoadData(peaklist_path, data_path, dims=dims) peakipy_data = check_for_include_column_and_add_if_missing(peakipy_data) @@ -1211,7 +1205,6 @@ def create_residual_figure(plot_data: PlottingDataForPlane): def create_plotly_figure(plot_data: PlottingDataForPlane): lines = create_plotly_wireframe_lines(plot_data) surfaces = create_plotly_surfaces(plot_data) - # residuals = create_residual_contours(plot_data) fig = go.Figure(data=lines + surfaces) fig = update_axis_ranges(fig, plot_data) return fig @@ -1373,7 +1366,7 @@ def check( fits = validate_fit_dataframe(pd.read_csv(fits)) args = {} # get dims from config file - args, config = read_config(args, config_path) + args, config = update_args_with_values_from_config_file(args, config_path) dims = config.get("dims", (1, 2, 3)) ccpn_flag = ccpn diff --git a/peakipy/core.py b/peakipy/core.py index f5b2084f..5dc87fa2 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -1728,7 +1728,28 @@ def update_df(self): self.check_peak_bounds() -def read_config(args, config_path="peakipy.config"): +def load_config(config_path): + if config_path.exists(): + with open(config_path) as opened_config: + config_dic = json.load(opened_config) + return config_dic + else: + return {} + + +def write_config(config_path, config_dic): + with open(config_path, "w") as config: + config.write(json.dumps(config_dic, sort_keys=True, indent=4)) + + +def update_config_file(config_path, config_kvs): + config_dic = load_config(config_path) + config_dic.update(config_kvs) + write_config(config_path, config_dic) + return config_dic + + +def update_args_with_values_from_config_file(args, config_path="peakipy.config"): """read a peakipy config file, extract params and update args dict :param args: dict containing params extracted from docopt command line @@ -1746,17 +1767,16 @@ def read_config(args, config_path="peakipy.config"): config_path = Path(config_path) if config_path.exists(): try: - with open(config_path) as config_file: - config = json.load(config_file) - print( - f"[green]Using config file with dims [yellow]{config.get('dims')}[/yellow][/green]" - ) - args["dims"] = config.get("dims", [0, 1, 2]) - noise = config.get("noise") - if noise: - noise = float(noise) + config = load_config(config_path) + print( + f"[green]Using config file with dims [yellow]{config.get('dims')}[/yellow][/green]" + ) + args["dims"] = 
config.get("dims", (0, 1, 2)) + noise = config.get("noise") + if noise: + noise = float(noise) - colors = config.get("colors", ["#5e3c99", "#e66101"]) + colors = config.get("colors", ["#5e3c99", "#e66101"]) except json.decoder.JSONDecodeError: print( "[red]Your peakipy.config file is corrupted - maybe your JSON is not correct...[/red]" diff --git a/test/test_core.py b/test/test_core.py index 2a0612c9..083f9e25 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -1,6 +1,8 @@ import unittest from unittest.mock import patch from collections import namedtuple +from pathlib import Path +import json import numpy as np from numpy.testing import assert_array_equal @@ -26,6 +28,9 @@ select_planes_above_threshold_from_masked_data, slice_peaks_from_data_using_mask, estimate_amplitude, + load_config, + write_config, + update_config_file, ) @@ -402,5 +407,71 @@ def test_main(self, MockSpec): self.assertIsNotNone(spec) +def test_load_config_existing(): + config_path = Path("test_config.json") + # Create a dummy existing config file + with open(config_path, "w") as f: + json.dump({"key1": "value1"}, f) + + loaded_config = load_config(config_path) + + assert loaded_config == {"key1": "value1"} + + # Clean up + config_path.unlink() + + +def test_load_config_nonexistent(): + config_path = Path("test_config.json") + + loaded_config = load_config(config_path) + + assert loaded_config == {} + + +def test_write_config(): + config_path = Path("test_config.json") + config_kvs = {"key1": "value1", "key2": "value2"} + + write_config(config_path, config_kvs) + + # Check if the config file is created correctly + assert config_path.exists() + + # Check if the config file content is correct + with open(config_path) as f: + created_config = json.load(f) + assert created_config == {"key1": "value1", "key2": "value2"} + + # Clean up + config_path.unlink() + + +def test_update_config_file_existing(): + config_path = Path("test_config.json") + # Create a dummy existing config file + with open(config_path, "w") as f: + json.dump({"key1": "value1"}, f) + + config_kvs = {"key2": "value2", "key3": "value3"} + updated_config = update_config_file(config_path, config_kvs) + + assert updated_config == {"key1": "value1", "key2": "value2", "key3": "value3"} + + # Clean up + config_path.unlink() + + +def test_update_config_file_nonexistent(): + config_path = Path("test_config.json") + config_kvs = {"key1": "value1", "key2": "value2"} + updated_config = update_config_file(config_path, config_kvs) + + assert updated_config == {"key1": "value1", "key2": "value2"} + + # Clean up + config_path.unlink() + + if __name__ == "__main__": unittest.main(verbosity=2) From 2a1afe2a8eade62212a9efa86680a9638d65c3e2 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 31 Mar 2024 23:23:24 -0400 Subject: [PATCH 28/37] fixed vclist bug and added test --- peakipy/cli/fit.py | 3 ++- test/test_cli.py | 10 ++++++++++ test/test_fit.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 41 insertions(+), 1 deletion(-) diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 8d370898..7beb8f0c 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -59,6 +59,7 @@ class FitPeaksArgs: jack_knife_sample_errors: bool = False mp: bool = (True,) verbose: bool = (False,) + vclist_data: Optional[np.array] = None @dataclass @@ -409,7 +410,7 @@ def rename_columns_for_compatibility(df): def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): - vclist_data = fit_input.args.get("vclist_data") + vclist_data = fit_input.args.vclist_data df["vclist"] = 
df.plane.apply(lambda x: vclist_data[x]) return df diff --git a/test/test_cli.py b/test/test_cli.py index 95aec4b4..aec9c35c 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -51,6 +51,16 @@ def test_fit_main_with_default(protein_L): peakipy.cli.main.fit(**args) +def test_fit_main_with_vclist(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + output_path=protein_L / Path("fits_PV.csv"), + vclist=protein_L / Path("vclist"), + ) + peakipy.cli.main.fit(**args) + + def test_fit_main_with_gaussian(protein_L): args = dict( peaklist_path=protein_L / Path("test.csv"), diff --git a/test/test_fit.py b/test/test_fit.py index 113b0339..b14816b0 100644 --- a/test/test_fit.py +++ b/test/test_fit.py @@ -19,6 +19,9 @@ create_parameter_dict, perform_initial_lineshape_fit_on_cluster_of_peaks, merge_unpacked_parameters_with_metadata, + add_vclist_to_df, + FitPeaksArgs, + FitPeaksInput, ) from peakipy.core import Lineshape, pvoigt2d @@ -344,6 +347,32 @@ def test_merge_unpacked_parameters_with_metadata(): assert expected_result.equals(actual_result) +def test_add_vclist_to_df(): + args = FitPeaksArgs( + noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) + ) + fit_peaks_input = FitPeaksInput( + args=args, data=None, config=None, plane_numbers=None + ) + df = pd.DataFrame(dict(plane=[0, 1, 2])) + expected_df = pd.DataFrame(dict(plane=[0, 1, 2], vclist=[1, 2, 3])) + actual_df = add_vclist_to_df(fit_peaks_input, df) + assert actual_df.equals(expected_df) + + +def test_add_vclist_to_df_plane_order(): + args = FitPeaksArgs( + noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) + ) + fit_peaks_input = FitPeaksInput( + args=args, data=None, config=None, plane_numbers=None + ) + df = pd.DataFrame(dict(plane=[2, 1, 0])) + expected_df = pd.DataFrame(dict(plane=[2, 1, 0], vclist=[3, 2, 1])) + actual_df = add_vclist_to_df(fit_peaks_input, df) + assert actual_df.equals(expected_df) + + # def test_perform_initial_lineshape_fit_on_cluster_of_peaks(pseudo_voigt_model_result): # expected_result = pseudo_voigt_model_result # actual_result = perform_initial_lineshape_fit_on_cluster_of_peaks() From bb64a9941851be445625b2c6c825e35861ff797b Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 5 May 2024 21:43:51 -0400 Subject: [PATCH 29/37] added more tests --- .github/workflows/ci.yml | 4 +- Makefile | 15 +++--- peakipy/core.py | 17 ++++++- test/test_core.py | 103 +++++++++++++++++++++++++++++++++++++-- test/test_fit.py | 81 ++++++++++++++++++++++++++++++ 5 files changed, 205 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dde8ed90..e283337a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: python -m pip install --upgrade pip pip install . 
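# A short sketch of the vclist bookkeeping exercised by the tests above: each
# fitted plane index selects the matching vclist entry, so plane order (not row
# order) determines the delay value. The array values are hypothetical.
import numpy as np
import pandas as pd

vclist_data = np.array([1, 2, 3])
df = pd.DataFrame(dict(plane=[2, 1, 0]))
df["vclist"] = df.plane.apply(lambda x: vclist_data[x])
assert df["vclist"].tolist() == [3, 2, 1]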
pip install pytest + pip install coverage - name: Run tests run: | - pytest test/test_core.py - pytest test/test_cli.py \ No newline at end of file + make coverage diff --git a/Makefile b/Makefile index 8cb28ad3..3a87d871 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,10 @@ -.PHONY: test +.PHONY: coverage -test: - pytest test/test_core.py - pytest test/test_main.py - pytest test/test_fit.py - pytest test/test_cli.py +coverage: + coverage run -m pytest test/test_core.py test/test_main.py test/test_fit.py test/test_cli.py + +coverage-html: + coverage html + firefox htmlcov/index.html + +test: coverage coverage-html diff --git a/peakipy/core.py b/peakipy/core.py index 5dc87fa2..d0473a61 100644 --- a/peakipy/core.py +++ b/peakipy/core.py @@ -475,6 +475,17 @@ def get_params(params, name): @dataclass class PeakLimits: + """Given a peak position and linewidth in points determine + the limits based on the data + + Arguments + --------- + peak: pd.DataFrame + peak is a row from a pandas dataframe + data: np.array + 2D numpy array + """ + peak: pd.DataFrame data: np.array min_x: int = field(init=False) @@ -483,10 +494,12 @@ class PeakLimits: max_y: int = field(init=False) def __post_init__(self): - self.max_y = int(self.peak.Y_AXIS + self.peak.YW) + 1 + assert self.peak.Y_AXIS <= self.data.shape[0] + assert self.peak.X_AXIS <= self.data.shape[1] + self.max_y = int(np.ceil(self.peak.Y_AXIS + self.peak.YW)) + 1 if self.max_y > self.data.shape[0]: self.max_y = self.data.shape[0] - self.max_x = int(self.peak.X_AXIS + self.peak.XW) + 1 + self.max_x = int(np.ceil(self.peak.X_AXIS + self.peak.XW)) + 1 if self.max_x > self.data.shape[1]: self.max_x = self.data.shape[1] diff --git a/test/test_core.py b/test/test_core.py index 083f9e25..ed9dec4c 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -4,6 +4,7 @@ from pathlib import Path import json +import pytest import numpy as np from numpy.testing import assert_array_equal import pandas as pd @@ -14,16 +15,22 @@ from peakipy.core import ( make_mask, fix_params, + gaussian_lorentzian, + pv_g, + pv_l, + voigt2d, pvoigt2d, pv_pv, get_params, make_param_dict, to_prefix, make_models, + get_lineshape_function, Pseudo3D, Peaklist, Lineshape, PeaklistFormat, + PeakLimits, select_reference_planes_using_indices, select_planes_above_threshold_from_masked_data, slice_peaks_from_data_using_mask, @@ -34,6 +41,11 @@ ) +@pytest.fixture +def test_directory(): + return Path(__file__).parent + + def test_select_reference_planes_using_indices(): data = np.zeros((6, 100, 200)) indices = [] @@ -157,6 +169,9 @@ def test_estimate_amplitude_invalid_indices(): class TestCoreFunctions(unittest.TestCase): + test_directory = Path(__file__).parent + test_directory = "./" + def test_make_mask(self): data = np.ones((10, 10)) c_x = 5 @@ -268,6 +283,12 @@ def test_make_param_dict(self): self.assertEqual(params["_two_sigma_x"], 1.25) self.assertEqual(params["_two_sigma_y"], 1.25) + voigt_params = make_param_dict(peaks, data, Lineshape.V) + self.assertEqual( + voigt_params["_one_sigma_x"], 2.5 / (2.0 * np.sqrt(2.0 * np.log(2))) + ) + self.assertEqual(voigt_params["_one_gamma_x"], 2.5 / 2.0) + def test_to_prefix(self): names = [ (1, "_1_"), @@ -328,9 +349,9 @@ def test_make_models(self): def test_Pseudo3D(self): datasets = [ - ("test/test_protein_L/test1.ft2", [0, 1, 2]), - ("test/test_protein_L/test_tp.ft2", [2, 1, 0]), - ("test/test_protein_L/test_tp2.ft2", [1, 2, 0]), + (f"{self.test_directory}/test_protein_L/test1.ft2", [0, 1, 2]), + 
(f"{self.test_directory}/test_protein_L/test_tp.ft2", [2, 1, 0]), + (f"{self.test_directory}/test_protein_L/test_tp2.ft2", [1, 2, 0]), ] # expected shape @@ -377,6 +398,8 @@ def test_main(self, MockFit): class TestReadScript(unittest.TestCase): + test_directory = "./" + @patch("peakipy.cli.main.read") def test_main(self, MockRead): args = {"": "hello", "": "data"} @@ -385,8 +408,8 @@ def test_main(self, MockRead): def test_read_pipe_peaklist(self): args = { - "path": "test/test_pipe.tab", - "data_path": "test/test_pipe.ft2", + "path": f"{self.test_directory}/test_pipe.tab", + "data_path": f"{self.test_directory}/test_pipe.ft2", "dims": [0, 1, 2], "fmt": PeaklistFormat.pipe, } @@ -473,5 +496,75 @@ def test_update_config_file_nonexistent(): config_path.unlink() +@pytest.fixture +def sample_data(): + return np.zeros((10, 10)) + + +@pytest.fixture +def sample_peak(): + peak_data = {"X_AXIS": [5], "Y_AXIS": [5], "XW": [2], "YW": [2]} + return pd.DataFrame(peak_data).iloc[0] + + +def test_peak_limits_max_min(sample_peak, sample_data): + limits = PeakLimits(sample_peak, sample_data) + + assert limits.max_x == 8 + assert limits.max_y == 8 + assert limits.min_x == 3 + assert limits.min_y == 3 + + +def test_peak_limits_boundary(sample_data): + peak_data = {"X_AXIS": [8], "Y_AXIS": [8], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + limits = PeakLimits(peak, sample_data) + + assert limits.max_x == 10 + assert limits.max_y == 10 + assert limits.min_x == 6 + assert limits.min_y == 6 + + +def test_peak_limits_at_boundary(sample_data): + peak_data = {"X_AXIS": [0], "Y_AXIS": [0], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + limits = PeakLimits(peak, sample_data) + + assert limits.max_x == 3 + assert limits.max_y == 3 + assert limits.min_x == 0 + assert limits.min_y == 0 + + +def test_peak_limits_outside_boundary(sample_data): + peak_data = {"X_AXIS": [15], "Y_AXIS": [15], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + with pytest.raises(AssertionError): + limits = PeakLimits(peak, sample_data) + + +def test_peak_limits_1d_data(): + data = np.zeros(10) + peak_data = {"X_AXIS": [5], "Y_AXIS": [0], "XW": [2], "YW": [0]} + peak = pd.DataFrame(peak_data).iloc[0] + with pytest.raises(IndexError): + limits = PeakLimits(peak, data) + + +def test_get_lineshape_function(): + assert get_lineshape_function(Lineshape.PV) == pvoigt2d + assert get_lineshape_function(Lineshape.L) == pvoigt2d + assert get_lineshape_function(Lineshape.G) == pvoigt2d + assert get_lineshape_function(Lineshape.G_L) == gaussian_lorentzian + assert get_lineshape_function(Lineshape.PV_G) == pv_g + assert get_lineshape_function(Lineshape.PV_L) == pv_l + assert get_lineshape_function(Lineshape.PV_PV) == pv_pv + assert get_lineshape_function(Lineshape.V) == voigt2d + with pytest.raises(Exception): + get_lineshape_function("bla") + + if __name__ == "__main__": unittest.main(verbosity=2) diff --git a/test/test_fit.py b/test/test_fit.py index b14816b0..83512aed 100644 --- a/test/test_fit.py +++ b/test/test_fit.py @@ -20,6 +20,8 @@ perform_initial_lineshape_fit_on_cluster_of_peaks, merge_unpacked_parameters_with_metadata, add_vclist_to_df, + update_cluster_df_with_fit_statistics, + rename_columns_for_compatibility, FitPeaksArgs, FitPeaksInput, ) @@ -107,6 +109,85 @@ def test_set_parameters_to_fix_during_fit_3(): ) +def test_set_parameters_to_fix_during_fit_None(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + 
modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, None + ) + assert ( + modified_parameter_set["test1"].vary + == modified_parameter_set["test2"].vary + == True + ) + + +def test_set_parameters_to_fix_during_fit_None_str(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["None"] + ) + assert ( + modified_parameter_set["test1"].vary + == modified_parameter_set["test2"].vary + == True + ) + + +def test_update_cluster_df_with_fit_statistics(): + result = ModelResult(Model(pvoigt2d), None, None) + result.aic = None + result.bic = None + data = [ + dict( + chisqr=None, + redchi=None, + residual_sum=None, + aic=None, + bic=None, + nfev=0, + ndata=0, + ) + ] + expected_cluster_df = pd.DataFrame(data) + actual_cluster_df = update_cluster_df_with_fit_statistics( + expected_cluster_df, result + ) + pd.testing.assert_frame_equal(actual_cluster_df, expected_cluster_df) + + +def test_rename_columns_for_compatibility(): + df = pd.DataFrame( + [ + dict( + amplitude=1, + amplitude_stderr=1, + X_AXIS=1, + Y_AXIS=1, + ASS="None", + MEMCNT=1, + X_RADIUS=1, + Y_RADIUS=1, + ) + ] + ) + expected_columns = [ + "amp", + "amp_err", + "init_center_x", + "init_center_y", + "assignment", + "memcnt", + "x_radius", + "y_radius", + ] + actual_columns = rename_columns_for_compatibility(df).columns + assert all([i == j for i, j in zip(actual_columns, expected_columns)]) + + def test_get_default_param_names_pseudo_voigt(): assert get_default_lineshape_param_names(Lineshape.PV) == [ "amplitude", From 09655d35c59cf19f554b9e27fafd507dc7e6c3f6 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 5 May 2024 21:53:59 -0400 Subject: [PATCH 30/37] fixed dataclass factory --- test/test_main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/test_main.py b/test/test_main.py index 366f934b..6e3bb4ce 100644 --- a/test/test_main.py +++ b/test/test_main.py @@ -29,8 +29,8 @@ def actual_vclist(): @dataclass class PeakipyData: - df: pd.DataFrame = pd.DataFrame() - data: np.array = np.zeros((4, 10, 20)) + df: pd.DataFrame = field(default_factory=lambda: pd.DataFrame()) + data: np.array = field(default_factory=lambda: np.zeros((4, 10, 20))) dims: list = field(default_factory=lambda: [0, 1, 2]) From 3ca2acd3128c8b26050298045481c3e2acb394a3 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 12 May 2024 16:01:03 -0400 Subject: [PATCH 31/37] added tests and coverage --- .github/workflows/ci.yml | 4 ++ peakipy/cli/main.py | 26 ++---------- test/test_main.py | 87 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 95 insertions(+), 22 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e283337a..a76fd9e7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -21,3 +21,7 @@ jobs: - name: Run tests run: | make coverage + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v4.0.1 + with: + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 46186429..3a636632 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -441,28 +441,6 @@ def select_specified_planes(plane, peakipy_data): return plane_numbers, peakipy_data -def select_specified_planes(plane, peakipy_data): - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) - # only fit specified planes - if plane: - inds = [i for i in plane] 
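Patch 30's two-line fix above deserves a gloss: a class-level `pd.DataFrame()` or `np.zeros(...)` default is built once and shared by every instance, and Python 3.11+ rejects such unhashable defaults outright. A generic illustration, not peakipy code:

```python
from dataclasses import dataclass, field

try:
    @dataclass
    class Bad:
        items: list = []  # rejected at class creation on every Python version
except ValueError as err:
    print(err)  # "mutable default <class 'list'> for field items is not allowed..."

@dataclass
class Good:
    items: list = field(default_factory=list)  # fresh object per instance

a, b = Good(), Good()
a.items.append(1)
assert b.items == []  # no state leaks between instances
```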
- data_inds = [ - (i in inds) for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) - ] - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ - data_inds - ] - peakipy_data.data = peakipy_data.data[data_inds] - print( - "[yellow]Using only planes {plane} data now has the following shape[/yellow]", - peakipy_data.data.shape, - ) - if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: - print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) - exit() - return plane_numbers, peakipy_data - - def exclude_specified_planes(exclude_plane, peakipy_data): plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) # do not fit these planes @@ -566,6 +544,10 @@ def unpack_xy_bounds(xy_bounds, peakipy_data): xy_bounds = list(xy_bounds) xy_bounds[0] = xy_bounds[0] * peakipy_data.pt_per_ppm_f2 xy_bounds[1] = xy_bounds[1] * peakipy_data.pt_per_ppm_f1 + case _: + raise TypeError( + "xy_bounds should be a tuple (, )" + ) return xy_bounds diff --git a/test/test_main.py b/test/test_main.py index 6e3bb4ce..cf0f144e 100644 --- a/test/test_main.py +++ b/test/test_main.py @@ -9,8 +9,12 @@ from peakipy.cli.main import ( get_vclist, check_for_include_column_and_add_if_missing, + check_data_shape_is_consistent_with_dims, select_specified_planes, exclude_specified_planes, + remove_excluded_peaks, + warn_if_trying_to_fit_large_clusters, + unpack_xy_bounds, validate_plane_selection, validate_sample_count, unpack_plotting_colors, @@ -32,6 +36,8 @@ class PeakipyData: df: pd.DataFrame = field(default_factory=lambda: pd.DataFrame()) data: np.array = field(default_factory=lambda: np.zeros((4, 10, 20))) dims: list = field(default_factory=lambda: [0, 1, 2]) + pt_per_ppm_f2 = 10 + pt_per_ppm_f1 = 20 def test_get_vclist(actual_vclist): @@ -47,6 +53,13 @@ def test_get_vclist_none(): assert args == expected_args +def test_get_vclist_error(): + vclist = "vclist" + args = {} + with pytest.raises(Exception): + get_vclist(vclist, args) + + def test_check_for_include_column(): peakipy_data = PeakipyData(pd.DataFrame()) peakipy_data = check_for_include_column_and_add_if_missing(peakipy_data) @@ -68,6 +81,14 @@ def test_select_specified_planes_2(): assert peakipy_data.data.shape == (2, 10, 20) +def test_select_specified_planes_all_planes_excluded(capsys): + plane = [10] + with pytest.raises(SystemExit): + select_specified_planes(plane, PeakipyData()) + captured = capsys.readouterr() + assert "" in captured.err + + def test_exclude_specified_planes(): plane = None expected_plane_numbers = np.arange(4) @@ -83,6 +104,66 @@ def test_exclude_specified_planes_2(): assert peakipy_data.data.shape == (2, 10, 20) +def test_exclude_specified_planes_all_planes_excluded(capsys): + plane = [0, 1, 2, 3] + with pytest.raises(SystemExit): + exclude_specified_planes(plane, PeakipyData()) + captured = capsys.readouterr() + assert "" in captured.err + + +def test_remove_excluded_peaks(): + actual_dict = dict( + include=["yes", "yes", "no"], + peak=[1, 2, 3], + INDEX=[0, 1, 2], + ASS=["one", "two", "three"], + X_PPM=[1, 2, 3], + Y_PPM=[1, 2, 3], + CLUSTID=[1, 2, 3], + MEMCNT=[1, 1, 1], + ) + expected_dict = {k: v[:-1] for k, v in actual_dict.items()} + actual_df = pd.DataFrame(actual_dict) + expected_df = pd.DataFrame(expected_dict) + peakipy_data = PeakipyData(df=actual_df) + pd.testing.assert_frame_equal(remove_excluded_peaks(peakipy_data).df, expected_df) + + +def test_warn_if_trying_to_fit_large_clusters(): + max_cluster_size = 7 + df = pd.DataFrame(dict(MEMCNT=[1, 6], CLUSTID=[0, 1])) + 
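The plane bookkeeping exercised by the tests above reduces to a boolean mask over the first axis. A minimal sketch using the same shapes as the PeakipyData fixture:

```python
import numpy as np

data = np.zeros((4, 10, 20))  # 4 planes, as in the PeakipyData fixture
exclude_plane = [1, 2]

keep = [i not in exclude_plane for i in range(data.shape[0])]
plane_numbers = np.arange(data.shape[0])[keep]  # array([0, 3])
data = data[keep]
assert data.shape == (2, 10, 20)
```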
peakipy_data = PeakipyData(df=df) + assert ( + warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data) + == max_cluster_size + ) + + +def test_warn_if_trying_to_fit_large_clusters_none(): + max_cluster_size = None + df = pd.DataFrame(dict(MEMCNT=[1, 12], CLUSTID=[0, 1])) + peakipy_data = PeakipyData(df=df) + assert warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data) == 12 + + +def test_unpack_xy_bounds_case_00(): + xy_bounds = (0, 0) + result = unpack_xy_bounds(xy_bounds, PeakipyData()) + assert result == None + + +def test_unpack_xy_bounds_case_xy(): + xy_bounds = (1, 2) + result = unpack_xy_bounds(xy_bounds, PeakipyData()) + assert result == [10, 40] + + +def test_unpack_xy_bounds_invalid_input(): + with pytest.raises(TypeError): + unpack_xy_bounds(None, PeakipyData()) + + class MockPseudo3D: def __init__(self, n_planes): self.n_planes = n_planes @@ -195,3 +276,9 @@ def test_invalid_clusters(): fits = pd.DataFrame({"clustid": [1, 2, 3]}) with pytest.raises(SystemExit): get_fit_data_for_selected_peak_clusters(fits, [4, 5, 6]) + + +def test_check_data_shape_is_consistent_with_dims(): + peakipy_data = PeakipyData(data=np.zeros((4, 10))) + with pytest.raises(SystemExit): + check_data_shape_is_consistent_with_dims(peakipy_data) From 5c33de287b0bf8d612f3c490080499d855fefbf4 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sat, 25 May 2024 23:12:19 -0400 Subject: [PATCH 32/37] major refactoring and increased test coverage --- .gitignore | 3 +- Makefile | 3 +- peakipy/cli/check_panel.py | 2 +- peakipy/cli/edit.py | 3 +- peakipy/cli/edit_panel.py | 5 +- peakipy/cli/fit.py | 8 +- peakipy/cli/main.py | 884 +++-------------------------------- peakipy/constants.py | 6 + peakipy/fitting.py | 645 ++++++++++++++++++++++++++ peakipy/io.py | 913 +++++++++++++++++++++++++++++++++++++ peakipy/lineshapes.py | 522 +++++++++++++++++++++ peakipy/plotting.py | 400 ++++++++++++++++ peakipy/utils.py | 239 ++++++++++ test/test_cli.py | 4 +- test/test_data.py | 13 +- test/test_edit.py | 1 + test/test_fit.py | 2 +- test/test_fitting.py | 692 ++++++++++++++++++++++++++++ test/test_io.py | 428 +++++++++++++++++ test/test_lineshapes.py | 353 ++++++++++++++ test/test_utils.py | 252 ++++++++++ 21 files changed, 4543 insertions(+), 835 deletions(-) create mode 100644 peakipy/constants.py create mode 100644 peakipy/fitting.py create mode 100644 peakipy/io.py create mode 100644 peakipy/lineshapes.py create mode 100644 peakipy/plotting.py create mode 100644 peakipy/utils.py create mode 100644 test/test_edit.py create mode 100644 test/test_fitting.py create mode 100644 test/test_io.py create mode 100644 test/test_lineshapes.py create mode 100644 test/test_utils.py diff --git a/.gitignore b/.gitignore index 44059f70..6f72b217 100644 --- a/.gitignore +++ b/.gitignore @@ -29,4 +29,5 @@ site/ # Tests test/test_protein_L/tmp/ ## pytest -.pytest_cache \ No newline at end of file +.pytest_cache +.coverage diff --git a/Makefile b/Makefile index 3a87d871..431618da 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,8 @@ .PHONY: coverage coverage: - coverage run -m pytest test/test_core.py test/test_main.py test/test_fit.py test/test_cli.py + #coverage run -m pytest test/test_core.py test/test_main.py test/test_fit.py test/test_cli.py + coverage run -m pytest test/test_fitting.py test/test_lineshapes.py test/test_io.py test/test_utils.py test/test_main.py test/test_cli.py coverage-html: coverage html diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index 44bc00d4..2b11d131 100644 --- 
a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -69,7 +69,7 @@ def create_plotly_pane(cluster, plane): data_path=data.data_path, clusters=[cluster], plane=[plane], - config_path=data.config_path, + # config_path=data.config_path, plotly=True, ) diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 080f5b02..3f4d31ef 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -34,7 +34,8 @@ from bokeh.plotting.contour import contour_data from bokeh.palettes import PuBuGn9, Category20, Viridis256, RdGy11, Reds256, YlOrRd9 -from peakipy.core import LoadData, update_args_with_values_from_config_file, StrucEl +from peakipy.io import LoadData, StrucEl +from peakipy.utils import update_args_with_values_from_config_file log_style = "overflow:scroll;" log_div = """
<div style=%s>%s</div>
""" diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py index 04b853d0..f74f5b3e 100644 --- a/peakipy/cli/edit_panel.py +++ b/peakipy/cli/edit_panel.py @@ -102,9 +102,12 @@ def fit_peaks_button_click(event): button.on_click(fit_peaks_button_click) def update_source_selected_indices(event): - # print(event) # print(bs.tablulator_widget.selection) + # hack to make current selection however, only allows one selection + # at a time + bs.tablulator_widget._update_selection([event.value]) bs.source.selected.indices = bs.tablulator_widget.selection + # print(bs.tablulator_widget.selection) bs.tablulator_widget.on_click(update_source_selected_indices) bs.tablulator_widget.on_edit(update_peakipy_data_on_edit_of_table) diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py index 7beb8f0c..8ffc73e1 100644 --- a/peakipy/cli/fit.py +++ b/peakipy/cli/fit.py @@ -14,15 +14,17 @@ from lmfit import Model, Parameter, Parameters from lmfit.model import ModelResult -from peakipy.core import ( - fix_params, +from peakipy.lineshapes import ( Lineshape, pvoigt2d, voigt2d, pv_pv, + get_lineshape_function, +) +from peakipy.fitting import ( + fix_params, to_prefix, get_limits_for_axis_in_points, - get_lineshape_function, deal_with_peaks_on_edge_of_spectrum, select_planes_above_threshold_from_masked_data, select_reference_planes_using_indices, diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 3a636632..79613f42 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -1,23 +1,4 @@ #!/usr/bin/env python3 -""" - - peakipy - deconvolute overlapping NMR peaks - Copyright (C) 2019 Jacob Peter Brady - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
- -""" import os import json import shutil @@ -42,77 +23,75 @@ from mpl_toolkits.mplot3d import axes3d from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages -from matplotlib.widgets import Button import yaml -import plotly.graph_objects as go import plotly.io as pio pio.templates.default = "plotly_dark" -from peakipy.core import ( +from peakipy.io import ( Peaklist, - run_log, LoadData, - pv_pv, - pvoigt2d, - voigt2d, - make_mask, - pv_g, - pv_l, - gaussian_lorentzian, Pseudo3D, - df_to_rich_table, StrucEl, PeaklistFormat, - Lineshape, OutFmt, + get_vclist, +) +from peakipy.utils import ( + run_log, + df_to_rich_table, write_config, update_config_file, update_args_with_values_from_config_file, - get_limits_for_axis_in_points, - deal_with_peaks_on_edge_of_spectrum, - calculate_fwhm_for_voigt_lineshape, - calculate_height_for_voigt_lineshape, - calculate_fwhm_for_pseudo_voigt_lineshape, - calculate_height_for_pseudo_voigt_lineshape, - calculate_height_for_gaussian_lineshape, - calculate_height_for_lorentzian_lineshape, - calculate_height_for_pv_pv_lineshape, + update_linewidths_from_hz_to_points, + update_peak_positions_from_ppm_to_points, + check_data_shape_is_consistent_with_dims, + check_for_include_column_and_add_if_missing, + remove_excluded_peaks, + warn_if_trying_to_fit_large_clusters, + save_data, +) + +from peakipy.lineshapes import ( + Lineshape, + calculate_lineshape_specific_height_and_fwhm, calculate_peak_centers_in_ppm, calculate_peak_linewidths_in_hz, ) +from peakipy.fitting import ( + get_limits_for_axis_in_points, + deal_with_peaks_on_edge_of_spectrum, + select_specified_planes, + exclude_specified_planes, + unpack_xy_bounds, + validate_plane_selection, + get_fit_data_for_selected_peak_clusters, + make_masks_from_plane_data, + simulate_lineshapes_from_fitted_peak_parameters, + simulate_pv_pv_lineshapes_from_fitted_peak_parameters, + validate_fit_dataframe, +) + from .fit import ( fit_peak_clusters, FitPeaksInput, FitPeaksArgs, ) +from peakipy.plotting import ( + PlottingDataForPlane, + validate_sample_count, + unpack_plotting_colors, + create_plotly_figure, + create_residual_figure, + create_matplotlib_figure, +) from .spec import yaml_file app = typer.Typer() tmp_path = Path("tmp") tmp_path.mkdir(exist_ok=True) log_path = Path("log.txt") -# for printing dataframes -peaklist_columns_for_printing = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"] -bad_column_selection = [ - "clustid", - "amp", - "center_x_ppm", - "center_y_ppm", - "fwhm_x_hz", - "fwhm_y_hz", - "lineshape", -] -bad_color_selection = [ - "green", - "blue", - "yellow", - "red", - "yellow", - "red", - "magenta", -] peaklist_path_help = "Path to peaklist" @@ -367,225 +346,46 @@ def read( ) -def calculate_lineshape_specific_height_and_fwhm( - lineshape: Lineshape, df: pd.DataFrame -): - match lineshape: - case lineshape.V: - df = calculate_height_for_voigt_lineshape(df) - df = calculate_fwhm_for_voigt_lineshape(df) - - case lineshape.PV: - df = calculate_height_for_pseudo_voigt_lineshape(df) - df = calculate_fwhm_for_pseudo_voigt_lineshape(df) - - case lineshape.G: - df = calculate_height_for_gaussian_lineshape(df) - df = calculate_fwhm_for_pseudo_voigt_lineshape(df) - - case lineshape.L: - df = calculate_height_for_lorentzian_lineshape(df) - df = calculate_fwhm_for_pseudo_voigt_lineshape(df) - - case lineshape.PV_PV: - df = calculate_height_for_pv_pv_lineshape(df) - df = calculate_fwhm_for_pseudo_voigt_lineshape(df) - case _: - df = calculate_fwhm_for_pseudo_voigt_lineshape(df) - 
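For anyone tracking patch 32's refactor, the import churn above amounts to splitting peakipy.core by responsibility. Roughly, with names taken from the hunks in this patch:

```python
# pre-refactor: one module held everything
from peakipy.core import Pseudo3D, Peaklist, pvoigt2d, make_mask, run_log

# post-refactor: split by responsibility
from peakipy.io import Pseudo3D, Peaklist           # data and peaklist loading
from peakipy.lineshapes import Lineshape, pvoigt2d  # lineshape functions
from peakipy.fitting import make_mask               # fit setup helpers
from peakipy.utils import run_log                   # logging and config helpers
```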
return df - - -def get_vclist(vclist, args): - # read vclist - if vclist is None: - vclist = False - elif vclist.exists(): - vclist_data = np.genfromtxt(vclist) - args["vclist_data"] = vclist_data - vclist = True - else: - raise Exception("vclist not found...") - - args["vclist"] = vclist - return args - - -def check_data_shape_is_consistent_with_dims(peakipy_data): - # check data shape is consistent with dims - if len(peakipy_data.dims) != len(peakipy_data.data.shape): - print( - f"Dims are {peakipy_data.dims} while data shape is {peakipy_data.data.shape}?" - ) - exit() - - -def select_specified_planes(plane, peakipy_data): - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) - # only fit specified planes - if plane: - inds = [i for i in plane] - data_inds = [ - (i in inds) for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) - ] - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ - data_inds - ] - peakipy_data.data = peakipy_data.data[data_inds] - print( - "[yellow]Using only planes {plane} data now has the following shape[/yellow]", - peakipy_data.data.shape, - ) - if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: - print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) - exit() - return plane_numbers, peakipy_data - - -def exclude_specified_planes(exclude_plane, peakipy_data): - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) - # do not fit these planes - if exclude_plane: - inds = [i for i in exclude_plane] - data_inds = [ - (i not in inds) - for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) - ] - plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ - data_inds - ] - peakipy_data.data = peakipy_data.data[data_inds] - print( - f"[yellow]Excluding planes {exclude_plane} data now has the following shape[/yellow]", - peakipy_data.data.shape, - ) - if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: - print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) - exit() - return plane_numbers, peakipy_data - - -def check_for_include_column_and_add_if_missing(peakipy_data): - # only include peaks with 'include' - if "include" in peakipy_data.df.columns: - pass - else: - # for compatibility - peakipy_data.df["include"] = peakipy_data.df.apply(lambda _: "yes", axis=1) - return peakipy_data - - -def remove_excluded_peaks(peakipy_data): - if len(peakipy_data.df[peakipy_data.df.include != "yes"]) > 0: - excluded = peakipy_data.df[peakipy_data.df.include != "yes"][ - peaklist_columns_for_printing - ] - table = df_to_rich_table( - excluded, - title="[yellow] Excluded peaks [/yellow]", - columns=excluded.columns, - styles=["yellow" for i in excluded.columns], - ) - print(table) - peakipy_data.df = peakipy_data.df[peakipy_data.df.include == "yes"] - return peakipy_data - - -def warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data): - if max_cluster_size is None: - max_cluster_size = peakipy_data.df.MEMCNT.max() - if peakipy_data.df.MEMCNT.max() > 10: - print( - f"""[red] - ################################################################## - You have some clusters of as many as {max_cluster_size} peaks. - You may want to consider reducing the size of your clusters as the - fits will struggle. 
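The two compatibility helpers relocated above are small; stripped of the rich-table reporting, their combined effect on a peaklist dataframe is just:

```python
import pandas as pd

df = pd.DataFrame({"ASS": ["one", "two", "three"], "include": ["yes", "yes", "no"]})
if "include" not in df.columns:  # older peaklists lack the column entirely
    df["include"] = "yes"
df = df[df.include == "yes"]  # anything not marked "yes" is dropped before fitting
assert len(df) == 2
```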
- - Otherwise you can use the --max-cluster-size flag to exclude large - clusters - ################################################################## - [/red]""" - ) - else: - max_cluster_size = max_cluster_size - return max_cluster_size - - -def update_linewidths_from_hz_to_points(peakipy_data): - """in case they were adjusted when running edit.py""" - peakipy_data.df["XW"] = peakipy_data.df.XW_HZ * peakipy_data.pt_per_hz_f2 - peakipy_data.df["YW"] = peakipy_data.df.YW_HZ * peakipy_data.pt_per_hz_f1 - return peakipy_data - - -def update_peak_positions_from_ppm_to_points(peakipy_data): - # convert peak positions from ppm to points in case they were adjusted running edit.py - peakipy_data.df["X_AXIS"] = peakipy_data.df.X_PPM.apply( - lambda x: peakipy_data.uc_f2(x, "PPM") - ) - peakipy_data.df["Y_AXIS"] = peakipy_data.df.Y_PPM.apply( - lambda x: peakipy_data.uc_f1(x, "PPM") - ) - peakipy_data.df["X_AXISf"] = peakipy_data.df.X_PPM.apply( - lambda x: peakipy_data.uc_f2.f(x, "PPM") - ) - peakipy_data.df["Y_AXISf"] = peakipy_data.df.Y_PPM.apply( - lambda x: peakipy_data.uc_f1.f(x, "PPM") - ) - return peakipy_data - - -def unpack_xy_bounds(xy_bounds, peakipy_data): - match xy_bounds: - case (0, 0): - xy_bounds = None - case (x, y): - # convert ppm to points - xy_bounds = list(xy_bounds) - xy_bounds[0] = xy_bounds[0] * peakipy_data.pt_per_ppm_f2 - xy_bounds[1] = xy_bounds[1] * peakipy_data.pt_per_ppm_f1 - case _: - raise TypeError( - "xy_bounds should be a tuple (, )" - ) - return xy_bounds - - -def save_data(df, output_name): - suffix = output_name.suffix - if suffix == ".csv": - df.to_csv(output_name, float_format="%.4f", index=False) - - elif suffix == ".tab": - df.to_csv(output_name, sep="\t", float_format="%.4f", index=False) - - else: - df.to_pickle(output_name) - - +fix_help = "Set parameters to fix after initial lineshape fit (see docs)" +xy_bounds_help = ( + "Restrict fitted peak centre within +/- x and y from initial picked position" +) reference_plane_index_help = ( - "Select planes to use for initial estimation of lineshape parameters" + "Select plane(s) to use for initial estimation of lineshape parameters" ) +mp_help = "Use multiprocessing" +vclist_help = "Provide a vclist style file" +plane_help = "Select individual planes for fitting" +exclude_plane_help = "Exclude individual planes from fitting" @app.command(help="Fit NMR data to lineshape models and deconvolute overlapping peaks") def fit( - peaklist_path: Path, - data_path: Path, + peaklist_path: Annotated[Path, typer.Argument(help=peaklist_path_help)], + data_path: Annotated[Path, typer.Argument(help=data_path_help)], output_path: Path, max_cluster_size: Optional[int] = None, lineshape: Lineshape = Lineshape.PV, - fix: List[str] = ["fraction", "sigma", "center"], - xy_bounds: Tuple[float, float] = (0, 0), - vclist: Optional[Path] = None, - plane: Optional[List[int]] = None, - exclude_plane: Optional[List[int]] = None, + fix: Annotated[List[str], typer.Option(help=fix_help)] = [ + "fraction", + "sigma", + "center", + ], + xy_bounds: Annotated[Tuple[float, float], typer.Option(help=xy_bounds_help)] = ( + 0, + 0, + ), + vclist: Annotated[Optional[Path], typer.Option(help=vclist_help)] = None, + plane: Annotated[Optional[List[int]], typer.Option(help=plane_help)] = None, + exclude_plane: Annotated[ + Optional[List[int]], typer.Option(help=exclude_plane_help) + ] = None, reference_plane_index: Annotated[ List[int], typer.Option(help=reference_plane_index_help) ] = [], initial_fit_threshold: Optional[float] = None, 
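update_linewidths_from_hz_to_points, moved above, is a pure unit conversion built on the Pseudo3D pt_per_hz properties (points per Hz = size / sweep width). A worked toy example with invented acquisition parameters:

```python
size_f2, sw_f2 = 2048, 10000.0    # points and sweep width in Hz (invented)
pt_per_hz_f2 = size_f2 / sw_f2    # 0.2048 points per Hz

xw_hz = 20.0                      # XW_HZ column from the peaklist
xw_points = xw_hz * pt_per_hz_f2  # ~4.1 points, stored in the XW column
```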
jack_knife_sample_errors: bool = False, - mp: bool = True, + mp: Annotated[bool, typer.Option(help=mp_help)] = True, verbose: bool = False, ): """Fit NMR data to lineshape models and deconvolute overlapping peaks @@ -633,7 +433,10 @@ def fit( # read NMR data args = {} config = {} - args, config = update_args_with_values_from_config_file(args) + data_dir = peaklist_path.parent + args, config = update_args_with_values_from_config_file( + args, config_path=data_dir / "peakipy.config" + ) dims = config.get("dims", [0, 1, 2]) peakipy_data = LoadData(peaklist_path, data_path, dims=dims) peakipy_data = check_for_include_column_and_add_if_missing(peakipy_data) @@ -733,551 +536,6 @@ def fit( run_log() -def validate_plane_selection(plane, pseudo3D): - if (plane == []) or (plane == None): - plane = list(range(pseudo3D.n_planes)) - - elif max(plane) > (pseudo3D.n_planes - 1): - raise ValueError( - f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {max(plane)}...[red]" - f"plane numbering starts from 0." - ) - elif min(plane) < 0: - raise ValueError( - f"[red]Plane number can not be negative; you selected --plane {min(plane)}...[/red]" - ) - else: - plane = sorted(plane) - - return plane - - -def validate_sample_count(sample_count): - if type(sample_count) == int: - sample_count = sample_count - else: - raise TypeError("Sample count (ccount, rcount) should be an integer") - return sample_count - - -def unpack_plotting_colors(colors): - match colors: - case (data_color, fit_color): - data_color, fit_color = colors - case _: - data_color, fit_color = "green", "blue" - return data_color, fit_color - - -def get_fit_data_for_selected_peak_clusters(fits, clusters): - match clusters: - case None | []: - pass - case _: - # only use these clusters - fits = fits[fits.clustid.isin(clusters)] - if len(fits) < 1: - exit(f"Are you sure clusters {clusters} exist?") - return fits - - -def make_masks_from_plane_data(empty_mask_array, plane_data): - # make masks - individual_masks = [] - for cx, cy, rx, ry, name in zip( - plane_data.center_x, - plane_data.center_y, - plane_data.x_radius, - plane_data.y_radius, - plane_data.assignment, - ): - tmp_mask = make_mask(empty_mask_array, cx, cy, rx, ry) - empty_mask_array += tmp_mask - individual_masks.append(tmp_mask) - filled_mask_array = empty_mask_array - return individual_masks, filled_mask_array - - -def simulate_pv_pv_lineshapes_from_fitted_peak_parameters( - peak_parameters, XY, sim_data, sim_data_singles -): - for amp, c_x, c_y, s_x, s_y, frac_x, frac_y, ls in zip( - peak_parameters.amp, - peak_parameters.center_x, - peak_parameters.center_y, - peak_parameters.sigma_x, - peak_parameters.sigma_y, - peak_parameters.fraction_x, - peak_parameters.fraction_y, - peak_parameters.lineshape, - ): - sim_data_i = pv_pv(XY, amp, c_x, c_y, s_x, s_y, frac_x, frac_y).reshape( - sim_data.shape - ) - sim_data += sim_data_i - sim_data_singles.append(sim_data_i) - return sim_data, sim_data_singles - - -def simulate_lineshapes_from_fitted_peak_parameters( - peak_parameters, XY, sim_data, sim_data_singles -): - shape = sim_data.shape - for amp, c_x, c_y, s_x, s_y, frac, lineshape in zip( - peak_parameters.amp, - peak_parameters.center_x, - peak_parameters.center_y, - peak_parameters.sigma_x, - peak_parameters.sigma_y, - peak_parameters.fraction, - peak_parameters.lineshape, - ): - # print(amp) - match lineshape: - case "G" | "L" | "PV": - sim_data_i = pvoigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) - case "PV_L": - sim_data_i = pv_l(XY, amp, c_x, c_y, 
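validate_plane_selection, being relocated above, condenses to a bounds check plus a sort. The same logic as a self-contained sketch:

```python
def validate_plane_selection_sketch(plane, n_planes):
    if not plane:  # None or [] means fit every plane
        return list(range(n_planes))
    if max(plane) > n_planes - 1:
        raise ValueError(f"only {n_planes} planes; numbering starts at 0")
    if min(plane) < 0:
        raise ValueError("plane number cannot be negative")
    return sorted(plane)

assert validate_plane_selection_sketch([], 3) == [0, 1, 2]
assert validate_plane_selection_sketch([2, 0], 3) == [0, 2]
```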
s_x, s_y, frac).reshape(shape) - - case "PV_G": - sim_data_i = pv_g(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) - - case "G_L": - sim_data_i = gaussian_lorentzian( - XY, amp, c_x, c_y, s_x, s_y, frac - ).reshape(shape) - - case "V": - sim_data_i = voigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) - sim_data += sim_data_i - sim_data_singles.append(sim_data_i) - return sim_data, sim_data_singles - - -@dataclass -class PlottingDataForPlane: - pseudo3D: Pseudo3D - plane_id: int - plane_lineshape_parameters: pd.DataFrame - X: np.array - Y: np.array - mask: np.array - individual_masks: List[np.array] - sim_data: np.array - sim_data_singles: List[np.array] - min_x: int - max_x: int - min_y: int - max_y: int - fit_color: str - data_color: str - rcount: int - ccount: int - - x_plot: np.array = field(init=False) - y_plot: np.array = field(init=False) - masked_data: np.array = field(init=False) - masked_sim_data: np.array = field(init=False) - residual: np.array = field(init=False) - single_colors: List = field(init=False) - - def __post_init__(self): - self.plane_data = self.pseudo3D.data[self.plane_id] - self.masked_data = self.plane_data.copy() - self.masked_sim_data = self.sim_data.copy() - self.masked_data[~self.mask] = np.nan - self.masked_sim_data[~self.mask] = np.nan - - self.x_plot = self.pseudo3D.uc_f2.ppm( - self.X[self.min_y : self.max_y, self.min_x : self.max_x] - ) - self.y_plot = self.pseudo3D.uc_f1.ppm( - self.Y[self.min_y : self.max_y, self.min_x : self.max_x] - ) - self.masked_data = self.masked_data[ - self.min_y : self.max_y, self.min_x : self.max_x - ] - self.sim_plot = self.masked_sim_data[ - self.min_y : self.max_y, self.min_x : self.max_x - ] - self.residual = self.masked_data - self.sim_plot - - for single_mask, single in zip(self.individual_masks, self.sim_data_singles): - single[~single_mask] = np.nan - self.sim_data_singles = [ - sim_data_single[self.min_y : self.max_y, self.min_x : self.max_x] - for sim_data_single in self.sim_data_singles - ] - self.single_colors = [ - cm.viridis(i) for i in np.linspace(0, 1, len(self.sim_data_singles)) - ] - - -def plot_data_is_valid(plot_data: PlottingDataForPlane) -> bool: - if len(plot_data.x_plot) < 1 or len(plot_data.y_plot) < 1: - print( - f"[red]Nothing to plot for cluster {int(plot_data.plane_lineshape_parameters.clustid)}[/red]" - ) - print(f"[red]x={plot_data.x_plot},y={plot_data.y_plot}[/red]") - print( - df_to_rich_table( - plot_data.plane_lineshape_parameters, - title="", - columns=bad_column_selection, - styles=bad_color_selection, - ) - ) - plt.close() - validated = False - # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") - elif plot_data.masked_data.shape[0] == 0 or plot_data.masked_data.shape[1] == 0: - print(f"[red]Nothing to plot for cluster {int(plot_data.plane.clustid)}[/red]") - print( - df_to_rich_table( - plot_data.plane_lineshape_parameters, - title="Bad plane", - columns=bad_column_selection, - styles=bad_color_selection, - ) - ) - spec_lim_f1 = " - ".join( - ["%8.3f" % i for i in plot_data.pseudo3D.f1_ppm_limits] - ) - spec_lim_f2 = " - ".join( - ["%8.3f" % i for i in plot_data.pseudo3D.f2_ppm_limits] - ) - print(f"Spectrum limits are {plot_data.pseudo3D.f2_label:4s}:{spec_lim_f2} ppm") - print(f" {plot_data.pseudo3D.f1_label:4s}:{spec_lim_f1} ppm") - plt.close() - validated = False - else: - validated = True - return validated - - -def create_matplotlib_figure( - plot_data: PlottingDataForPlane, - pdf: PdfPages, - individual=False, - label=False, - ccpn_flag=False, - 
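The heart of PlottingDataForPlane.__post_init__ above is NaN-masking followed by cropping to the cluster's bounding box. Isolated with toy arrays, a square mask standing in for the elliptical fit mask:

```python
import numpy as np

plane = np.random.rand(10, 10)
mask = np.zeros_like(plane, dtype=bool)
mask[3:7, 3:7] = True  # square here; elliptical in the real code

masked = plane.copy()
masked[~mask] = np.nan     # hide everything outside the fit mask
crop = masked[3:7, 3:7]    # crop to the cluster bounding box
sim = np.zeros_like(crop)  # stand-in for the simulated lineshapes
residual = crop - sim      # what the residual contour plot shows
```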
show=True, -): - fig = plt.figure(figsize=(10, 6)) - ax = fig.add_subplot(projection="3d") - if plot_data_is_valid(plot_data): - cset = ax.contourf( - plot_data.x_plot, - plot_data.y_plot, - plot_data.residual, - zdir="z", - offset=np.nanmin(plot_data.masked_data) * 1.1, - alpha=0.5, - cmap=cm.coolwarm, - ) - cbl = fig.colorbar(cset, ax=ax, shrink=0.5, format="%.2e") - cbl.ax.set_title("Residual", pad=20) - - if individual: - #  for plotting single fit surfaces - single_colors = [ - cm.viridis(i) - for i in np.linspace(0, 1, len(plot_data.sim_data_singles)) - ] - [ - ax.plot_surface( - plot_data.x_plot, - plot_data.y_plot, - z_single, - color=c, - alpha=0.5, - ) - for c, z_single in zip(single_colors, plot_data.sim_data_singles) - ] - ax.plot_wireframe( - plot_data.x_plot, - plot_data.y_plot, - plot_data.sim_plot, - # colors=[cm.coolwarm(i) for i in np.ravel(residual)], - colors=plot_data.fit_color, - linestyle="--", - label="fit", - rcount=plot_data.rcount, - ccount=plot_data.ccount, - ) - ax.plot_wireframe( - plot_data.x_plot, - plot_data.y_plot, - plot_data.masked_data, - colors=plot_data.data_color, - linestyle="-", - label="data", - rcount=plot_data.rcount, - ccount=plot_data.ccount, - ) - ax.set_ylabel(plot_data.pseudo3D.f1_label) - ax.set_xlabel(plot_data.pseudo3D.f2_label) - - # axes will appear inverted - ax.view_init(30, 120) - - title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane_lineshape_parameters.clustid.iloc[0]}" - plt.title(title) - print(f"[green]Plotting: {title}[/green]") - out_str = "Volumes (Heights)\n===========\n" - for _, row in plot_data.plane_lineshape_parameters.iterrows(): - out_str += f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n" - if label: - ax.text( - row.center_x_ppm, - row.center_y_ppm, - row.height * 1.2, - row.assignment, - (1, 1, 1), - ) - - ax.text2D( - -0.5, - 1.0, - out_str, - transform=ax.transAxes, - fontsize=10, - fontfamily="sans-serif", - va="top", - bbox=dict(boxstyle="round", ec="k", fc="k", alpha=0.5), - ) - - ax.legend() - - if show: - - def exit_program(event): - exit() - - def next_plot(event): - plt.close() - - axexit = plt.axes([0.81, 0.05, 0.1, 0.075]) - bnexit = Button(axexit, "Exit") - bnexit.on_clicked(exit_program) - axnext = plt.axes([0.71, 0.05, 0.1, 0.075]) - bnnext = Button(axnext, "Next") - bnnext.on_clicked(next_plot) - if ccpn_flag: - plt.show(windowTitle="", size=(1000, 500)) - else: - plt.show() - else: - pdf.savefig() - - plt.close() - - -def create_plotly_wireframe_lines(plot_data: PlottingDataForPlane): - lines = [] - show_legend = lambda x: x < 1 - showlegend = False - # make simulated data wireframe - line_marker = dict(color=plot_data.fit_color, width=4) - counter = 0 - for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.sim_plot): - showlegend = show_legend(counter) - lines.append( - go.Scatter3d( - x=i, - y=j, - z=k, - mode="lines", - line=line_marker, - name="fit", - showlegend=showlegend, - ) - ) - counter += 1 - for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.sim_plot.T): - lines.append( - go.Scatter3d( - x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend - ) - ) - # make experimental data wireframe - line_marker = dict(color=plot_data.data_color, width=4) - counter = 0 - for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.masked_data): - showlegend = show_legend(counter) - lines.append( - go.Scatter3d( - x=i, - y=j, - z=k, - mode="lines", - name="data", - line=line_marker, - showlegend=showlegend, - ) - ) - counter += 1 - for 
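The matplotlib path removed above lives on in peakipy.plotting. Reduced to its core, the data/fit overlay is wireframes over one meshgrid; a toy surface, not real spectra:

```python
import numpy as np
import matplotlib.pyplot as plt

X, Y = np.meshgrid(np.arange(20), np.arange(10))
Z = np.exp(-((X - 10.0) ** 2) / 8 - ((Y - 5.0) ** 2) / 4)  # toy "fit" surface

fig = plt.figure(figsize=(10, 6))
ax = fig.add_subplot(projection="3d")
ax.plot_wireframe(X, Y, Z, colors="blue", linestyle="--", rcount=10, ccount=10)
ax.view_init(30, 120)  # same camera trick as the removed code: axes appear inverted
plt.close(fig)
```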
i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.masked_data.T): - lines.append( - go.Scatter3d( - x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend - ) - ) - - return lines - - -def construct_surface_legend_string(row): - surface_legend = "" - surface_legend += row.assignment - return surface_legend - - -def create_plotly_surfaces(plot_data: PlottingDataForPlane): - data = [] - color_scale_values = np.linspace(0, 1, len(plot_data.single_colors)) - color_scale = [ - [val, f"rgb({', '.join('%d'%(i*255) for i in c[0:3])})"] - for val, c in zip(color_scale_values, plot_data.single_colors) - ] - for val, individual_peak, row in zip( - color_scale_values, - plot_data.sim_data_singles, - plot_data.plane_lineshape_parameters.itertuples(), - ): - name = construct_surface_legend_string(row) - colors = np.zeros(shape=individual_peak.shape) + val - data.append( - go.Surface( - z=individual_peak, - x=plot_data.x_plot, - y=plot_data.y_plot, - opacity=0.5, - surfacecolor=colors, - colorscale=color_scale, - showscale=False, - cmin=0, - cmax=1, - name=name, - ) - ) - return data - - -def create_residual_contours(plot_data: PlottingDataForPlane): - contours = go.Contour( - x=plot_data.x_plot[0], y=plot_data.y_plot.T[0], z=plot_data.residual - ) - return contours - - -def create_residual_figure(plot_data: PlottingDataForPlane): - data = create_residual_contours(plot_data) - fig = go.Figure(data=data) - fig.update_layout( - title="Fit residuals", - xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", - yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", - xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), - yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), - ) - return fig - - -def create_plotly_figure(plot_data: PlottingDataForPlane): - lines = create_plotly_wireframe_lines(plot_data) - surfaces = create_plotly_surfaces(plot_data) - fig = go.Figure(data=lines + surfaces) - fig = update_axis_ranges(fig, plot_data) - return fig - - -def update_axis_ranges(fig, plot_data: PlottingDataForPlane): - fig.update_layout( - scene=dict( - xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), - yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), - xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", - yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", - annotations=make_annotations(plot_data), - ) - ) - return fig - - -def make_annotations(plot_data: PlottingDataForPlane): - annotations = [] - for row in plot_data.plane_lineshape_parameters.itertuples(): - annotations.append( - dict( - showarrow=True, - x=row.center_x_ppm, - y=row.center_y_ppm, - z=row.height * 1.0, - text=row.assignment, - opacity=0.8, - textangle=0, - arrowsize=1, - ) - ) - return annotations - - -class FitDataModel(BaseModel): - plane: int - clustid: int - assignment: str - memcnt: int - amp: float - height: float - center_x_ppm: float - center_y_ppm: float - fwhm_x_hz: float - fwhm_y_hz: float - lineshape: str - x_radius: float - y_radius: float - center_x: float - center_y: float - sigma_x: float - sigma_y: float - - -class FitDataModelPVGL(FitDataModel): - fraction: float - - -class FitDataModelVoigt(FitDataModel): - fraction: float - gamma_x: float - gamma_y: float - - -class FitDataModelPVPV(FitDataModel): - fraction_x: float - fraction_y: float - - -def validate_fit_data(dict): - lineshape = dict.get("lineshape") - if lineshape in ["PV", "G", "L"]: - fit_data = FitDataModelPVGL(**dict) - elif lineshape == "V": - fit_data = FitDataModelVoigt(**dict) - 
else: - fit_data = FitDataModelPVPV(**dict) - - return fit_data.model_dump() - - -def validate_fit_dataframe(df): - validated_fit_data = [] - for _, row in df.iterrows(): - fit_data = validate_fit_data(row.to_dict()) - validated_fit_data.append(fit_data) - return pd.DataFrame(validated_fit_data) - - @app.command(help="Interactive plots for checking fits") def check( fits: Path, @@ -1295,7 +553,6 @@ def check( colors: Tuple[str, str] = ("#5e3c99", "#e66101"), verb: bool = False, plotly: bool = False, - config_path: Path = Path("peakipy.config"), ): """Interactive plots for checking fits @@ -1348,6 +605,7 @@ def check( fits = validate_fit_dataframe(pd.read_csv(fits)) args = {} # get dims from config file + config_path = data_path.parent / "peakipy.config" args, config = update_args_with_values_from_config_file(args, config_path) dims = config.get("dims", (1, 2, 3)) diff --git a/peakipy/constants.py b/peakipy/constants.py new file mode 100644 index 00000000..04039817 --- /dev/null +++ b/peakipy/constants.py @@ -0,0 +1,6 @@ +from numpy import log, pi, finfo + + +log2 = log(2) +π = pi +tiny = finfo(float).eps diff --git a/peakipy/fitting.py b/peakipy/fitting.py new file mode 100644 index 00000000..915ebfae --- /dev/null +++ b/peakipy/fitting.py @@ -0,0 +1,645 @@ +from dataclasses import dataclass, field +from typing import List + +import numpy as np +from numpy import sqrt +import pandas as pd +from lmfit import Model +from pydantic import BaseModel + +from peakipy.lineshapes import Lineshape, pvoigt2d, pv_pv, pv_g, pv_l, voigt2d +from peakipy.constants import log2 + + +class FitDataModel(BaseModel): + plane: int + clustid: int + assignment: str + memcnt: int + amp: float + height: float + center_x_ppm: float + center_y_ppm: float + fwhm_x_hz: float + fwhm_y_hz: float + lineshape: str + x_radius: float + y_radius: float + center_x: float + center_y: float + sigma_x: float + sigma_y: float + + +class FitDataModelPVGL(FitDataModel): + fraction: float + + +class FitDataModelVoigt(FitDataModel): + fraction: float + gamma_x: float + gamma_y: float + + +class FitDataModelPVPV(FitDataModel): + fraction_x: float + fraction_y: float + + +def validate_fit_data(dict): + lineshape = dict.get("lineshape") + if lineshape in ["PV", "G", "L"]: + fit_data = FitDataModelPVGL(**dict) + elif lineshape == "V": + fit_data = FitDataModelVoigt(**dict) + else: + fit_data = FitDataModelPVPV(**dict) + + return fit_data.model_dump() + + +def validate_fit_dataframe(df): + validated_fit_data = [] + for _, row in df.iterrows(): + fit_data = validate_fit_data(row.to_dict()) + validated_fit_data.append(fit_data) + return pd.DataFrame(validated_fit_data) + + +def make_mask(data, c_x, c_y, r_x, r_y): + """Create and elliptical mask + + Generate an elliptical boolean mask with center c_x/c_y in points + with radii r_x and r_y. 
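validate_fit_data above dispatches on the lineshape string, so a PV/G/L row must carry a fraction field while a Voigt row needs gamma_x/gamma_y as well. A toy row that satisfies the PV schema, all values invented:

```python
from peakipy.fitting import validate_fit_data

row = dict(
    plane=0, clustid=1, assignment="A1", memcnt=1, amp=1.0e5, height=2.0e4,
    center_x_ppm=8.0, center_y_ppm=120.0, fwhm_x_hz=20.0, fwhm_y_hz=15.0,
    lineshape="PV", x_radius=0.04, y_radius=0.4, center_x=50.0, center_y=60.0,
    sigma_x=2.0, sigma_y=2.0, fraction=0.5,
)
validated = validate_fit_data(row)  # selects FitDataModelPVGL, returns a dict
```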
Used to generate fit mask + + :param data: 2D array + :type data: np.array + + :param c_x: x center + :type c_x: float + + :param c_y: y center + :type c_y: float + + :param r_x: radius in x + :type r_x: float + + :param r_y: radius in y + :type r_y: float + + :return: boolean mask of data.shape + :rtype: numpy.array + + """ + a, b = c_y, c_x + n_y, n_x = data.shape + y, x = np.ogrid[-a : n_y - a, -b : n_x - b] + mask = x**2.0 / r_x**2.0 + y**2.0 / r_y**2.0 <= 1.0 + return mask + + +def fix_params(params, to_fix): + """Set parameters to fix + + + :param params: lmfit parameters + :type params: lmfit.Parameters + + :param to_fix: list of parameter name to fix + :type to_fix: list + + :return: updated parameter object + :rtype: lmfit.Parameters + + """ + for k in params: + for p in to_fix: + if p in k: + params[k].vary = False + + return params + + +def get_params(params, name): + ps = [] + ps_err = [] + names = [] + prefixes = [] + for k in params: + if name in k: + ps.append(params[k].value) + ps_err.append(params[k].stderr) + names.append(k) + prefixes.append(k.split(name)[0]) + return ps, ps_err, names, prefixes + + +@dataclass +class PeakLimits: + """Given a peak position and linewidth in points determine + the limits based on the data + + Arguments + --------- + peak: pd.DataFrame + peak is a row from a pandas dataframe + data: np.array + 2D numpy array + """ + + peak: pd.DataFrame + data: np.array + min_x: int = field(init=False) + max_x: int = field(init=False) + min_y: int = field(init=False) + max_y: int = field(init=False) + + def __post_init__(self): + assert self.peak.Y_AXIS <= self.data.shape[0] + assert self.peak.X_AXIS <= self.data.shape[1] + self.max_y = int(np.ceil(self.peak.Y_AXIS + self.peak.YW)) + 1 + if self.max_y > self.data.shape[0]: + self.max_y = self.data.shape[0] + self.max_x = int(np.ceil(self.peak.X_AXIS + self.peak.XW)) + 1 + if self.max_x > self.data.shape[1]: + self.max_x = self.data.shape[1] + + self.min_y = int(self.peak.Y_AXIS - self.peak.YW) + if self.min_y < 0: + self.min_y = 0 + self.min_x = int(self.peak.X_AXIS - self.peak.XW) + if self.min_x < 0: + self.min_x = 0 + + +def estimate_amplitude(peak, data): + assert len(data.shape) == 2 + limits = PeakLimits(peak, data) + amplitude_est = data[limits.min_y : limits.max_y, limits.min_x : limits.max_x].sum() + return amplitude_est + + +def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): + """Make dict of parameter names using prefix""" + + param_dict = {} + + for _, peak in peaks.iterrows(): + str_form = lambda x: "%s%s" % (to_prefix(peak.ASS), x) + # using exact value of points (i.e decimal) + param_dict[str_form("center_x")] = peak.X_AXISf + param_dict[str_form("center_y")] = peak.Y_AXISf + # estimate peak volume + amplitude_est = estimate_amplitude(peak, data) + param_dict[str_form("amplitude")] = amplitude_est + # sigma linewidth esimate + param_dict[str_form("sigma_x")] = peak.XW / 2.0 + param_dict[str_form("sigma_y")] = peak.YW / 2.0 + + match lineshape: + case lineshape.V: + #  Voigt G sigma from linewidth esimate + param_dict[str_form("sigma_x")] = peak.XW / ( + 2.0 * sqrt(2.0 * log2) + ) # 3.6013 + param_dict[str_form("sigma_y")] = peak.YW / ( + 2.0 * sqrt(2.0 * log2) + ) # 3.6013 + #  Voigt L gamma from linewidth esimate + param_dict[str_form("gamma_x")] = peak.XW / 2.0 + param_dict[str_form("gamma_y")] = peak.YW / 2.0 + # height + # add height here + + case lineshape.G: + param_dict[str_form("fraction")] = 0.0 + case lineshape.L: + param_dict[str_form("fraction")] = 1.0 + case 
lineshape.PV_PV: + param_dict[str_form("fraction_x")] = 0.5 + param_dict[str_form("fraction_y")] = 0.5 + case _: + param_dict[str_form("fraction")] = 0.5 + + return param_dict + + +def to_prefix(x): + """ + Peak assignments with characters that are not compatible lmfit model naming + are converted to lmfit "safe" names. + + :param x: Peak assignment to be used as prefix for lmfit model + :type x: str + + :returns: lmfit model prefix (_Peak_assignment_) + :rtype: str + + """ + # must be string + if type(x) != str: + x = str(x) + + prefix = "_" + x + to_replace = [ + [".", "_"], + [" ", ""], + ["{", "_"], + ["}", "_"], + ["[", "_"], + ["]", "_"], + ["-", ""], + ["/", "or"], + ["?", "maybe"], + ["\\", ""], + ["(", "_"], + [")", "_"], + ["@", "_at_"], + ] + for p in to_replace: + prefix = prefix.replace(*p) + return prefix + "_" + + +def make_models( + model, + peaks, + data, + lineshape: Lineshape = Lineshape.PV, + xy_bounds=None, +): + """Make composite models for multiple peaks + + :param model: lineshape function + :type model: function + + :param peaks: instance of pandas.df.groupby("CLUSTID") + :type peaks: pandas.df.groupby("CLUSTID") + + :param data: NMR data + :type data: numpy.array + + :param lineshape: lineshape to use for fit (PV/G/L/PV_PV) + :type lineshape: str + + :param xy_bounds: bounds for peak centers (+/-x, +/-y) + :type xy_bounds: tuple + + :return mod: Composite lmfit model containing all peaks + :rtype mod: lmfit.CompositeModel + + :return p_guess: params for composite model with starting values + :rtype p_guess: lmfit.Parameters + + """ + if len(peaks) == 1: + # make model for first peak + mod = Model(model, prefix="%s" % to_prefix(peaks.ASS.iloc[0])) + # add parameters + param_dict = make_param_dict( + peaks, + data, + lineshape=lineshape, + ) + p_guess = mod.make_params(**param_dict) + + elif len(peaks) > 1: + # make model for first peak + first_peak, *remaining_peaks = peaks.iterrows() + mod = Model(model, prefix="%s" % to_prefix(first_peak[1].ASS)) + for _, peak in remaining_peaks: + mod += Model(model, prefix="%s" % to_prefix(peak.ASS)) + + param_dict = make_param_dict( + peaks, + data, + lineshape=lineshape, + ) + p_guess = mod.make_params(**param_dict) + # add Peak params to p_guess + + update_params(p_guess, param_dict, lineshape=lineshape, xy_bounds=xy_bounds) + + return mod, p_guess + + +def update_params( + params, param_dict, lineshape: Lineshape = Lineshape.PV, xy_bounds=None +): + """Update lmfit parameters with values from Peak + + :param params: lmfit parameters + :type params: lmfit.Parameters object + :param param_dict: parameters corresponding to each peak in fit + :type param_dict: dict + :param lineshape: Lineshape (PV, G, L, PV_PV etc.) 
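to_prefix above exists because lmfit component names cannot contain most punctuation; a few concrete mappings implied by its replacement table (the assignments themselves are invented):

```python
from peakipy.fitting import to_prefix

assert to_prefix("His31 N-H") == "_His31NH_"  # spaces and dashes removed
assert to_prefix("A/B?") == "_AorBmaybe_"     # "/" -> "or", "?" -> "maybe"
assert to_prefix(42) == "_42_"                # non-strings are stringified first
```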
+ :type lineshape: Lineshape + :param xy_bounds: bounds on xy peak positions + :type xy_bounds: tuple + + :returns: None + :rtype: None + + ToDo + -- deal with boundaries + -- currently positions in points + + """ + for k, v in param_dict.items(): + params[k].value = v + # print("update", k, v) + if "center" in k: + if xy_bounds == None: + # no bounds set + pass + else: + if "center_x" in k: + # set x bounds + x_bound = xy_bounds[0] + params[k].min = v - x_bound + params[k].max = v + x_bound + elif "center_y" in k: + # set y bounds + y_bound = xy_bounds[1] + params[k].min = v - y_bound + params[k].max = v + y_bound + # pass + # print( + # "setting limit of %s, min = %.3e, max = %.3e" + # % (k, params[k].min, params[k].max) + # ) + elif "sigma" in k: + params[k].min = 0.0 + params[k].max = 1e4 + + elif "gamma" in k: + params[k].min = 0.0 + params[k].max = 1e4 + # print( + # "setting limit of %s, min = %.3e, max = %.3e" + # % (k, params[k].min, params[k].max) + # ) + elif "fraction" in k: + # fix weighting between 0 and 1 + params[k].min = 0.0 + params[k].max = 1.0 + + #  fix fraction of G or L + match lineshape: + case lineshape.G | lineshape.L: + params[k].vary = False + case lineshape.PV | lineshape.PV_PV: + params[k].vary = True + case _: + pass + + # return params + + +def make_mask_from_peak_cluster(group, data): + mask = np.zeros(data.shape, dtype=bool) + for _, peak in group.iterrows(): + mask += make_mask( + data, peak.X_AXISf, peak.Y_AXISf, peak.X_RADIUS, peak.Y_RADIUS + ) + return mask, peak + + +def select_reference_planes_using_indices(data, indices: List[int]): + n_planes = data.shape[0] + if indices == []: + return data + + max_index = max(indices) + min_index = min(indices) + + if max_index >= n_planes: + raise IndexError( + f"Your data has {n_planes}. You selected plane {max_index} (allowed indices between 0 and {n_planes-1})" + ) + elif min_index < (-1 * n_planes): + raise IndexError( + f"Your data has {n_planes}. You selected plane {min_index} (allowed indices between -{n_planes} and {n_planes-1})" + ) + else: + data = data[indices] + return data + + +def select_planes_above_threshold_from_masked_data(data, threshold=None): + """This function returns planes with data above the threshold. + + It currently uses absolute intensity values. + Negative thresholds just result in return of the orignal data. + + """ + if threshold == None: + selected_data = data + else: + selected_data = data[np.abs(data).max(axis=1) > threshold] + + if selected_data.shape[0] == 0: + selected_data = data + + return selected_data + + +def validate_plane_selection(plane, pseudo3D): + if (plane == []) or (plane == None): + plane = list(range(pseudo3D.n_planes)) + + elif max(plane) > (pseudo3D.n_planes - 1): + raise ValueError( + f"[red]There are {pseudo3D.n_planes} planes in your data you selected --plane {max(plane)}...[red]" + f"plane numbering starts from 0." 
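select_reference_planes_using_indices above narrows the initial lineshape fit to informative planes. Its contract at a glance:

```python
import numpy as np
from peakipy.fitting import select_reference_planes_using_indices

data = np.zeros((6, 100, 200))
assert select_reference_planes_using_indices(data, []).shape == (6, 100, 200)
assert select_reference_planes_using_indices(data, [0, 5]).shape == (2, 100, 200)
# indices outside -6..5 raise IndexError rather than wrapping silently
```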
+ ) + elif min(plane) < 0: + raise ValueError( + f"[red]Plane number can not be negative; you selected --plane {min(plane)}...[/red]" + ) + else: + plane = sorted(plane) + + return plane + + +def slice_peaks_from_data_using_mask(data, mask): + peak_slices = np.array([d[mask] for d in data]) + return peak_slices + + +def get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points): + max_point, min_point = ( + int(np.ceil(max(group_axis_points) + mask_radius_in_points + 1)), + int(np.floor(min(group_axis_points) - mask_radius_in_points)), + ) + return max_point, min_point + + +def deal_with_peaks_on_edge_of_spectrum(data_shape, max_x, min_x, max_y, min_y): + if min_y < 0: + min_y = 0 + + if min_x < 0: + min_x = 0 + + if max_y > data_shape[-2]: + max_y = data_shape[-2] + + if max_x > data_shape[-1]: + max_x = data_shape[-1] + return max_x, min_x, max_y, min_y + + +def make_meshgrid(data_shape): + # must be a better way to make the meshgrid + x = np.arange(data_shape[-1]) + y = np.arange(data_shape[-2]) + XY = np.meshgrid(x, y) + return XY + + +def unpack_xy_bounds(xy_bounds, peakipy_data): + match xy_bounds: + case (0, 0): + xy_bounds = None + case (x, y): + # convert ppm to points + xy_bounds = list(xy_bounds) + xy_bounds[0] = xy_bounds[0] * peakipy_data.pt_per_ppm_f2 + xy_bounds[1] = xy_bounds[1] * peakipy_data.pt_per_ppm_f1 + case _: + raise TypeError( + "xy_bounds should be a tuple (, )" + ) + return xy_bounds + + +def select_specified_planes(plane, peakipy_data): + plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) + # only fit specified planes + if plane: + inds = [i for i in plane] + data_inds = [ + (i in inds) for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) + ] + plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ + data_inds + ] + peakipy_data.data = peakipy_data.data[data_inds] + print( + "[yellow]Using only planes {plane} data now has the following shape[/yellow]", + peakipy_data.data.shape, + ) + if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: + print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) + exit() + return plane_numbers, peakipy_data + + +def exclude_specified_planes(exclude_plane, peakipy_data): + plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]]) + # do not fit these planes + if exclude_plane: + inds = [i for i in exclude_plane] + data_inds = [ + (i not in inds) + for i in range(peakipy_data.data.shape[peakipy_data.dims[0]]) + ] + plane_numbers = np.arange(peakipy_data.data.shape[peakipy_data.dims[0]])[ + data_inds + ] + peakipy_data.data = peakipy_data.data[data_inds] + print( + f"[yellow]Excluding planes {exclude_plane} data now has the following shape[/yellow]", + peakipy_data.data.shape, + ) + if peakipy_data.data.shape[peakipy_data.dims[0]] == 0: + print("[red]You have excluded all the data![/red]", peakipy_data.data.shape) + exit() + return plane_numbers, peakipy_data + + +def get_fit_data_for_selected_peak_clusters(fits, clusters): + match clusters: + case None | []: + pass + case _: + # only use these clusters + fits = fits[fits.clustid.isin(clusters)] + if len(fits) < 1: + exit(f"Are you sure clusters {clusters} exist?") + return fits + + +def make_masks_from_plane_data(empty_mask_array, plane_data): + # make masks + individual_masks = [] + for cx, cy, rx, ry, name in zip( + plane_data.center_x, + plane_data.center_y, + plane_data.x_radius, + plane_data.y_radius, + plane_data.assignment, + ): + tmp_mask = make_mask(empty_mask_array, cx, cy, 
rx, ry) + empty_mask_array += tmp_mask + individual_masks.append(tmp_mask) + filled_mask_array = empty_mask_array + return individual_masks, filled_mask_array + + +def simulate_pv_pv_lineshapes_from_fitted_peak_parameters( + peak_parameters, XY, sim_data, sim_data_singles +): + for amp, c_x, c_y, s_x, s_y, frac_x, frac_y, ls in zip( + peak_parameters.amp, + peak_parameters.center_x, + peak_parameters.center_y, + peak_parameters.sigma_x, + peak_parameters.sigma_y, + peak_parameters.fraction_x, + peak_parameters.fraction_y, + peak_parameters.lineshape, + ): + sim_data_i = pv_pv(XY, amp, c_x, c_y, s_x, s_y, frac_x, frac_y).reshape( + sim_data.shape + ) + sim_data += sim_data_i + sim_data_singles.append(sim_data_i) + return sim_data, sim_data_singles + + +def simulate_lineshapes_from_fitted_peak_parameters( + peak_parameters, XY, sim_data, sim_data_singles +): + shape = sim_data.shape + for amp, c_x, c_y, s_x, s_y, frac, lineshape in zip( + peak_parameters.amp, + peak_parameters.center_x, + peak_parameters.center_y, + peak_parameters.sigma_x, + peak_parameters.sigma_y, + peak_parameters.fraction, + peak_parameters.lineshape, + ): + # print(amp) + match lineshape: + case "G" | "L" | "PV": + sim_data_i = pvoigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + case "PV_L": + sim_data_i = pv_l(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + + case "PV_G": + sim_data_i = pv_g(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + + case "G_L": + sim_data_i = gaussian_lorentzian( + XY, amp, c_x, c_y, s_x, s_y, frac + ).reshape(shape) + + case "V": + sim_data_i = voigt2d(XY, amp, c_x, c_y, s_x, s_y, frac).reshape(shape) + sim_data += sim_data_i + sim_data_singles.append(sim_data_i) + return sim_data, sim_data_singles diff --git a/peakipy/io.py b/peakipy/io.py new file mode 100644 index 00000000..88ed5be8 --- /dev/null +++ b/peakipy/io.py @@ -0,0 +1,913 @@ +import sys +from pathlib import Path +from enum import Enum + +import numpy as np +import nmrglue as ng +import pandas as pd +import textwrap +from rich import print +from rich.console import Console + + +from bokeh.palettes import Category20 +from scipy import ndimage +from skimage.morphology import square, binary_closing, disk, rectangle +from skimage.filters import threshold_otsu + +from peakipy.utils import df_to_rich_table +from peakipy.fitting import make_mask + +console = Console() + + +class StrucEl(str, Enum): + square = "square" + disk = "disk" + rectangle = "rectangle" + mask_method = "mask_method" + + +class PeaklistFormat(str, Enum): + a2 = "a2" + a3 = "a3" + sparky = "sparky" + pipe = "pipe" + peakipy = "peakipy" + + +class OutFmt(str, Enum): + csv = "csv" + pkl = "pkl" + + +class Pseudo3D: + """Read dic, data from NMRGlue and dims from input to create a Pseudo3D dataset + + :param dic: from nmrglue.pipe.read + :type dic: dict + + :param data: data from nmrglue.pipe.read + :type data: numpy.array + + :param dims: dimension order i.e [0,1,2] where 0 = planes, 1 = f1, 2 = f2 + :type dims: list + """ + + def __init__(self, dic, data, dims): + # check dimensions + self._udic = ng.pipe.guess_udic(dic, data) + self._ndim = self._udic["ndim"] + + if self._ndim == 1: + err = f"""[red] + ########################################## + NMR Data should be either 2D or 3D + ########################################## + [/red]""" + # raise TypeError(err) + sys.exit(err) + + # check that spectrum has correct number of dims + elif self._ndim != len(dims): + err = f"""[red] + ################################################################# + Your 
spectrum has {self._ndim} dimensions with shape {data.shape} + but you have given a dimension order of {dims}... + ################################################################# + [/red]""" + # raise ValueError(err) + sys.exit(err) + + elif (self._ndim == 2) and (len(dims) == 2): + self._f1_dim, self._f2_dim = dims + self._planes = 0 + self._uc_f1 = ng.pipe.make_uc(dic, data, dim=self._f1_dim) + self._uc_f2 = ng.pipe.make_uc(dic, data, dim=self._f2_dim) + # make data pseudo3d + self._data = data.reshape((1, data.shape[0], data.shape[1])) + self._dims = [self._planes, self._f1_dim + 1, self._f2_dim + 1] + + else: + self._planes, self._f1_dim, self._f2_dim = dims + self._dims = dims + self._data = data + # make unit conversion dicts + self._uc_f2 = ng.pipe.make_uc(dic, data, dim=self._f2_dim) + self._uc_f1 = ng.pipe.make_uc(dic, data, dim=self._f1_dim) + + #  rearrange data if dims not in standard order + if self._dims != [0, 1, 2]: + # np.argsort returns indices of array for order 0,1,2 to transpose data correctly + # self._dims = np.argsort(self._dims) + self._data = np.transpose(data, self._dims) + + self._dic = dic + + self._f1_label = self._udic[self._f1_dim]["label"] + self._f2_label = self._udic[self._f2_dim]["label"] + + @property + def uc_f1(self): + """Return unit conversion dict for F1""" + return self._uc_f1 + + @property + def uc_f2(self): + """Return unit conversion dict for F2""" + return self._uc_f2 + + @property + def dims(self): + """Return dimension order""" + return self._dims + + @property + def data(self): + """Return array containing data""" + return self._data + + @data.setter + def data(self, data): + self._data = data + + @property + def dic(self): + return self._dic + + @property + def udic(self): + return self._udic + + @property + def ndim(self): + return self._ndim + + @property + def f1_label(self): + # dim label + return self._f1_label + + @property + def f2_label(self): + # dim label + return self._f2_label + + @property + def planes(self): + return self.dims[0] + + @property + def n_planes(self): + return self.data.shape[self.planes] + + @property + def f1(self): + return self.dims[1] + + @property + def f2(self): + return self.dims[2] + + # size of f1 and f2 in points + @property + def f2_size(self): + """Return size of f2 dimension in points""" + return self._udic[self._f2_dim]["size"] + + @property + def f1_size(self): + """Return size of f1 dimension in points""" + return self._udic[self._f1_dim]["size"] + + # points per ppm + @property + def pt_per_ppm_f1(self): + return self.f1_size / ( + self._udic[self._f1_dim]["sw"] / self._udic[self._f1_dim]["obs"] + ) + + @property + def pt_per_ppm_f2(self): + return self.f2_size / ( + self._udic[self._f2_dim]["sw"] / self._udic[self._f2_dim]["obs"] + ) + + # points per hz + @property + def pt_per_hz_f1(self): + return self.f1_size / self._udic[self._f1_dim]["sw"] + + @property + def pt_per_hz_f2(self): + return self.f2_size / self._udic[self._f2_dim]["sw"] + + # hz per point + @property + def hz_per_pt_f1(self): + return 1.0 / self.pt_per_hz_f1 + + @property + def hz_per_pt_f2(self): + return 1.0 / self.pt_per_hz_f2 + + # ppm per point + @property + def ppm_per_pt_f1(self): + return 1.0 / self.pt_per_ppm_f1 + + @property + def ppm_per_pt_f2(self): + return 1.0 / self.pt_per_ppm_f2 + + # get ppm limits for ppm scales + @property + def f2_ppm_scale(self): + return self.uc_f2.ppm_scale() + + @property + def f1_ppm_scale(self): + return self.uc_f1.ppm_scale() + + @property + def f2_ppm_limits(self): + return 
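A minimal usage sketch for `Pseudo3D` (the file name is hypothetical): the object wraps nmrglue's reader output and exposes the point/Hz/ppm conversions used throughout the fitting code.

```python
import nmrglue as ng

dic, data = ng.pipe.read("test.ft2")       # illustrative file name
spectrum = Pseudo3D(dic, data, dims=[0, 1, 2])
print(spectrum.n_planes)                   # number of pseudo-3D planes
print(spectrum.f2_label)                   # e.g. "HN"
print(spectrum.pt_per_hz_f2)               # conversion used for linewidths
f2_ppm_axis = spectrum.f2_ppm_scale        # ppm scale for plotting
```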
self.uc_f2.ppm_limits()
+
+    @property
+    def f1_ppm_limits(self):
+        return self.uc_f1.ppm_limits()
+
+    @property
+    def f1_ppm_max(self):
+        return max(self.f1_ppm_limits)
+
+    @property
+    def f1_ppm_min(self):
+        return min(self.f1_ppm_limits)
+
+    @property
+    def f2_ppm_max(self):
+        return max(self.f2_ppm_limits)
+
+    @property
+    def f2_ppm_min(self):
+        return min(self.f2_ppm_limits)
+
+    @property
+    def f2_ppm_0(self):
+        return self.f2_ppm_limits[0]
+
+    @property
+    def f2_ppm_1(self):
+        return self.f2_ppm_limits[1]
+
+    @property
+    def f1_ppm_0(self):
+        return self.f1_ppm_limits[0]
+
+    @property
+    def f1_ppm_1(self):
+        return self.f1_ppm_limits[1]
+
+
+class UnknownFormat(Exception):
+    pass
+
+
+class Peaklist(Pseudo3D):
+    """Read analysis, sparky or NMRPipe peak list, convert to NMRPipe-ish format and find peak clusters
+
+    Parameters
+    ----------
+    path : path-like or str
+        path to peaklist
+    data_path : path-like or str
+        path to NMRPipe format data
+    fmt : str
+        a2|a3|sparky|pipe
+    dims: list
+        [planes,y,x]
+    radii: list
+        [x,y] Mask radii in ppm
+
+
+    Methods
+    -------
+
+    clusters :
+    mask_method :
+    adaptive_clusters :
+
+    Returns
+    -------
+    df : pandas DataFrame
+        dataframe containing peaklist
+
+    """
+
+    def __init__(
+        self,
+        path,
+        data_path,
+        fmt: PeaklistFormat = PeaklistFormat.a2,
+        dims=[0, 1, 2],
+        radii=[0.04, 0.4],
+        posF1="Position F2",
+        posF2="Position F1",
+        verbose=False,
+    ):
+        dic, data = ng.pipe.read(data_path)
+        Pseudo3D.__init__(self, dic, data, dims)
+        self.fmt = fmt
+        self.peaklist_path = path
+        self.data_path = data_path
+        self.verbose = verbose
+        self._radii = radii
+        self._thres = None
+        if self.verbose:
+            print(
+                "Points per hz f1 = %.3f, f2 = %.3f"
+                % (self.pt_per_hz_f1, self.pt_per_hz_f2)
+            )
+
+        self._analysis_to_pipe_dic = {
+            "#": "INDEX",
+            "Position F1": "X_PPM",
+            "Position F2": "Y_PPM",
+            "Line Width F1 (Hz)": "XW_HZ",
+            "Line Width F2 (Hz)": "YW_HZ",
+            "Height": "HEIGHT",
+            "Volume": "VOL",
+        }
+        self._assign_to_pipe_dic = {
+            "#": "INDEX",
+            "Pos F1": "X_PPM",
+            "Pos F2": "Y_PPM",
+            "LW F1 (Hz)": "XW_HZ",
+            "LW F2 (Hz)": "YW_HZ",
+            "Height": "HEIGHT",
+            "Volume": "VOL",
+        }
+
+        self._sparky_to_pipe_dic = {
+            "index": "INDEX",
+            "w1": "X_PPM",
+            "w2": "Y_PPM",
+            "lw1 (hz)": "XW_HZ",
+            "lw2 (hz)": "YW_HZ",
+            "Height": "HEIGHT",
+            "Volume": "VOL",
+            "Assignment": "ASS",
+        }
+
+        self._analysis_to_pipe_dic[posF1] = "Y_PPM"
+        self._analysis_to_pipe_dic[posF2] = "X_PPM"
+
+        self._df = self.read_peaklist()
+
+    def read_peaklist(self):
+        match self.fmt:
+            case self.fmt.a2:
+                self._df = self._read_analysis()
+
+            case self.fmt.a3:
+                self._df = self._read_assign()
+
+            case self.fmt.sparky:
+                self._df = self._read_sparky()
+
+            case self.fmt.pipe:
+                self._df = self._read_pipe()
+
+            case _:
+                raise UnknownFormat(f"I don't know this format: {self.fmt}")
+
+        return self._df
+
+    @property
+    def df(self):
+        return self._df
+
+    @df.setter
+    def df(self, df):
+        self._df = df
+
+    @property
+    def radii(self):
+        return self._radii
+
+    @property
+    def f2_radius(self):
+        """radius for fitting mask in f2"""
+        return self.radii[0]
+
+    @property
+    def f1_radius(self):
+        """radius for fitting mask in f1"""
+        return self.radii[1]
+
+    @property
+    def analysis_to_pipe_dic(self):
+        return self._analysis_to_pipe_dic
+
+    @property
+    def assign_to_pipe_dic(self):
+        return self._assign_to_pipe_dic
+
+    @property
+    def sparky_to_pipe_dic(self):
+        return self._sparky_to_pipe_dic
+
+    @property
+    def thres(self):
+        if self._thres is None:
+            self._thres = 
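A hedged end-to-end sketch of constructing a `Peaklist`, which ties a peak table to the spectrum it was picked from (file names invented):

```python
peaks = Peaklist(
    "peaks.a2",             # exported from CcpNmr Analysis v2 (illustrative)
    "test.ft2",             # NMRPipe pseudo-3D (illustrative)
    fmt=PeaklistFormat.a2,
    dims=[0, 1, 2],
    radii=[0.04, 0.4],      # [F2, F1] fit-mask radii in ppm
)
df = peaks.df               # NMRPipe-ish columns: X_PPM, XW_HZ, ASS, ...
```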
abs(threshold_otsu(self.data[0])) + return self._thres + else: + return self._thres + + def update_df(self): + # int point value + self.df["X_AXIS"] = self.df.X_PPM.apply(lambda x: self.uc_f2(x, "ppm")) + self.df["Y_AXIS"] = self.df.Y_PPM.apply(lambda x: self.uc_f1(x, "ppm")) + # decimal point value + self.df["X_AXISf"] = self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) + self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) + # in case of missing values (should estimate though) + self.df["XW_HZ"] = self.df.XW_HZ.replace("None", "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace("None", "20.0") + self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") + # convert linewidths to float + self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) + self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) + # convert Hz lw to points + self.df["XW"] = self.df.XW_HZ.apply(lambda x: x * self.pt_per_hz_f2) + self.df["YW"] = self.df.YW_HZ.apply(lambda x: x * self.pt_per_hz_f1) + # makes an assignment column from Assign F1 and Assign F2 columns + # in analysis2.x and ccpnmr v3 assign peak lists + if self.fmt in [PeaklistFormat.a2, PeaklistFormat.a3]: + self.df["ASS"] = self.df.apply( + # lambda i: "".join([i["Assign F1"], i["Assign F2"]]), axis=1 + lambda i: f"{i['Assign F1']}_{i['Assign F2']}", + axis=1, + ) + + # make default values for X and Y radii for fit masks + self.df["X_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f2_radius + self.df["Y_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f1_radius + self.df["X_RADIUS"] = self.df.X_RADIUS_PPM.apply( + lambda x: x * self.pt_per_ppm_f2 + ) + self.df["Y_RADIUS"] = self.df.Y_RADIUS_PPM.apply( + lambda x: x * self.pt_per_ppm_f1 + ) + # add include column + if "include" in self.df.columns: + pass + else: + self.df["include"] = self.df.apply(lambda x: "yes", axis=1) + + # check assignments for duplicates + self.check_assignments() + # check that peaks are within the bounds of the data + self.check_peak_bounds() + + def add_fix_bound_columns(self): + """add columns containing parameter bounds (param_upper/param_lower) + and whether or not parameter should be fixed (yes/no) + + For parameter bounding: + + Column names are _upper and _lower for upper and lower bounds respectively. + Values are given as floating point. Value of 0.0 indicates that parameter is unbounded + X/Y positions are given in ppm + Linewidths are given in Hz + + For parameter fixing: + + Column names are _fix. 
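To make the unit handling in `update_df` concrete, a small worked example with invented spectrometer parameters:

```python
# invented values: 2048-point F2 over a 12 kHz sweep width at 800 MHz
f2_size, sw_hz, obs_mhz = 2048, 12000.0, 800.0
pt_per_hz_f2 = f2_size / sw_hz                 # ≈ 0.171 points per Hz
pt_per_ppm_f2 = f2_size / (sw_hz / obs_mhz)    # ≈ 136.5 points per ppm
xw_points = 20.0 * pt_per_hz_f2                # default XW_HZ -> XW ≈ 3.4 points
x_radius_points = 0.04 * pt_per_ppm_f2         # X_RADIUS_PPM -> X_RADIUS ≈ 5.5 points
```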
+ Values are given as a string 'yes' or 'no' + + """ + pass + + def _read_analysis(self): + df = pd.read_csv(self.peaklist_path, delimiter="\t") + new_columns = [self.analysis_to_pipe_dic.get(i, i) for i in df.columns] + pipe_columns = dict(zip(df.columns, new_columns)) + df = df.rename(index=str, columns=pipe_columns) + + return df + + def _read_assign(self): + df = pd.read_csv(self.peaklist_path, delimiter="\t") + new_columns = [self.assign_to_pipe_dic.get(i, i) for i in df.columns] + pipe_columns = dict(zip(df.columns, new_columns)) + df = df.rename(index=str, columns=pipe_columns) + + return df + + def _read_sparky(self): + df = pd.read_csv( + self.peaklist_path, + skiprows=1, + sep=r"\s+", + names=["ASS", "Y_PPM", "X_PPM", "VOLUME", "HEIGHT", "YW_HZ", "XW_HZ"], + ) + df["INDEX"] = df.index + + return df + + def _read_pipe(self): + to_skip = 0 + with open(self.peaklist_path) as f: + lines = f.readlines() + for line in lines: + if line.startswith("VARS"): + columns = line.strip().split()[1:] + elif line[:5].strip(" ").isdigit(): + break + else: + to_skip += 1 + df = pd.read_csv( + self.peaklist_path, skiprows=to_skip, names=columns, sep=r"\s+" + ) + return df + + def check_assignments(self): + # self.df["ASS"] = self.df. + self.df["ASS"] = self.df.ASS.astype(object) + self.df.loc[self.df["ASS"].isnull(), "ASS"] = "None_dummy_0" + self.df["ASS"] = self.df.ASS.astype(str) + duplicates_bool = self.df.ASS.duplicated() + duplicates = self.df.ASS[duplicates_bool] + if len(duplicates) > 0: + console.print( + textwrap.dedent( + """ + ############################################################################# + You have duplicated assignments in your list... + Currently each peak needs a unique assignment. Sorry about that buddy... + ############################################################################# + """ + ), + style="yellow", + ) + self.df.loc[duplicates_bool, "ASS"] = [ + f"{i}_dummy_{num+1}" for num, i in enumerate(duplicates) + ] + if self.verbose: + print("Here are the duplicates") + print(duplicates) + print(self.df.ASS) + + print( + textwrap.dedent( + """ + Creating dummy assignments for duplicates + + """ + ) + ) + + def check_peak_bounds(self): + columns_to_print = ["INDEX", "ASS", "X_AXIS", "Y_AXIS", "X_PPM", "Y_PPM"] + # check that peaks are within the bounds of spectrum + within_x = (self.df.X_PPM < self.f2_ppm_max) & (self.df.X_PPM > self.f2_ppm_min) + within_y = (self.df.Y_PPM < self.f1_ppm_max) & (self.df.Y_PPM > self.f1_ppm_min) + self.excluded = self.df[~(within_x & within_y)] + self.df = self.df[within_x & within_y] + if len(self.excluded) > 0: + print( + textwrap.dedent( + f"""[red] + ################################################################################# + + Excluding the following peaks as they are not within the spectrum which has shape + + {self.data.shape} + [/red]""" + ) + ) + table_to_print = df_to_rich_table( + self.excluded, + title="Excluded", + columns=columns_to_print, + styles=["red" for i in columns_to_print], + ) + print(table_to_print) + print( + "[red]#################################################################################[/red]" + ) + + def clusters( + self, + thres=None, + struc_el: StrucEl = StrucEl.disk, + struc_size=(3,), + l_struc=None, + ): + """Find clusters of peaks + + :param thres: threshold for positive signals above which clusters are selected. 
If None then threshold_otsu is used + :type thres: float + + :param struc_el: 'square'|'disk'|'rectangle' + structuring element for binary_closing of thresholded data can be square, disc or rectangle + :type struc_el: str + + :param struc_size: size/dimensions of structuring element + for square and disk first element of tuple is used (for disk value corresponds to radius) + for rectangle, tuple corresponds to (width,height). + :type struc_size: tuple + + + """ + peaks = [[y, x] for y, x in zip(self.df.Y_AXIS, self.df.X_AXIS)] + + if thres == None: + thres = self.thres + self._thres = abs(threshold_otsu(self.data[0])) + else: + self._thres = thres + + # get positive and negative + thresh_data = np.bitwise_or( + self.data[0] < (self._thres * -1.0), self.data[0] > self._thres + ) + + match struc_el: + case struc_el.disk: + radius = struc_size[0] + if self.verbose: + print(f"using disk with {radius}") + closed_data = binary_closing(thresh_data, disk(int(radius))) + + case struc_el.square: + width = struc_size[0] + if self.verbose: + print(f"using square with {width}") + closed_data = binary_closing(thresh_data, square(int(width))) + + case struc_el.rectangle: + width, height = struc_size + if self.verbose: + print(f"using rectangle with {width} and {height}") + closed_data = binary_closing( + thresh_data, rectangle(int(width), int(height)) + ) + + case _: + if self.verbose: + print(f"Not using any closing function") + closed_data = thresh_data + + labeled_array, num_features = ndimage.label(closed_data, l_struc) + + self.df.loc[:, "CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] + + #  renumber "0" clusters + max_clustid = self.df["CLUSTID"].max() + n_of_zeros = len(self.df[self.df["CLUSTID"] == 0]["CLUSTID"]) + self.df.loc[self.df[self.df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( + max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int + ) + + # count how many peaks per cluster + for ind, group in self.df.groupby("CLUSTID"): + self.df.loc[group.index, "MEMCNT"] = len(group) + + self.df.loc[:, "color"] = self.df.apply( + lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", + axis=1, + ) + return ClustersResult(labeled_array, num_features, closed_data, peaks) + + def mask_method(self, overlap=1.0, l_struc=None): + """connect clusters based on overlap of fitting masks + + :param overlap: fraction of mask for which overlaps are calculated + :type overlap: float + + :returns ClusterResult: Instance of ClusterResult + :rtype: ClustersResult + """ + # overlap is positive + overlap = abs(overlap) + + self._thres = threshold_otsu(self.data[0]) + + mask = np.zeros(self.data[0].shape, dtype=bool) + + for ind, peak in self.df.iterrows(): + mask += make_mask( + self.data[0], + peak.X_AXISf, + peak.Y_AXISf, + peak.X_RADIUS * overlap, + peak.Y_RADIUS * overlap, + ) + + peaks = [[y, x] for y, x in zip(self.df.Y_AXIS, self.df.X_AXIS)] + labeled_array, num_features = ndimage.label(mask, l_struc) + + self.df.loc[:, "CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] + + #  renumber "0" clusters + max_clustid = self.df["CLUSTID"].max() + n_of_zeros = len(self.df[self.df["CLUSTID"] == 0]["CLUSTID"]) + self.df.loc[self.df[self.df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( + max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int + ) + + # count how many peaks per cluster + for ind, group in self.df.groupby("CLUSTID"): + self.df.loc[group.index, "MEMCNT"] = len(group) + + self.df.loc[:, "color"] = self.df.apply( + lambda x: Category20[20][int(x.CLUSTID) % 20] if 
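Continuing the `Peaklist` sketch above, a hedged example of the clustering call (the threshold value is invented; passing None falls back to Otsu's method):

```python
result = peaks.clusters(
    thres=1e5,                  # invented; None -> threshold_otsu
    struc_el=StrucEl.disk,
    struc_size=(3,),            # disk radius in points
)
print(result.num_features)                # number of connected clusters
print(peaks.df.MEMCNT.value_counts())     # peaks per cluster
```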
x.MEMCNT > 1 else "black", + axis=1, + ) + + return ClustersResult(labeled_array, num_features, mask, peaks) + + def to_fuda(self, fname="params.fuda"): + with open("peaks.fuda", "w") as peaks_fuda: + for ass, f1_ppm, f2_ppm in zip(self.df.ASS, self.df.Y_PPM, self.df.X_PPM): + peaks_fuda.write(f"{ass}\t{f1_ppm:.3f}\t{f2_ppm:.3f}\n") + groups = self.df.groupby("CLUSTID") + fuda_params = Path(fname) + overlap_peaks = "" + + for ind, group in groups: + if len(group) > 1: + overlap_peaks_str = ";".join(group.ASS) + overlap_peaks += f"OVERLAP_PEAKS=({overlap_peaks_str})\n" + + fuda_file = textwrap.dedent( + f"""\ + +# Read peaklist and spectrum info +PEAKLIST=peaks.fuda +SPECFILE={self.data_path} +PARAMETERFILE=(bruker;vclist) +ZCORR=ncyc +NOISE={self.thres} # you'll need to adjust this +BASELINE=N +VERBOSELEVEL=5 +PRINTDATA=Y +LM=(MAXFEV=250;TOL=1e-5) +#Specify the default values. All values are in ppm: +DEF_LINEWIDTH_F1={self.f1_radius} +DEF_LINEWIDTH_F2={self.f2_radius} +DEF_RADIUS_F1={self.f1_radius} +DEF_RADIUS_F2={self.f2_radius} +SHAPE=GLORE +# OVERLAP PEAKS +{overlap_peaks}""" + ) + with open(fuda_params, "w") as f: + print(f"Writing FuDA file {fuda_file}") + f.write(fuda_file) + if self.verbose: + print(overlap_peaks) + + +class ClustersResult: + """Class to store results of clusters function""" + + def __init__(self, labeled_array, num_features, closed_data, peaks): + self._labeled_array = labeled_array + self._num_features = num_features + self._closed_data = closed_data + self._peaks = peaks + + @property + def labeled_array(self): + return self._labeled_array + + @property + def num_features(self): + return self._num_features + + @property + def closed_data(self): + return self._closed_data + + @property + def peaks(self): + return self._peaks + + +class LoadData(Peaklist): + """Load peaklist data from peakipy .csv file output from either peakipy read or edit + + read_peaklist is redefined to just read a .csv file + + check_data_frame makes sure data frame is in good shape for setting up fits + + """ + + def read_peaklist(self): + if self.peaklist_path.suffix == ".csv": + self.df = pd.read_csv(self.peaklist_path) # , comment="#") + + elif self.peaklist_path.suffix == ".tab": + self.df = pd.read_csv(self.peaklist_path, sep="\t") # comment="#") + + else: + self.df = pd.read_pickle(self.peaklist_path) + + self._thres = threshold_otsu(self.data[0]) + + return self.df + + def check_data_frame(self): + # make diameter columns + if "X_DIAMETER_PPM" in self.df.columns: + pass + else: + self.df["X_DIAMETER_PPM"] = self.df["X_RADIUS_PPM"] * 2.0 + self.df["Y_DIAMETER_PPM"] = self.df["Y_RADIUS_PPM"] * 2.0 + + #  make a column to track edited peaks + if "Edited" in self.df.columns: + pass + else: + self.df["Edited"] = np.zeros(len(self.df), dtype=bool) + + # create include column if it doesn't exist + if "include" in self.df.columns: + pass + else: + self.df["include"] = self.df.apply(lambda _: "yes", axis=1) + + # color clusters + self.df["color"] = self.df.apply( + lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", + axis=1, + ) + + # get rid of unnamed columns + unnamed_cols = [i for i in self.df.columns if "Unnamed:" in i] + self.df = self.df.drop(columns=unnamed_cols) + + def update_df(self): + """Slightly modified to retain previous configurations""" + # int point value + self.df["X_AXIS"] = self.df.X_PPM.apply(lambda x: self.uc_f2(x, "ppm")) + self.df["Y_AXIS"] = self.df.Y_PPM.apply(lambda x: self.uc_f1(x, "ppm")) + # decimal point value + self.df["X_AXISf"] = 
self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) + self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) + # in case of missing values (should estimate though) + self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") + self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") + # convert linewidths to float + self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) + self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) + # convert Hz lw to points + self.df["XW"] = self.df.XW_HZ.apply(lambda x: x * self.pt_per_hz_f2) + self.df["YW"] = self.df.YW_HZ.apply(lambda x: x * self.pt_per_hz_f1) + # makes an assignment column + if self.fmt == "a2": + self.df["ASS"] = self.df.apply( + lambda i: "".join([i["Assign F1"], i["Assign F2"]]), axis=1 + ) + + # make default values for X and Y radii for fit masks + # self.df["X_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f2_radius + # self.df["Y_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f1_radius + self.df["X_RADIUS"] = self.df.X_RADIUS_PPM.apply( + lambda x: x * self.pt_per_ppm_f2 + ) + self.df["Y_RADIUS"] = self.df.Y_RADIUS_PPM.apply( + lambda x: x * self.pt_per_ppm_f1 + ) + # add include column + if "include" in self.df.columns: + pass + else: + self.df["include"] = self.df.apply(lambda x: "yes", axis=1) + + # check assignments for duplicates + self.check_assignments() + # check that peaks are within the bounds of the data + self.check_peak_bounds() + + +def get_vclist(vclist, args): + # read vclist + if vclist is None: + vclist = False + elif vclist.exists(): + vclist_data = np.genfromtxt(vclist) + args["vclist_data"] = vclist_data + vclist = True + else: + raise Exception("vclist not found...") + + args["vclist"] = vclist + return args diff --git a/peakipy/lineshapes.py b/peakipy/lineshapes.py new file mode 100644 index 00000000..38b53e26 --- /dev/null +++ b/peakipy/lineshapes.py @@ -0,0 +1,522 @@ +from enum import Enum + +import pandas as pd +from numpy import sqrt, exp, log +from scipy.special import wofz + +from peakipy.constants import π, tiny, log2 + + +class Lineshape(str, Enum): + PV = "PV" + V = "V" + G = "G" + L = "L" + PV_PV = "PV_PV" + G_L = "G_L" + PV_G = "PV_G" + PV_L = "PV_L" + + +def gaussian(x, center=0.0, sigma=1.0): + r"""1-dimensional Gaussian function. + + gaussian(x, center, sigma) = + (1/(s2pi*sigma)) * exp(-(1.0*x-center)**2 / (2*sigma**2)) + + :math:`\\frac{1}{ \sqrt{2\pi} } exp \left( \\frac{-(x-center)^2}{2 \sigma^2} \\right)` + + :param x: x + :param center: center + :param sigma: sigma + :type x: numpy.array + :type center: float + :type sigma: float + + :return: 1-dimensional Gaussian + :rtype: numpy.array + + """ + return (1.0 / max(tiny, (sqrt(2 * π) * sigma))) * exp( + -((1.0 * x - center) ** 2) / max(tiny, (2 * sigma**2)) + ) + + +def lorentzian(x, center=0.0, sigma=1.0): + r"""1-dimensional Lorentzian function. + + lorentzian(x, center, sigma) = + (1/(1 + ((1.0*x-center)/sigma)**2)) / (pi*sigma) + + :math:`\\frac{1}{ 1+ \left( \\frac{x-center}{\sigma}\\right)^2} / (\pi\sigma)` + + :param x: x + :param center: center + :param sigma: sigma + :type x: numpy.array + :type center: float + :type sigma: float + + :return: 1-dimensional Lorenztian + :rtype: numpy.array + + """ + return (1.0 / (1 + ((1.0 * x - center) / max(tiny, sigma)) ** 2)) / max( + tiny, (π * sigma) + ) + + +def voigt(x, center=0.0, sigma=1.0, gamma=None): + r"""Return a 1-dimensional Voigt function. 
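Both 1D profiles above are area-normalised, which a quick numerical integration confirms (a sanity check, not part of the patch):

```python
import numpy as np

x = np.linspace(-50, 50, 100_001)
area_g = np.trapz(gaussian(x, center=0.0, sigma=1.0), x)
area_l = np.trapz(lorentzian(x, center=0.0, sigma=1.0), x)
# area_g ≈ 1.000; area_l ≈ 0.987 over ±50σ (the Lorentzian tails converge slowly)
print(f"{area_g:.4f} {area_l:.4f}")
```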
+ + voigt(x, center, sigma, gamma) = + amplitude*wofz(z).real / (sigma*sqrt(2.0 * π)) + + :math:`V(x,\sigma,\gamma) = (\\frac{Re[\omega(z)]}{\sigma \sqrt{2\pi}})` + + :math:`z=\\frac{x+i\gamma}{\sigma\sqrt{2}}` + + see Voigt_ wiki + + .. _Voigt: https://en.wikipedia.org/wiki/Voigt_profile + + + :param x: x values + :type x: numpy array 1d + :param center: center of lineshape in points + :type center: float + :param sigma: sigma of gaussian + :type sigma: float + :param gamma: gamma of lorentzian + :type gamma: float + + :returns: Voigt lineshape + :rtype: numpy.array + + """ + if gamma is None: + gamma = sigma + + z = (x - center + 1j * gamma) / max(tiny, (sigma * sqrt(2.0))) + return wofz(z).real / max(tiny, (sigma * sqrt(2.0 * π))) + + +def pseudo_voigt(x, center=0.0, sigma=1.0, fraction=0.5): + r"""1-dimensional Pseudo-voigt function + + Superposition of Gaussian and Lorentzian function + + :math:`(1-\phi) G(x,center,\sigma_g) + \phi L(x, center, \sigma)` + + Where :math:`\phi` is the fraction of Lorentzian lineshape and :math:`G` and :math:`L` are Gaussian and + Lorentzian functions, respectively. + + :param x: data + :type x: numpy.array + :param center: center of peak + :type center: float + :param sigma: sigma of lineshape + :type sigma: float + :param fraction: fraction of lorentzian lineshape (between 0 and 1) + :type fraction: float + + :return: pseudo-voigt function + :rtype: numpy.array + + """ + sigma_g = sigma / sqrt(2 * log2) + pv = (1 - fraction) * gaussian(x, center, sigma_g) + fraction * lorentzian( + x, center, sigma + ) + return pv + + +def pvoigt2d( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + fraction=0.5, +): + r"""2D pseudo-voigt model + + :math:`(1-fraction) G(x,center,\sigma_{gx}) + (fraction) L(x, center, \sigma_x) * (1-fraction) G(y,center,\sigma_{gy}) + (fraction) L(y, center, \sigma_y)` + + :param XY: meshgrid of X and Y coordinates [X,Y] each with shape Z + :type XY: numpy.array + + :param amplitude: amplitude of peak + :type amplitude: float + + :param center_x: center of peak in x + :type center_x: float + + :param center_y: center of peak in x + :type center_y: float + + :param sigma_x: sigma of lineshape in x + :type sigma_x: float + + :param sigma_y: sigma of lineshape in y + :type sigma_y: float + + :param fraction: fraction of lorentzian lineshape (between 0 and 1) + :type fraction: float + + :return: flattened array of Z values (use Z.reshape(X.shape) for recovery) + :rtype: numpy.array + + """ + x, y = XY + pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) + pv_y = pseudo_voigt(y, center_y, sigma_y, fraction) + return amplitude * pv_x * pv_y + + +def pv_l( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + fraction=0.5, +): + """2D lineshape model with pseudo-voigt in x and lorentzian in y + + Arguments + ========= + + -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z + -- amplitude: peak amplitude (gaussian and lorentzian) + -- center_x: position of peak in x + -- center_y: position of peak in y + -- sigma_x: linewidth in x + -- sigma_y: linewidth in y + -- fraction: fraction of lorentzian in fit + + Returns + ======= + + -- flattened array of Z values (use Z.reshape(X.shape) for recovery) + + """ + + x, y = XY + pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) + pv_y = pseudo_voigt(y, center_y, sigma_y, 1.0) # lorentzian + return amplitude * pv_x * pv_y + + +def pv_g( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + 
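The fraction parameter interpolates between the two pure shapes: fraction=1 returns the Lorentzian exactly, and fraction=0 the Gaussian with sigma rescaled by 1/sqrt(2 ln 2). A sanity check (not part of the patch):

```python
import numpy as np

x = np.linspace(-5.0, 5.0, 11)
assert np.allclose(
    pseudo_voigt(x, 0.0, 1.0, fraction=1.0), lorentzian(x, 0.0, 1.0)
)
assert np.allclose(
    pseudo_voigt(x, 0.0, 1.0, fraction=0.0),
    gaussian(x, 0.0, 1.0 / np.sqrt(2.0 * np.log(2.0))),
)
```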
fraction=0.5, +): + """2D lineshape model with pseudo-voigt in x and gaussian in y + + Arguments + --------- + + -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z + -- amplitude: peak amplitude (gaussian and lorentzian) + -- center_x: position of peak in x + -- center_y: position of peak in y + -- sigma_x: linewidth in x + -- sigma_y: linewidth in y + -- fraction: fraction of lorentzian in fit + + Returns + ------- + + -- flattened array of Z values (use Z.reshape(X.shape) for recovery) + + """ + x, y = XY + pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) + pv_y = pseudo_voigt(y, center_y, sigma_y, 0.0) # gaussian + return amplitude * pv_x * pv_y + + +def pv_pv( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + fraction_x=0.5, + fraction_y=0.5, +): + """2D lineshape model with pseudo-voigt in x and pseudo-voigt in y + i.e. fraction_x and fraction_y params + + Arguments + ========= + + -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z + -- amplitude: peak amplitude (gaussian and lorentzian) + -- center_x: position of peak in x + -- center_y: position of peak in y + -- sigma_x: linewidth in x + -- sigma_y: linewidth in y + -- fraction_x: fraction of lorentzian in x + -- fraction_y: fraction of lorentzian in y + + Returns + ======= + + -- flattened array of Z values (use Z.reshape(X.shape) for recovery) + + """ + + x, y = XY + pv_x = pseudo_voigt(x, center_x, sigma_x, fraction_x) + pv_y = pseudo_voigt(y, center_y, sigma_y, fraction_y) + return amplitude * pv_x * pv_y + + +def gaussian_lorentzian( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + fraction=0.5, +): + """2D lineshape model with gaussian in x and lorentzian in y + + Arguments + ========= + + -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z + -- amplitude: peak amplitude (gaussian and lorentzian) + -- center_x: position of peak in x + -- center_y: position of peak in y + -- sigma_x: linewidth in x + -- sigma_y: linewidth in y + -- fraction: fraction of lorentzian in fit + + Returns + ======= + + -- flattened array of Z values (use Z.reshape(X.shape) for recovery) + + """ + x, y = XY + pv_x = pseudo_voigt(x, center_x, sigma_x, 0.0) # gaussian + pv_y = pseudo_voigt(y, center_y, sigma_y, 1.0) # lorentzian + return amplitude * pv_x * pv_y + + +def voigt2d( + XY, + amplitude=1.0, + center_x=0.5, + center_y=0.5, + sigma_x=1.0, + sigma_y=1.0, + gamma_x=1.0, + gamma_y=1.0, + fraction=0.5, +): + fraction = 0.5 + gamma_x = None + gamma_y = None + x, y = XY + voigt_x = voigt(x, center_x, sigma_x, gamma_x) + voigt_y = voigt(y, center_y, sigma_y, gamma_y) + return amplitude * voigt_x * voigt_y + + +def get_lineshape_function(lineshape: Lineshape): + match lineshape: + case lineshape.PV | lineshape.G | lineshape.L: + lineshape_function = pvoigt2d + case lineshape.V: + lineshape_function = voigt2d + case lineshape.PV_PV: + lineshape_function = pv_pv + case lineshape.G_L: + lineshape_function = gaussian_lorentzian + case lineshape.PV_G: + lineshape_function = pv_g + case lineshape.PV_L: + lineshape_function = pv_l + case _: + raise Exception("No lineshape was selected!") + return lineshape_function + + +def calculate_height_for_voigt_lineshape(df): + df["height"] = df.apply( + lambda x: voigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + gamma_x=x.gamma_x, + gamma_y=x.gamma_y, + amplitude=x.amp, + ), + axis=1, + ) + df["height_err"] = df.apply( + lambda x: x.amp_err * (x.height / 
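The dispatcher simply maps the `Lineshape` enum onto the model functions defined above; note that G, L and PV all share `pvoigt2d`, with the fraction parameter fixed to 0.0, fixed to 1.0, or left free elsewhere in the fitting code:

```python
assert get_lineshape_function(Lineshape.PV_PV) is pv_pv
assert get_lineshape_function(Lineshape.G) is pvoigt2d  # fraction fixed at fit time
assert get_lineshape_function(Lineshape.V) is voigt2d
```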
x.amp) if x.amp_err != None else 0.0, + axis=1, + ) + return df + + +def calculate_fwhm_for_voigt_lineshape(df): + df["fwhm_g_x"] = df.sigma_x.apply( + lambda x: 2.0 * x * sqrt(2.0 * log(2.0)) + ) # fwhm of gaussian + df["fwhm_g_y"] = df.sigma_y.apply(lambda x: 2.0 * x * sqrt(2.0 * log(2.0))) + df["fwhm_l_x"] = df.gamma_x.apply(lambda x: 2.0 * x) # fwhm of lorentzian + df["fwhm_l_y"] = df.gamma_y.apply(lambda x: 2.0 * x) + df["fwhm_x"] = df.apply( + lambda x: 0.5346 * x.fwhm_l_x + + sqrt(0.2166 * x.fwhm_l_x**2.0 + x.fwhm_g_x**2.0), + axis=1, + ) + df["fwhm_y"] = df.apply( + lambda x: 0.5346 * x.fwhm_l_y + + sqrt(0.2166 * x.fwhm_l_y**2.0 + x.fwhm_g_y**2.0), + axis=1, + ) + return df + + +def calculate_height_for_pseudo_voigt_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=x.fraction, + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_fwhm_for_pseudo_voigt_lineshape(df): + df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) + df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) + return df + + +def calculate_height_for_gaussian_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=0.0, # gaussian + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_height_for_lorentzian_lineshape(df): + df["height"] = df.apply( + lambda x: pvoigt2d( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction=1.0, # lorentzian + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_height_for_pv_pv_lineshape(df): + df["height"] = df.apply( + lambda x: pv_pv( + XY=[0, 0], + center_x=0.0, + center_y=0.0, + sigma_x=x.sigma_x, + sigma_y=x.sigma_y, + amplitude=x.amp, + fraction_x=x.fraction_x, + fraction_y=x.fraction_y, + ), + axis=1, + ) + df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) + return df + + +def calculate_peak_centers_in_ppm(df, peakipy_data): + #  convert values to ppm + df["center_x_ppm"] = df.center_x.apply(lambda x: peakipy_data.uc_f2.ppm(x)) + df["center_y_ppm"] = df.center_y.apply(lambda x: peakipy_data.uc_f1.ppm(x)) + df["init_center_x_ppm"] = df.init_center_x.apply( + lambda x: peakipy_data.uc_f2.ppm(x) + ) + df["init_center_y_ppm"] = df.init_center_y.apply( + lambda x: peakipy_data.uc_f1.ppm(x) + ) + return df + + +def calculate_peak_linewidths_in_hz(df, peakipy_data): + df["sigma_x_ppm"] = df.sigma_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) + df["sigma_y_ppm"] = df.sigma_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) + df["fwhm_x_ppm"] = df.fwhm_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) + df["fwhm_y_ppm"] = df.fwhm_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) + df["fwhm_x_hz"] = df.fwhm_x.apply(lambda x: x * peakipy_data.hz_per_pt_f2) + df["fwhm_y_hz"] = df.fwhm_y.apply(lambda x: x * peakipy_data.hz_per_pt_f1) + return df + + +def calculate_lineshape_specific_height_and_fwhm( + lineshape: Lineshape, df: pd.DataFrame +): + match lineshape: + case lineshape.V: + df = calculate_height_for_voigt_lineshape(df) + df = calculate_fwhm_for_voigt_lineshape(df) + + case lineshape.PV: + df = 
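The fwhm_x/fwhm_y combination above is the Olivero–Longbothum approximation for the Voigt width. A worked number with invented inputs:

```python
from math import log, sqrt

sigma_x, gamma_x = 2.0, 1.5                     # invented values, in points
fwhm_g = 2.0 * sigma_x * sqrt(2.0 * log(2.0))   # Gaussian FWHM ≈ 4.71
fwhm_l = 2.0 * gamma_x                          # Lorentzian FWHM = 3.0
fwhm = 0.5346 * fwhm_l + sqrt(0.2166 * fwhm_l**2 + fwhm_g**2)  # ≈ 6.52 points
```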
calculate_height_for_pseudo_voigt_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.G: + df = calculate_height_for_gaussian_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.L: + df = calculate_height_for_lorentzian_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + case lineshape.PV_PV: + df = calculate_height_for_pv_pv_lineshape(df) + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + case _: + df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + return df diff --git a/peakipy/plotting.py b/peakipy/plotting.py new file mode 100644 index 00000000..479f4ace --- /dev/null +++ b/peakipy/plotting.py @@ -0,0 +1,400 @@ +from dataclasses import dataclass, field +from typing import List + +import pandas as pd +import numpy as np +import plotly.graph_objects as go +import matplotlib.pyplot as plt +from matplotlib import cm +from matplotlib.widgets import Button +from matplotlib.backends.backend_pdf import PdfPages +from rich import print + +from peakipy.io import Pseudo3D +from peakipy.utils import df_to_rich_table, bad_color_selection, bad_column_selection + + +@dataclass +class PlottingDataForPlane: + pseudo3D: Pseudo3D + plane_id: int + plane_lineshape_parameters: pd.DataFrame + X: np.array + Y: np.array + mask: np.array + individual_masks: List[np.array] + sim_data: np.array + sim_data_singles: List[np.array] + min_x: int + max_x: int + min_y: int + max_y: int + fit_color: str + data_color: str + rcount: int + ccount: int + + x_plot: np.array = field(init=False) + y_plot: np.array = field(init=False) + masked_data: np.array = field(init=False) + masked_sim_data: np.array = field(init=False) + residual: np.array = field(init=False) + single_colors: List = field(init=False) + + def __post_init__(self): + self.plane_data = self.pseudo3D.data[self.plane_id] + self.masked_data = self.plane_data.copy() + self.masked_sim_data = self.sim_data.copy() + self.masked_data[~self.mask] = np.nan + self.masked_sim_data[~self.mask] = np.nan + + self.x_plot = self.pseudo3D.uc_f2.ppm( + self.X[self.min_y : self.max_y, self.min_x : self.max_x] + ) + self.y_plot = self.pseudo3D.uc_f1.ppm( + self.Y[self.min_y : self.max_y, self.min_x : self.max_x] + ) + self.masked_data = self.masked_data[ + self.min_y : self.max_y, self.min_x : self.max_x + ] + self.sim_plot = self.masked_sim_data[ + self.min_y : self.max_y, self.min_x : self.max_x + ] + self.residual = self.masked_data - self.sim_plot + + for single_mask, single in zip(self.individual_masks, self.sim_data_singles): + single[~single_mask] = np.nan + self.sim_data_singles = [ + sim_data_single[self.min_y : self.max_y, self.min_x : self.max_x] + for sim_data_single in self.sim_data_singles + ] + self.single_colors = [ + cm.viridis(i) for i in np.linspace(0, 1, len(self.sim_data_singles)) + ] + + +def plot_data_is_valid(plot_data: PlottingDataForPlane) -> bool: + if len(plot_data.x_plot) < 1 or len(plot_data.y_plot) < 1: + print( + f"[red]Nothing to plot for cluster {int(plot_data.plane_lineshape_parameters.clustid)}[/red]" + ) + print(f"[red]x={plot_data.x_plot},y={plot_data.y_plot}[/red]") + print( + df_to_rich_table( + plot_data.plane_lineshape_parameters, + title="", + columns=bad_column_selection, + styles=bad_color_selection, + ) + ) + plt.close() + validated = False + # print(Fore.RED + "Maybe your F1/F2 radii for fitting were too small...") + elif plot_data.masked_data.shape[0] == 0 or plot_data.masked_data.shape[1] == 0: + print(f"[red]Nothing to plot for cluster 
{int(plot_data.plane_lineshape_parameters.clustid)}[/red]")
+        print(
+            df_to_rich_table(
+                plot_data.plane_lineshape_parameters,
+                title="Bad plane",
+                columns=bad_column_selection,
+                styles=bad_color_selection,
+            )
+        )
+        spec_lim_f1 = " - ".join(
+            ["%8.3f" % i for i in plot_data.pseudo3D.f1_ppm_limits]
+        )
+        spec_lim_f2 = " - ".join(
+            ["%8.3f" % i for i in plot_data.pseudo3D.f2_ppm_limits]
+        )
+        print(f"Spectrum limits are {plot_data.pseudo3D.f2_label:4s}:{spec_lim_f2} ppm")
+        print(f"                    {plot_data.pseudo3D.f1_label:4s}:{spec_lim_f1} ppm")
+        plt.close()
+        validated = False
+    else:
+        validated = True
+    return validated
+
+
+def create_matplotlib_figure(
+    plot_data: PlottingDataForPlane,
+    pdf: PdfPages,
+    individual=False,
+    label=False,
+    ccpn_flag=False,
+    show=True,
+):
+    fig = plt.figure(figsize=(10, 6))
+    ax = fig.add_subplot(projection="3d")
+    if plot_data_is_valid(plot_data):
+        cset = ax.contourf(
+            plot_data.x_plot,
+            plot_data.y_plot,
+            plot_data.residual,
+            zdir="z",
+            offset=np.nanmin(plot_data.masked_data) * 1.1,
+            alpha=0.5,
+            cmap=cm.coolwarm,
+        )
+        cbl = fig.colorbar(cset, ax=ax, shrink=0.5, format="%.2e")
+        cbl.ax.set_title("Residual", pad=20)
+
+        if individual:
+            #  for plotting single fit surfaces
+            single_colors = [
+                cm.viridis(i)
+                for i in np.linspace(0, 1, len(plot_data.sim_data_singles))
+            ]
+            [
+                ax.plot_surface(
+                    plot_data.x_plot,
+                    plot_data.y_plot,
+                    z_single,
+                    color=c,
+                    alpha=0.5,
+                )
+                for c, z_single in zip(single_colors, plot_data.sim_data_singles)
+            ]
+        ax.plot_wireframe(
+            plot_data.x_plot,
+            plot_data.y_plot,
+            plot_data.sim_plot,
+            # colors=[cm.coolwarm(i) for i in np.ravel(residual)],
+            colors=plot_data.fit_color,
+            linestyle="--",
+            label="fit",
+            rcount=plot_data.rcount,
+            ccount=plot_data.ccount,
+        )
+        ax.plot_wireframe(
+            plot_data.x_plot,
+            plot_data.y_plot,
+            plot_data.masked_data,
+            colors=plot_data.data_color,
+            linestyle="-",
+            label="data",
+            rcount=plot_data.rcount,
+            ccount=plot_data.ccount,
+        )
+        ax.set_ylabel(plot_data.pseudo3D.f1_label)
+        ax.set_xlabel(plot_data.pseudo3D.f2_label)
+
+        # axes will appear inverted
+        ax.view_init(30, 120)
+
+        title = f"Plane={plot_data.plane_id},Cluster={plot_data.plane_lineshape_parameters.clustid.iloc[0]}"
+        plt.title(title)
+        print(f"[green]Plotting: {title}[/green]")
+        out_str = "Volumes (Heights)\n===========\n"
+        for _, row in plot_data.plane_lineshape_parameters.iterrows():
+            out_str += f"{row.assignment} = {row.amp:.3e} ({row.height:.3e})\n"
+            if label:
+                ax.text(
+                    row.center_x_ppm,
+                    row.center_y_ppm,
+                    row.height * 1.2,
+                    row.assignment,
+                    (1, 1, 1),
+                )
+
+        ax.text2D(
+            -0.5,
+            1.0,
+            out_str,
+            transform=ax.transAxes,
+            fontsize=10,
+            fontfamily="sans-serif",
+            va="top",
+            bbox=dict(boxstyle="round", ec="k", fc="k", alpha=0.5),
+        )
+
+        ax.legend()
+
+        if show:
+
+            def exit_program(event):
+                exit()
+
+            def next_plot(event):
+                plt.close()
+
+            axexit = plt.axes([0.81, 0.05, 0.1, 0.075])
+            bnexit = Button(axexit, "Exit")
+            bnexit.on_clicked(exit_program)
+            axnext = plt.axes([0.71, 0.05, 0.1, 0.075])
+            bnnext = Button(axnext, "Next")
+            bnnext.on_clicked(next_plot)
+            if ccpn_flag:
+                plt.show(windowTitle="", size=(1000, 500))
+            else:
+                plt.show()
+        else:
+            pdf.savefig()
+
+        plt.close()
+
+
+def create_plotly_wireframe_lines(plot_data: PlottingDataForPlane):
+    lines = []
+    show_legend = lambda x: x < 1
+    showlegend = False
+    # make simulated data wireframe
+    line_marker = dict(color=plot_data.fit_color, width=4)
+    counter = 0
+    for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.sim_plot):
+        showlegend = 
show_legend(counter) + lines.append( + go.Scatter3d( + x=i, + y=j, + z=k, + mode="lines", + line=line_marker, + name="fit", + showlegend=showlegend, + ) + ) + counter += 1 + for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.sim_plot.T): + lines.append( + go.Scatter3d( + x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend + ) + ) + # make experimental data wireframe + line_marker = dict(color=plot_data.data_color, width=4) + counter = 0 + for i, j, k in zip(plot_data.x_plot, plot_data.y_plot, plot_data.masked_data): + showlegend = show_legend(counter) + lines.append( + go.Scatter3d( + x=i, + y=j, + z=k, + mode="lines", + name="data", + line=line_marker, + showlegend=showlegend, + ) + ) + counter += 1 + for i, j, k in zip(plot_data.x_plot.T, plot_data.y_plot.T, plot_data.masked_data.T): + lines.append( + go.Scatter3d( + x=i, y=j, z=k, mode="lines", line=line_marker, showlegend=showlegend + ) + ) + + return lines + + +def construct_surface_legend_string(row): + surface_legend = "" + surface_legend += row.assignment + return surface_legend + + +def create_plotly_surfaces(plot_data: PlottingDataForPlane): + data = [] + color_scale_values = np.linspace(0, 1, len(plot_data.single_colors)) + color_scale = [ + [val, f"rgb({', '.join('%d'%(i*255) for i in c[0:3])})"] + for val, c in zip(color_scale_values, plot_data.single_colors) + ] + for val, individual_peak, row in zip( + color_scale_values, + plot_data.sim_data_singles, + plot_data.plane_lineshape_parameters.itertuples(), + ): + name = construct_surface_legend_string(row) + colors = np.zeros(shape=individual_peak.shape) + val + data.append( + go.Surface( + z=individual_peak, + x=plot_data.x_plot, + y=plot_data.y_plot, + opacity=0.5, + surfacecolor=colors, + colorscale=color_scale, + showscale=False, + cmin=0, + cmax=1, + name=name, + ) + ) + return data + + +def create_residual_contours(plot_data: PlottingDataForPlane): + contours = go.Contour( + x=plot_data.x_plot[0], y=plot_data.y_plot.T[0], z=plot_data.residual + ) + return contours + + +def create_residual_figure(plot_data: PlottingDataForPlane): + data = create_residual_contours(plot_data) + fig = go.Figure(data=data) + fig.update_layout( + title="Fit residuals", + xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", + yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", + xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), + yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), + ) + return fig + + +def create_plotly_figure(plot_data: PlottingDataForPlane): + lines = create_plotly_wireframe_lines(plot_data) + surfaces = create_plotly_surfaces(plot_data) + fig = go.Figure(data=lines + surfaces) + fig = update_axis_ranges(fig, plot_data) + return fig + + +def update_axis_ranges(fig, plot_data: PlottingDataForPlane): + fig.update_layout( + scene=dict( + xaxis=dict(range=[plot_data.x_plot.max(), plot_data.x_plot.min()]), + yaxis=dict(range=[plot_data.y_plot.max(), plot_data.y_plot.min()]), + xaxis_title=f"{plot_data.pseudo3D.f2_label} ppm", + yaxis_title=f"{plot_data.pseudo3D.f1_label} ppm", + annotations=make_annotations(plot_data), + ) + ) + return fig + + +def make_annotations(plot_data: PlottingDataForPlane): + annotations = [] + for row in plot_data.plane_lineshape_parameters.itertuples(): + annotations.append( + dict( + showarrow=True, + x=row.center_x_ppm, + y=row.center_y_ppm, + z=row.height * 1.0, + text=row.assignment, + opacity=0.8, + textangle=0, + arrowsize=1, + ) + ) + return annotations + + +def 
validate_sample_count(sample_count):
+    if not isinstance(sample_count, int):
+        raise TypeError("Sample count (ccount, rcount) should be an integer")
+    return sample_count
+
+
+def unpack_plotting_colors(colors):
+    match colors:
+        case (data_color, fit_color):
+            pass
+        case _:
+            data_color, fit_color = "green", "blue"
+    return data_color, fit_color
diff --git a/peakipy/utils.py b/peakipy/utils.py
new file mode 100644
index 00000000..310ee656
--- /dev/null
+++ b/peakipy/utils.py
@@ -0,0 +1,239 @@
+import sys
+import json
+from datetime import datetime
+from pathlib import Path
+from typing import List
+
+from rich import print
+from rich.table import Table
+
+# for printing dataframes
+peaklist_columns_for_printing = ["INDEX", "ASS", "X_PPM", "Y_PPM", "CLUSTID", "MEMCNT"]
+bad_column_selection = [
+    "clustid",
+    "amp",
+    "center_x_ppm",
+    "center_y_ppm",
+    "fwhm_x_hz",
+    "fwhm_y_hz",
+    "lineshape",
+]
+bad_color_selection = [
+    "green",
+    "blue",
+    "yellow",
+    "red",
+    "yellow",
+    "red",
+    "magenta",
+]
+
+
+def run_log(log_name="run_log.txt"):
+    """Write log file containing time script was run and with which arguments"""
+    with open(log_name, "a") as log:
+        sys_argv = sys.argv
+        sys_argv[0] = Path(sys_argv[0]).name
+        run_args = " ".join(sys_argv)
+        time_stamp = datetime.now()
+        time_stamp = time_stamp.strftime("%A %d %B %Y at %H:%M")
+        log.write(f"# Script run on {time_stamp}:\n{run_args}\n")
+
+
+def df_to_rich_table(df, title: str, columns: List[str], styles: List[str]):
+    """Print dataframe using rich library
+
+    Parameters
+    ----------
+    df : pandas.DataFrame
+    title : str
+        title of table
+    columns : List[str]
+        list of column names (must be in df)
+    styles : List[str]
+        list of styles in same order as columns
+    """
+    table = Table(title=title)
+    for col, style in zip(columns, styles):
+        table.add_column(col, style=style)
+    for _, row in df.iterrows():
+        row = row[columns].values
+        str_row = []
+        for i in row:
+            match i:
+                case str():
+                    str_row.append(f"{i}")
+                case float() if i > 1e5:
+                    str_row.append(f"{i:.1e}")
+                case float():
+                    str_row.append(f"{i:.3f}")
+                case bool():
+                    str_row.append(f"{i}")
+                case int():
+                    str_row.append(f"{i}")
+        table.add_row(*str_row)
+    return table
+
+
+def load_config(config_path):
+    if config_path.exists():
+        with open(config_path) as opened_config:
+            config_dic = json.load(opened_config)
+            return config_dic
+    else:
+        return {}
+
+
+def write_config(config_path, config_dic):
+    with open(config_path, "w") as config:
+        config.write(json.dumps(config_dic, sort_keys=True, indent=4))
+
+
+def update_config_file(config_path, config_kvs):
+    config_dic = load_config(config_path)
+    config_dic.update(config_kvs)
+    write_config(config_path, config_dic)
+    return config_dic
+
+
+def update_args_with_values_from_config_file(args, config_path="peakipy.config"):
+    """read a peakipy config file, extract params and update args dict
+
+    :param args: dict containing params extracted from docopt command line
+    :type args: dict
+    :param config_path: path to peakipy config file [default: peakipy.config]
+    :type config_path: str
+
+    :returns args: updated args dict
+    :rtype args: dict
+    :returns config: dict that resulted from reading config file
+    :rtype config: dict
+
+    """
+    # update args with values from peakipy.config file
+    config_path = Path(config_path)
+    if config_path.exists():
+        try:
+            config = load_config(config_path)
+            print(
+                f"[green]Using config file with dims [yellow]{config.get('dims')}[/yellow][/green]"
+            )
+            
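A minimal `df_to_rich_table` usage sketch (the data is invented):

```python
import pandas as pd
from rich.console import Console

demo = pd.DataFrame({"ASS": ["A1_HN", "A2_HN"], "X_PPM": [8.123, 7.456]})
table = df_to_rich_table(
    demo, title="Peaks", columns=["ASS", "X_PPM"], styles=["green", "cyan"]
)
Console().print(table)
```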
args["dims"] = config.get("dims", (0, 1, 2)) + noise = config.get("noise") + if noise: + noise = float(noise) + + colors = config.get("colors", ["#5e3c99", "#e66101"]) + except json.decoder.JSONDecodeError: + print( + "[red]Your peakipy.config file is corrupted - maybe your JSON is not correct...[/red]" + ) + print("[red]Not using[/red]") + noise = False + colors = args.get("colors", ("#5e3c99", "#e66101")) + config = {} + else: + print( + "[red]No peakipy.config found - maybe you need to generate one with peakipy read or see docs[/red]" + ) + noise = False + colors = args.get("colors", ("#5e3c99", "#e66101")) + config = {} + + args["noise"] = noise + args["colors"] = colors + + return args, config + + +def update_linewidths_from_hz_to_points(peakipy_data): + """in case they were adjusted when running edit.py""" + peakipy_data.df["XW"] = peakipy_data.df.XW_HZ * peakipy_data.pt_per_hz_f2 + peakipy_data.df["YW"] = peakipy_data.df.YW_HZ * peakipy_data.pt_per_hz_f1 + return peakipy_data + + +def update_peak_positions_from_ppm_to_points(peakipy_data): + # convert peak positions from ppm to points in case they were adjusted running edit.py + peakipy_data.df["X_AXIS"] = peakipy_data.df.X_PPM.apply( + lambda x: peakipy_data.uc_f2(x, "PPM") + ) + peakipy_data.df["Y_AXIS"] = peakipy_data.df.Y_PPM.apply( + lambda x: peakipy_data.uc_f1(x, "PPM") + ) + peakipy_data.df["X_AXISf"] = peakipy_data.df.X_PPM.apply( + lambda x: peakipy_data.uc_f2.f(x, "PPM") + ) + peakipy_data.df["Y_AXISf"] = peakipy_data.df.Y_PPM.apply( + lambda x: peakipy_data.uc_f1.f(x, "PPM") + ) + return peakipy_data + + +def save_data(df, output_name): + suffix = output_name.suffix + + if suffix == ".csv": + df.to_csv(output_name, float_format="%.4f", index=False) + + elif suffix == ".tab": + df.to_csv(output_name, sep="\t", float_format="%.4f", index=False) + + else: + df.to_pickle(output_name) + + +def check_data_shape_is_consistent_with_dims(peakipy_data): + # check data shape is consistent with dims + if len(peakipy_data.dims) != len(peakipy_data.data.shape): + print( + f"Dims are {peakipy_data.dims} while data shape is {peakipy_data.data.shape}?" + ) + exit() + + +def check_for_include_column_and_add_if_missing(peakipy_data): + # only include peaks with 'include' + if "include" in peakipy_data.df.columns: + pass + else: + # for compatibility + peakipy_data.df["include"] = peakipy_data.df.apply(lambda _: "yes", axis=1) + return peakipy_data + + +def remove_excluded_peaks(peakipy_data): + if len(peakipy_data.df[peakipy_data.df.include != "yes"]) > 0: + excluded = peakipy_data.df[peakipy_data.df.include != "yes"][ + peaklist_columns_for_printing + ] + table = df_to_rich_table( + excluded, + title="[yellow] Excluded peaks [/yellow]", + columns=excluded.columns, + styles=["yellow" for i in excluded.columns], + ) + print(table) + peakipy_data.df = peakipy_data.df[peakipy_data.df.include == "yes"] + return peakipy_data + + +def warn_if_trying_to_fit_large_clusters(max_cluster_size, peakipy_data): + if max_cluster_size is None: + max_cluster_size = peakipy_data.df.MEMCNT.max() + if peakipy_data.df.MEMCNT.max() > 10: + print( + f"""[red] + ################################################################## + You have some clusters of as many as {max_cluster_size} peaks. + You may want to consider reducing the size of your clusters as the + fits will struggle. 
+ + Otherwise you can use the --max-cluster-size flag to exclude large + clusters + ################################################################## + [/red]""" + ) + else: + max_cluster_size = max_cluster_size + return max_cluster_size diff --git a/test/test_cli.py b/test/test_cli.py index aec9c35c..d30404c4 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -6,12 +6,10 @@ import peakipy.cli.main from peakipy.cli.main import PeaklistFormat, Lineshape -os.chdir("test") - @pytest.fixture def protein_L(): - path = Path("test_protein_L") + path = Path("test/test_protein_L") return path diff --git a/test/test_data.py b/test/test_data.py index 2a37150e..0b105bf8 100644 --- a/test/test_data.py +++ b/test/test_data.py @@ -6,15 +6,8 @@ from mpl_toolkits.mplot3d import Axes3D from lmfit import Model, report_fit -from peakipy.core import ( - pvoigt2d, - fix_params, - get_params, - make_mask, - # fit_first_plane, - make_models, - Lineshape, -) +from peakipy.lineshapes import pvoigt2d, Lineshape +from peakipy.fitting import make_mask, make_models def fit_first_plane( @@ -153,7 +146,7 @@ def fit_first_plane( for p in peaks: data += pvoigt2d( XY, - *p + *p, # amplitude=1e8, # center_x=200, # center_y=100, diff --git a/test/test_edit.py b/test/test_edit.py new file mode 100644 index 00000000..ab01e4ab --- /dev/null +++ b/test/test_edit.py @@ -0,0 +1 @@ +import panel as pn diff --git a/test/test_fit.py b/test/test_fit.py index 83512aed..c41d0ebf 100644 --- a/test/test_fit.py +++ b/test/test_fit.py @@ -25,7 +25,7 @@ FitPeaksArgs, FitPeaksInput, ) -from peakipy.core import Lineshape, pvoigt2d +from peakipy.lineshapes import Lineshape, pvoigt2d def test_get_fit_peaks_result_validation_model_PVPV(): diff --git a/test/test_fitting.py b/test/test_fitting.py new file mode 100644 index 00000000..01c9ed38 --- /dev/null +++ b/test/test_fitting.py @@ -0,0 +1,692 @@ +import unittest +from pathlib import Path +from collections import namedtuple + +import numpy as np +import pandas as pd +import pytest +import nmrglue as ng +from numpy.testing import assert_array_equal +from lmfit import Model, Parameters + +from peakipy.io import Pseudo3D +from peakipy.fitting import ( + FitDataModel, + validate_fit_data, + validate_fit_dataframe, + select_reference_planes_using_indices, + slice_peaks_from_data_using_mask, + select_planes_above_threshold_from_masked_data, + get_limits_for_axis_in_points, + deal_with_peaks_on_edge_of_spectrum, + estimate_amplitude, + make_mask, + make_mask_from_peak_cluster, + make_meshgrid, + get_params, + fix_params, + make_param_dict, + to_prefix, + make_models, + PeakLimits, + update_params, + make_masks_from_plane_data, +) +from peakipy.lineshapes import Lineshape, pvoigt2d, pv_pv + + +@pytest.fixture +def fitdatamodel_dict(): + return FitDataModel( + plane=1, + clustid=1, + assignment="assignment", + memcnt=1, + amp=10.0, + height=10.0, + center_x_ppm=0.0, + center_y_ppm=0.0, + fwhm_x_hz=10.0, + fwhm_y_hz=10.0, + lineshape="PV", + x_radius=0.04, + y_radius=0.4, + center_x=0.0, + center_y=0.0, + sigma_x=1.0, + sigma_y=1.0, + ).model_dump() + + +def test_validate_fit_data_PVGL(fitdatamodel_dict): + fitdatamodel_dict.update(dict(fraction=0.5)) + validate_fit_data(fitdatamodel_dict) + + fitdatamodel_dict.update(dict(lineshape="G")) + validate_fit_data(fitdatamodel_dict) + + fitdatamodel_dict.update(dict(lineshape="L")) + validate_fit_data(fitdatamodel_dict) + + fitdatamodel_dict.update( + dict(lineshape="V", fraction=0.5, gamma_x=1.0, gamma_y=1.0) + ) + validate_fit_data(fitdatamodel_dict) + + 
fitdatamodel_dict.update(dict(lineshape="PVPV", fraction_x=0.5, fraction_y=1.0)) + validate_fit_data(fitdatamodel_dict) + + +def test_validate_fit_dataframe(fitdatamodel_dict): + fitdatamodel_dict.update(dict(fraction=0.5)) + df = pd.DataFrame([fitdatamodel_dict] * 5) + validate_fit_dataframe(df) + + +def test_select_reference_planes_using_indices(): + data = np.zeros((6, 100, 200)) + indices = [] + np.testing.assert_array_equal( + select_reference_planes_using_indices(data, indices), data + ) + indices = [1] + assert select_reference_planes_using_indices(data, indices).shape == (1, 100, 200) + indices = [1, -1] + assert select_reference_planes_using_indices(data, indices).shape == (2, 100, 200) + + +def test_select_reference_planes_using_indices_min_index_error(): + data = np.zeros((6, 100, 200)) + indices = [-7] + with pytest.raises(IndexError): + select_reference_planes_using_indices(data, indices) + + +def test_select_reference_planes_using_indices_max_index_error(): + data = np.zeros((6, 100, 200)) + indices = [6] + with pytest.raises(IndexError): + select_reference_planes_using_indices(data, indices) + + +def test_slice_peaks_from_data_using_mask(): + data = np.array( + [ + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], + [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], + [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], + [1, 2, 3, 4, 5, 5, 4, 3, 2, 1], + [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], + [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], + [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + ] + ) + for i in range(5) + ] + ) + mask = data[0] > 0 + assert data.shape == (5, 11, 10) + assert mask.shape == (11, 10) + peak_slices = slice_peaks_from_data_using_mask(data, mask) + # array is flattened by application of mask + assert peak_slices.shape == (5, 50) + + +def test_select_planes_above_threshold_from_masked_data(): + peak_slices = np.array( + [ + [1, 1, 1, 1, 1, 1], + [2, 2, 2, 2, 2, 2], + [-1, -1, -1, -1, -1, -1], + [-2, -2, -2, -2, -2, -2], + ] + ) + assert peak_slices.shape == (4, 6) + threshold = -1 + assert select_planes_above_threshold_from_masked_data( + peak_slices, threshold + ).shape == ( + 4, + 6, + ) + threshold = 2 + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + threshold = 1 + assert select_planes_above_threshold_from_masked_data( + peak_slices, threshold + ).shape == (2, 6) + + threshold = None + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + threshold = 10 + assert_array_equal( + select_planes_above_threshold_from_masked_data(peak_slices, threshold), + peak_slices, + ) + + +def test_make_param_dict(): + selected_planes = [1, 2] + data = np.ones((4, 10, 5)) + expected_shape = (2, 10, 5) + actual_shape = data[np.array(selected_planes)].shape + assert expected_shape == actual_shape + + +def test_make_param_dict_sum(): + data = np.ones((4, 10, 5)) + expected_sum = 200 + actual_sum = data.sum() + assert expected_sum == actual_sum + + +def test_make_param_dict_selected(): + selected_planes = [1, 2] + data = np.ones((4, 10, 5)) + data = data[np.array(selected_planes)] + expected_sum = 100 + actual_sum = data.sum() + assert expected_sum == actual_sum + + +def test_update_params_normal_case(): + params = Parameters() + params.add("center_x", value=0) + params.add("center_y", value=0) + params.add("sigma", value=1) + params.add("gamma", value=1) + params.add("fraction", 
value=0.5) + + param_dict = { + "center_x": 10, + "center_y": 20, + "sigma": 2, + "gamma": 3, + "fraction": 0.8, + } + + xy_bounds = (5, 5) + + update_params(params, param_dict, Lineshape.PV, xy_bounds) + + assert params["center_x"].value == 10 + assert params["center_y"].value == 20 + assert params["sigma"].value == 2 + assert params["gamma"].value == 3 + assert params["fraction"].value == 0.8 + assert params["center_x"].min == 5 + assert params["center_x"].max == 15 + assert params["center_y"].min == 15 + assert params["center_y"].max == 25 + assert params["sigma"].min == 0.0 + assert params["sigma"].max == 1e4 + assert params["gamma"].min == 0.0 + assert params["gamma"].max == 1e4 + assert params["fraction"].min == 0.0 + assert params["fraction"].max == 1.0 + assert params["fraction"].vary is True + + +def test_update_params_lineshape_G(): + params = Parameters() + params.add("fraction", value=0.5) + + param_dict = {"fraction": 0.7} + + update_params(params, param_dict, Lineshape.G) + + assert params["fraction"].value == 0.7 + assert params["fraction"].min == 0.0 + assert params["fraction"].max == 1.0 + assert params["fraction"].vary is False + + +def test_update_params_lineshape_L(): + params = Parameters() + params.add("fraction", value=0.5) + + param_dict = {"fraction": 0.7} + + update_params(params, param_dict, Lineshape.L) + + assert params["fraction"].value == 0.7 + assert params["fraction"].min == 0.0 + assert params["fraction"].max == 1.0 + assert params["fraction"].vary is False + + +def test_update_params_lineshape_PV_PV(): + params = Parameters() + params.add("fraction", value=0.5) + + param_dict = {"fraction": 0.7} + + update_params(params, param_dict, Lineshape.PV_PV) + + assert params["fraction"].value == 0.7 + assert params["fraction"].min == 0.0 + assert params["fraction"].max == 1.0 + assert params["fraction"].vary is True + + +def test_update_params_no_bounds(): + params = Parameters() + params.add("center_x", value=0) + params.add("center_y", value=0) + + param_dict = { + "center_x": 10, + "center_y": 20, + } + + update_params(params, param_dict, Lineshape.PV, None) + + assert params["center_x"].value == 10 + assert params["center_y"].value == 20 + assert params["center_x"].min == -np.inf + assert params["center_x"].max == np.inf + assert params["center_y"].min == -np.inf + assert params["center_y"].max == np.inf + + +def test_peak_limits_normal_case(): + peak = pd.DataFrame({"X_AXIS": [5], "Y_AXIS": [5], "XW": [2], "YW": [2]}).iloc[0] + data = np.zeros((10, 10)) + pl = PeakLimits(peak, data) + assert pl.min_x == 3 + assert pl.max_x == 8 + assert pl.min_y == 3 + assert pl.max_y == 8 + + +def test_peak_limits_at_edge(): + peak = pd.DataFrame({"X_AXIS": [1], "Y_AXIS": [1], "XW": [2], "YW": [2]}).iloc[0] + data = np.zeros((10, 10)) + pl = PeakLimits(peak, data) + assert pl.min_x == 0 + assert pl.max_x == 4 + assert pl.min_y == 0 + assert pl.max_y == 4 + + +def test_peak_limits_exceeding_bounds(): + peak = pd.DataFrame({"X_AXIS": [9], "Y_AXIS": [9], "XW": [2], "YW": [2]}).iloc[0] + data = np.zeros((10, 10)) + pl = PeakLimits(peak, data) + assert pl.min_x == 7 + assert pl.max_x == 10 + assert pl.min_y == 7 + assert pl.max_y == 10 + + +def test_peak_limits_small_data(): + peak = pd.DataFrame({"X_AXIS": [2], "Y_AXIS": [2], "XW": [5], "YW": [5]}).iloc[0] + data = np.zeros((5, 5)) + pl = PeakLimits(peak, data) + assert pl.min_x == 0 + assert pl.max_x == 5 + assert pl.min_y == 0 + assert pl.max_y == 5 + + +def test_peak_limits_assertion_error(): + peak = pd.DataFrame({"X_AXIS": 
[11], "Y_AXIS": [11], "XW": [2], "YW": [2]}).iloc[0] + data = np.zeros((10, 10)) + with pytest.raises(AssertionError): + pl = PeakLimits(peak, data) + + +def test_estimate_amplitude(): + peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) + p = peak(5, 2, 3, 2) + data = np.ones((20, 10)) + expected_result = 25 + actual_result = estimate_amplitude(p, data) + assert expected_result == actual_result + + +def test_estimate_amplitude_invalid_indices(): + peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) + p = peak(1, 2, 3, 2) + data = np.ones((20, 10)) + expected_result = 20 + actual_result = estimate_amplitude(p, data) + assert expected_result == actual_result + + +def test_make_mask_from_peak_cluster(): + data = np.ones((10, 10)) + group = pd.DataFrame( + {"X_AXISf": [3, 6], "Y_AXISf": [3, 6], "X_RADIUS": [2, 3], "Y_RADIUS": [2, 3]} + ) + mask, peak = make_mask_from_peak_cluster(group, data) + expected_mask = np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 0, 0, 0, 0, 0], + [0, 1, 1, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 1, 1, 0], + [0, 0, 0, 1, 1, 1, 1, 1, 1, 0], + [0, 0, 0, 1, 1, 1, 1, 1, 1, 1], + [0, 0, 0, 0, 1, 1, 1, 1, 1, 0], + [0, 0, 0, 0, 1, 1, 1, 1, 1, 0], + [0, 0, 0, 0, 0, 0, 1, 0, 0, 0], + ], + dtype=bool, + ) + assert_array_equal(expected_mask, mask) + + +# get_limits_for_axis_in_points +def test_positive_points(): + group_axis_points = np.array([1, 2, 3, 4, 5]) + mask_radius_in_points = 2 + expected = (8, -1) # ceil(5+1+1), floor(1-1) + assert ( + get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points) + == expected + ) + + +def test_single_point(): + group_axis_points = np.array([5]) + mask_radius_in_points = 3 + expected = (9, 2) + assert ( + get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points) + == expected + ) + + +def test_no_radius(): + group_axis_points = np.array([1, 2, 3]) + mask_radius_in_points = 0 + expected = (4, 1) + assert ( + get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points) + == expected + ) + + +# deal_with_peaks_on_edge_of_spectrum +def test_min_y_less_than_zero(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 50, 30, 10, -10) == ( + 50, + 30, + 10, + 0, + ) + + +def test_min_x_less_than_zero(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 50, -5, 70, 20) == ( + 50, + 0, + 70, + 20, + ) + + +def test_max_y_exceeds_data_shape(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 50, 30, 110, 20) == ( + 50, + 30, + 100, + 20, + ) + + +def test_max_x_exceeds_data_shape(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 250, 30, 70, 20) == ( + 200, + 30, + 70, + 20, + ) + + +def test_values_within_range(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 50, 30, 70, 20) == ( + 50, + 30, + 70, + 20, + ) + + +def test_all_edge_cases(): + assert deal_with_peaks_on_edge_of_spectrum((100, 200), 250, -5, 110, -10) == ( + 200, + 0, + 100, + 0, + ) + + +def test_make_meshgrid(): + data_shape = (4, 5) + expected_x = np.array( + [[0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4], [0, 1, 2, 3, 4]] + ) + expected_y = np.array( + [[0, 0, 0, 0, 0], [1, 1, 1, 1, 1], [2, 2, 2, 2, 2], [3, 3, 3, 3, 3]] + ) + XY = make_meshgrid(data_shape) + np.testing.assert_array_equal(XY[0], expected_x) + np.testing.assert_array_equal(XY[1], expected_y) + + +class TestCoreFunctions(unittest.TestCase): + test_directory = Path(__file__).parent + test_directory = "./test" + + def test_make_mask(self): + data = np.ones((10, 
10))
+        c_x = 5
+        c_y = 5
+        r_x = 3
+        r_y = 2
+
+        expected_result = np.array(
+            [
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0],
+                [0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
+                [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+            ]
+        )
+
+        result = np.array(make_mask(data, c_x, c_y, r_x, r_y), dtype=int)
+        test = result - expected_result
+        # use the absolute difference so +1/-1 mismatches cannot cancel out
+        self.assertEqual(np.abs(test).sum(), 0)
+
+    def test_make_mask_2(self):
+        data = np.ones((10, 10))
+        c_x = 5
+        c_y = 8
+        r_x = 3
+        r_y = 2
+
+        expected_result = np.array(
+            [
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
+                [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0],
+                [0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
+                [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0],
+            ]
+        )
+
+        result = np.array(make_mask(data, c_x, c_y, r_x, r_y), dtype=int)
+        test = result - expected_result
+        # use the absolute difference so +1/-1 mismatches cannot cancel out
+        self.assertEqual(np.abs(test).sum(), 0)
+
+    def test_fix_params(self):
+        mod = Model(pvoigt2d)
+        pars = mod.make_params()
+        to_fix = ["center", "sigma", "fraction"]
+        fix_params(pars, to_fix)
+
+        self.assertEqual(pars["center_x"].vary, False)
+        self.assertEqual(pars["center_y"].vary, False)
+        self.assertEqual(pars["sigma_x"].vary, False)
+        self.assertEqual(pars["sigma_y"].vary, False)
+        self.assertEqual(pars["fraction"].vary, False)
+
+    def test_get_params(self):
+        mod = Model(pvoigt2d, prefix="p1_")
+        pars = mod.make_params(p1_center_x=20.0, p1_center_y=30.0)
+        pars["p1_center_x"].stderr = 1.0
+        pars["p1_center_y"].stderr = 2.0
+        ps, ps_err, names, prefixes = get_params(pars, "center")
+        # get index of values
+        cen_x = names.index("p1_center_x")
+        cen_y = names.index("p1_center_y")
+
+        self.assertEqual(ps[cen_x], 20.0)
+        self.assertEqual(ps[cen_y], 30.0)
+        self.assertEqual(ps_err[cen_x], 1.0)
+        self.assertEqual(ps_err[cen_y], 2.0)
+        self.assertEqual(prefixes[cen_y], "p1_")
+
+    def test_make_param_dict(self):
+        peaks = pd.DataFrame(
+            {
+                "ASS": ["one", "two", "three"],
+                "X_AXISf": [5.0, 10.0, 15.0],
+                "X_AXIS": [5, 10, 15],
+                "Y_AXISf": [15.0, 10.0, 5.0],
+                "Y_AXIS": [15, 10, 5],
+                "XW": [2.5, 2.5, 2.5],
+                "YW": [2.5, 2.5, 2.5],
+            }
+        )
+        data = np.ones((20, 20))
+
+        for ls, frac in zip([Lineshape.PV, Lineshape.G, Lineshape.L], [0.5, 0.0, 1.0]):
+            params = make_param_dict(peaks, data, ls)
+            self.assertEqual(params["_one_fraction"], frac)
+            self.assertEqual(params["_two_fraction"], frac)
+            self.assertEqual(params["_three_fraction"], frac)
+
+        self.assertEqual(params["_one_center_x"], 5.0)
+        self.assertEqual(params["_two_center_x"], 10.0)
+        self.assertEqual(params["_two_sigma_x"], 1.25)
+        self.assertEqual(params["_two_sigma_y"], 1.25)
+
+        voigt_params = make_param_dict(peaks, data, Lineshape.V)
+        self.assertEqual(
+            voigt_params["_one_sigma_x"], 2.5 / (2.0 * 
np.sqrt(2.0 * np.log(2))) + ) + self.assertEqual(voigt_params["_one_gamma_x"], 2.5 / 2.0) + + def test_to_prefix(self): + names = [ + (1, "_1_"), + (1.0, "_1_0_"), + (" one", "_one_"), + (" one/two", "_oneortwo_"), + (" one?two", "_onemaybetwo_"), + (r" [{one?two\}][", "___onemaybetwo____"), + ] + for test, expect in names: + prefix = to_prefix(test) + # print(prefix) + self.assertEqual(prefix, expect) + + def test_make_models(self): + peaks = pd.DataFrame( + { + "ASS": ["one", "two", "three"], + "X_AXISf": [5.0, 10.0, 15.0], + "X_AXIS": [5, 10, 15], + "Y_AXISf": [15.0, 10.0, 5.0], + "Y_AXIS": [15, 10, 5], + "XW": [2.5, 2.5, 2.5], + "YW": [2.5, 2.5, 2.5], + "CLUSTID": [1, 1, 1], + } + ) + + group = peaks.groupby("CLUSTID") + + data = np.ones((20, 20)) + + lineshapes = [Lineshape.PV, Lineshape.L, Lineshape.G, Lineshape.PV_PV] + + for lineshape in lineshapes: + match lineshape: + case lineshape.PV: + mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) + self.assertEqual(p_guess["_one_fraction"].vary, True) + self.assertEqual(p_guess["_one_fraction"].value, 0.5) + + case lineshape.G: + mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) + self.assertEqual(p_guess["_one_fraction"].vary, False) + self.assertEqual(p_guess["_one_fraction"].value, 0.0) + + case lineshape.L: + mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) + self.assertEqual(p_guess["_one_fraction"].vary, False) + self.assertEqual(p_guess["_one_fraction"].value, 1.0) + + case lineshape.PV_PV: + mod, p_guess = make_models(pv_pv, peaks, data, lineshape) + self.assertEqual(p_guess["_one_fraction_x"].vary, True) + self.assertEqual(p_guess["_one_fraction_x"].value, 0.5) + self.assertEqual(p_guess["_one_fraction_y"].vary, True) + self.assertEqual(p_guess["_one_fraction_y"].value, 0.5) + + def test_Pseudo3D(self): + datasets = [ + (f"{self.test_directory}/test_protein_L/test1.ft2", [0, 1, 2]), + (f"{self.test_directory}/test_protein_L/test_tp.ft2", [2, 1, 0]), + (f"{self.test_directory}/test_protein_L/test_tp2.ft2", [1, 2, 0]), + ] + + # expected shape + data_shape = (4, 256, 546) + test_nu = 1 + for dataset, dims in datasets: + with self.subTest(i=test_nu): + dic, data = ng.pipe.read(dataset) + pseudo3D = Pseudo3D(dic, data, dims) + self.assertEqual(dims, pseudo3D.dims) + self.assertEqual(pseudo3D.data.shape, data_shape) + self.assertEqual(pseudo3D.f1_label, "15N") + self.assertEqual(pseudo3D.f2_label, "HN") + self.assertEqual(pseudo3D.dims, dims) + self.assertEqual(pseudo3D.f1_size, 256) + self.assertEqual(pseudo3D.f2_size, 546) + test_nu += 1 diff --git a/test/test_io.py b/test/test_io.py new file mode 100644 index 00000000..a945852d --- /dev/null +++ b/test/test_io.py @@ -0,0 +1,428 @@ +import unittest +from unittest.mock import patch +from pathlib import Path +import json + +import pytest +import numpy as np +import nmrglue as ng +import pandas as pd + +from peakipy.io import ( + Pseudo3D, + Peaklist, + LoadData, + PeaklistFormat, + OutFmt, + StrucEl, + UnknownFormat, + ClustersResult, + get_vclist, +) +from peakipy.fitting import PeakLimits +from peakipy.utils import load_config, write_config, update_config_file + + +@pytest.fixture +def test_directory(): + return Path(__file__).parent + + +# test for read, edit, fit, check and spec scripts +# need to actually write proper tests +class TestBokehScript(unittest.TestCase): + @patch("peakipy.cli.edit.BokehScript") + def test_BokehScript(self, MockBokehScript): + args = {"": "hello", "": "data"} + bokeh_plots = MockBokehScript(args) + 
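+        # BokehScript is patched with a mock above, so this is just a smoke
+        # test that construction with these args does not raise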
self.assertIsNotNone(bokeh_plots) + + +class TestCheckScript(unittest.TestCase): + @patch("peakipy.cli.main.check") + def test_main(self, MockCheck): + args = {"": "hello", "": "data"} + check = MockCheck(args) + self.assertIsNotNone(check) + + +class TestFitScript(unittest.TestCase): + @patch("peakipy.cli.main.fit") + def test_main(self, MockFit): + args = {"": "hello", "": "data"} + fit = MockFit(args) + self.assertIsNotNone(fit) + + +class TestReadScript(unittest.TestCase): + test_directory = "./test/" + + @patch("peakipy.cli.main.read") + def test_main(self, MockRead): + args = {"": "hello", "": "data"} + read = MockRead(args) + self.assertIsNotNone(read) + + def test_read_pipe_peaklist(self): + args = { + "path": f"{self.test_directory}/test_pipe.tab", + "data_path": f"{self.test_directory}/test_pipe.ft2", + "dims": [0, 1, 2], + "fmt": PeaklistFormat.pipe, + } + peaklist = Peaklist(**args) + self.assertIsNotNone(peaklist) + self.assertIs(len(peaklist.df), 3) + # self.assertIs(peaklist.df.X_AXISf.iloc[0], 323.019) + self.assertIs(peaklist.fmt.value, "pipe") + # self.assertEqual(peaklist.df.ASS.iloc[0], "None") + # self.assertEqual(peaklist.df.ASS.iloc[1], "None_dummy_1") + + +class TestSpecScript(unittest.TestCase): + @patch("peakipy.cli.main.spec") + def test_main(self, MockSpec): + args = {"": "hello", "": "data"} + spec = MockSpec(args) + self.assertIsNotNone(spec) + + +def test_load_config_existing(): + config_path = Path("test_config.json") + # Create a dummy existing config file + with open(config_path, "w") as f: + json.dump({"key1": "value1"}, f) + + loaded_config = load_config(config_path) + + assert loaded_config == {"key1": "value1"} + + # Clean up + config_path.unlink() + + +def test_load_config_nonexistent(): + config_path = Path("test_config.json") + + loaded_config = load_config(config_path) + + assert loaded_config == {} + + +def test_write_config(): + config_path = Path("test_config.json") + config_kvs = {"key1": "value1", "key2": "value2"} + + write_config(config_path, config_kvs) + + # Check if the config file is created correctly + assert config_path.exists() + + # Check if the config file content is correct + with open(config_path) as f: + created_config = json.load(f) + assert created_config == {"key1": "value1", "key2": "value2"} + + # Clean up + config_path.unlink() + + +def test_update_config_file_existing(): + config_path = Path("test_config.json") + # Create a dummy existing config file + with open(config_path, "w") as f: + json.dump({"key1": "value1"}, f) + + config_kvs = {"key2": "value2", "key3": "value3"} + updated_config = update_config_file(config_path, config_kvs) + + assert updated_config == {"key1": "value1", "key2": "value2", "key3": "value3"} + + # Clean up + config_path.unlink() + + +def test_update_config_file_nonexistent(): + config_path = Path("test_config.json") + config_kvs = {"key1": "value1", "key2": "value2"} + updated_config = update_config_file(config_path, config_kvs) + + assert updated_config == {"key1": "value1", "key2": "value2"} + + # Clean up + config_path.unlink() + + +@pytest.fixture +def sample_data(): + return np.zeros((10, 10)) + + +@pytest.fixture +def sample_peak(): + peak_data = {"X_AXIS": [5], "Y_AXIS": [5], "XW": [2], "YW": [2]} + return pd.DataFrame(peak_data).iloc[0] + + +def test_peak_limits_max_min(sample_peak, sample_data): + limits = PeakLimits(sample_peak, sample_data) + + assert limits.max_x == 8 + assert limits.max_y == 8 + assert limits.min_x == 3 + assert limits.min_y == 3 + + +def 
test_peak_limits_boundary(sample_data): + peak_data = {"X_AXIS": [8], "Y_AXIS": [8], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + limits = PeakLimits(peak, sample_data) + + assert limits.max_x == 10 + assert limits.max_y == 10 + assert limits.min_x == 6 + assert limits.min_y == 6 + + +def test_peak_limits_at_boundary(sample_data): + peak_data = {"X_AXIS": [0], "Y_AXIS": [0], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + limits = PeakLimits(peak, sample_data) + + assert limits.max_x == 3 + assert limits.max_y == 3 + assert limits.min_x == 0 + assert limits.min_y == 0 + + +def test_peak_limits_outside_boundary(sample_data): + peak_data = {"X_AXIS": [15], "Y_AXIS": [15], "XW": [2], "YW": [2]} + peak = pd.DataFrame(peak_data).iloc[0] + with pytest.raises(AssertionError): + limits = PeakLimits(peak, sample_data) + + +def test_peak_limits_1d_data(): + data = np.zeros(10) + peak_data = {"X_AXIS": [5], "Y_AXIS": [0], "XW": [2], "YW": [0]} + peak = pd.DataFrame(peak_data).iloc[0] + with pytest.raises(IndexError): + limits = PeakLimits(peak, data) + + +def test_StrucEl(): + assert StrucEl.square.value == "square" + assert StrucEl.disk.value == "disk" + assert StrucEl.rectangle.value == "rectangle" + assert StrucEl.mask_method.value == "mask_method" + + +def test_PeaklistFormat(): + assert PeaklistFormat.a2.value == "a2" + assert PeaklistFormat.a3.value == "a3" + assert PeaklistFormat.sparky.value == "sparky" + assert PeaklistFormat.pipe.value == "pipe" + assert PeaklistFormat.peakipy.value == "peakipy" + + +def test_OutFmt(): + assert OutFmt.csv.value == "csv" + assert OutFmt.pkl.value == "pkl" + + +@pytest.fixture +def test_data_path(): + return Path("./test/test_protein_L") + + +@pytest.fixture +def pseudo3d_args(test_data_path): + dic, data = ng.pipe.read(test_data_path / "test1.ft2") + dims = [0, 1, 2] + return dic, data, dims + + +@pytest.fixture +def peaklist(test_data_path): + dims = [0, 1, 2] + path = test_data_path / "test.tab" + data_path = test_data_path / "test1.ft2" + fmt = PeaklistFormat.pipe + radii = [0.04, 0.4] + return Peaklist(path, data_path, fmt, dims, radii) + + +def test_Pseudo3D_properties(pseudo3d_args): + dic, data, dims = pseudo3d_args + pseudo3d = Pseudo3D(dic, data, dims) + assert pseudo3d.dic == dic + assert np.array_equal(pseudo3d._data, data.reshape((4, 256, 546))) + assert pseudo3d.dims == dims + + +def test_Peaklist_initialization(test_data_path, peaklist): + + assert peaklist.peaklist_path == test_data_path / "test.tab" + assert peaklist.data_path == test_data_path / "test1.ft2" + assert peaklist.fmt == PeaklistFormat.pipe + assert peaklist.radii == [0.04, 0.4] + + +def test_Peaklist_a2(test_data_path): + dims = [0, 1, 2] + path = test_data_path / "peaks.a2" + data_path = test_data_path / "test1.ft2" + fmt = PeaklistFormat.a2 + radii = [0.04, 0.4] + peaklist = Peaklist(path, data_path, fmt, dims, radii) + peaklist.update_df() + + +def test_Peaklist_a3(test_data_path): + dims = [0, 1, 2] + path = test_data_path / "ccpnTable.tsv" + data_path = test_data_path / "test1.ft2" + fmt = PeaklistFormat.a3 + radii = [0.04, 0.4] + peaklist = Peaklist(path, data_path, fmt, dims, radii) + peaklist.update_df() + + +def test_Peaklist_sparky(test_data_path): + dims = [0, 1, 2] + path = test_data_path / "peaks.sparky" + data_path = test_data_path / "test1.ft2" + fmt = PeaklistFormat.sparky + radii = [0.04, 0.4] + Peaklist(path, data_path, fmt, dims, radii) + + +@pytest.fixture +def loaddata(test_data_path): + dims = [0, 1, 2] + path = 
test_data_path / "test.csv" + data_path = test_data_path / "test1.ft2" + fmt = PeaklistFormat.peakipy + radii = [0.04, 0.4] + return LoadData(path, data_path, fmt, dims, radii) + + +def test_LoadData_initialization(test_data_path, loaddata): + assert loaddata.peaklist_path == test_data_path / "test.csv" + assert loaddata.data_path == test_data_path / "test1.ft2" + assert loaddata.fmt == PeaklistFormat.peakipy + assert loaddata.radii == [0.04, 0.4] + loaddata.check_data_frame() + loaddata.check_assignments() + loaddata.check_peak_bounds() + loaddata.update_df() + + +def test_LoadData_with_Edited_column(loaddata): + loaddata.df["Edited"] = "yes" + loaddata.check_data_frame() + + +def test_LoadData_without_include_column(loaddata): + loaddata.df.drop(columns=["include"], inplace=True) + loaddata.check_data_frame() + assert "include" in loaddata.df.columns + assert np.all(loaddata.df.include == "yes") + + +def test_LoadData_with_X_DIAMETER_PPM_column(loaddata): + loaddata.df["X_DIAMETER_PPM"] = 0.04 + loaddata.check_data_frame() + assert "X_DIAMETER_PPM" in loaddata.df.columns + + +def test_UnknownFormat(): + with pytest.raises(UnknownFormat): + raise UnknownFormat("This is an unknown format") + + +def test_update_df(peaklist): + peaklist.update_df() + + df = peaklist.df + + # Check that X_AXIS and Y_AXIS columns are created and values are set correctly + assert "X_AXIS" in df.columns + assert "Y_AXIS" in df.columns + + # Check that X_AXISf and Y_AXISf columns are created and values are set correctly + assert "X_AXISf" in df.columns + assert "Y_AXISf" in df.columns + + # Check that XW_HZ and YW_HZ columns are converted to float correctly + assert df["XW_HZ"].dtype == float + assert df["YW_HZ"].dtype == float + + # Check that XW and YW columns are created + assert "XW" in df.columns + assert "YW" in df.columns + + # Check the assignment column + assert "ASS" in df.columns + + # Check radii columns + assert "X_RADIUS_PPM" in df.columns + assert "Y_RADIUS_PPM" in df.columns + assert "X_RADIUS" in df.columns + assert "Y_RADIUS" in df.columns + + # Check 'include' column is created and set to 'yes' + assert "include" in df.columns + assert all(df["include"] == "yes") + + # Check that the peaks are within bounds + assert all( + (df["X_PPM"] < peaklist.f2_ppm_max) & (df["X_PPM"] > peaklist.f2_ppm_min) + ) + assert all( + (df["Y_PPM"] < peaklist.f1_ppm_max) & (df["Y_PPM"] > peaklist.f1_ppm_min) + ) + + +def test_update_df_with_excluded_peaks(peaklist): + peaklist._df.loc[1, "X_PPM"] = 100.0 # This peak should be out of bounds + peaklist.update_df() + + df = peaklist.df + + # Check that out of bounds peak is excluded + assert len(df) == 62 + assert not ((df["X_PPM"] == 100.0).any()) + + +def test_clusters_result_initialization(): + labeled_array = np.array([[1, 2], [3, 4]]) + num_features = 5 + closed_data = np.array([[5, 6], [7, 8]]) + peaks = [(1, 2), (3, 4)] + + clusters_result = ClustersResult(labeled_array, num_features, closed_data, peaks) + + assert np.array_equal(clusters_result.labeled_array, labeled_array) + assert clusters_result.num_features == num_features + assert np.array_equal(clusters_result.closed_data, closed_data) + assert clusters_result.peaks == peaks + + +def test_get_vclist_None(): + assert get_vclist(None, {})["vclist"] == False + + +def test_get_vclist_exists(test_data_path): + vclist = test_data_path / "vclist" + assert get_vclist(vclist, {})["vclist"] == True + + +def test_get_vclist_not_exists(test_data_path): + vclist = test_data_path / "vclistbla" + with 
pytest.raises(Exception): + get_vclist(vclist, {})["vclist"] == True + + +if __name__ == "__main__": + unittest.main(verbosity=2) diff --git a/test/test_lineshapes.py b/test/test_lineshapes.py new file mode 100644 index 00000000..43e867c9 --- /dev/null +++ b/test/test_lineshapes.py @@ -0,0 +1,353 @@ +from pathlib import Path +from unittest.mock import Mock + +import pytest +import pandas as pd +import numpy as np +from numpy.testing import assert_almost_equal + +from peakipy.io import Peaklist, PeaklistFormat +from peakipy.constants import tiny +from peakipy.lineshapes import ( + gaussian, + gaussian_lorentzian, + pv_g, + pv_l, + voigt2d, + pvoigt2d, + pv_pv, + get_lineshape_function, + Lineshape, + calculate_height_for_voigt_lineshape, + calculate_fwhm_for_voigt_lineshape, + calculate_fwhm_for_pseudo_voigt_lineshape, + calculate_height_for_pseudo_voigt_lineshape, + calculate_height_for_gaussian_lineshape, + calculate_height_for_lorentzian_lineshape, + calculate_height_for_pv_pv_lineshape, + calculate_lineshape_specific_height_and_fwhm, + calculate_peak_linewidths_in_hz, + calculate_peak_centers_in_ppm, +) + + +def test_gaussian_typical_values(): + x = np.array([0, 1, 2]) + center = 0.0 + sigma = 1.0 + expected = (1.0 / (np.sqrt(2 * np.pi) * sigma)) * np.exp( + -((x - center) ** 2) / (2 * sigma**2) + ) + result = gaussian(x, center, sigma) + assert_almost_equal(result, expected, decimal=7) + + +def test_gaussian_center_nonzero(): + x = np.array([0, 1, 2]) + center = 1.0 + sigma = 1.0 + expected = (1.0 / (np.sqrt(2 * np.pi) * sigma)) * np.exp( + -((x - center) ** 2) / (2 * sigma**2) + ) + result = gaussian(x, center, sigma) + assert_almost_equal(result, expected, decimal=7) + + +def test_gaussian_sigma_nonzero(): + x = np.array([0, 1, 2]) + center = 0.0 + sigma = 2.0 + expected = (1.0 / (np.sqrt(2 * np.pi) * sigma)) * np.exp( + -((x - center) ** 2) / (2 * sigma**2) + ) + result = gaussian(x, center, sigma) + assert_almost_equal(result, expected, decimal=7) + + +def test_gaussian_zero_center(): + x = np.array([0, 1, 2]) + center = 0.0 + sigma = 1.0 + expected = (1.0 / (np.sqrt(2 * np.pi) * sigma)) * np.exp( + -((x - center) ** 2) / (2 * sigma**2) + ) + result = gaussian(x, center, sigma) + assert_almost_equal(result, expected, decimal=7) + + +def test_calculate_height_for_voigt_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_voigt_lineshape(df) + + assert np.allclose(result_df["height"], [0.435596, 0.217798]) + assert np.allclose(result_df["height_err"], [0.04356, 0.02178]) + + +def test_calculate_fwhm_for_voigt_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + } + df = pd.DataFrame(data) + result_df = calculate_fwhm_for_voigt_lineshape(df) + + assert np.allclose(result_df["fwhm_l_x"], [2.0, 4.0]) + assert np.allclose(result_df["fwhm_l_y"], [2.0, 4.0]) + assert np.allclose(result_df["fwhm_g_x"], [2.35482, 4.70964]) + assert np.allclose(result_df["fwhm_g_y"], [2.35482, 4.70964]) + assert np.allclose(result_df["fwhm_x"], [3.601309, 7.202619]) + assert np.allclose(result_df["fwhm_y"], [3.601309, 7.202619]) + + +def test_calculate_height_for_pseudo_voigt_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": 
[10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction": [0.5, 0.5], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_pseudo_voigt_lineshape(df) + + assert np.allclose(result_df["height"], [1.552472, 0.776236]) + assert np.allclose(result_df["height_err"], [0.155247, 0.077624]) + + +def test_calculate_fwhm_for_pseudo_voigt_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction": [0.5, 0.5], + } + df = pd.DataFrame(data) + result_df = calculate_fwhm_for_pseudo_voigt_lineshape(df) + + assert np.allclose(result_df["fwhm_x"], [2.0, 4.0]) + assert np.allclose(result_df["fwhm_y"], [2.0, 4.0]) + + +def test_calculate_height_for_gaussian_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction": [0.5, 0.5], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_gaussian_lineshape(df) + + assert np.allclose(result_df["height"], [2.206356, 1.103178]) + assert np.allclose(result_df["height_err"], [0.220636, 0.110318]) + + +def test_calculate_height_for_lorentzian_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction": [0.5, 0.5], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_lorentzian_lineshape(df) + + assert np.allclose(result_df["height"], [1.013212, 0.506606]) + assert np.allclose(result_df["height_err"], [0.101321, 0.050661]) + + +def test_calculate_height_for_pv_pv_lineshape(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction_x": [0.5, 0.5], + "fraction_y": [0.5, 0.5], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_pv_pv_lineshape(df) + + assert np.allclose(result_df["height"], [1.552472, 0.776236]) + assert np.allclose(result_df["height_err"], [0.155247, 0.077624]) + + +def test_calculate_height_for_pv_pv_lineshape_fraction_y(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction_x": [0.5, 0.5], + "fraction_y": [1.0, 1.0], + } + df = pd.DataFrame(data) + result_df = calculate_height_for_pv_pv_lineshape(df) + + assert np.allclose(result_df["height"], [1.254186, 0.627093]) + assert np.allclose(result_df["height_err"], [0.125419, 0.062709]) + + +def test_calculate_lineshape_specific_height_and_fwhm(): + data = { + "sigma_x": [1.0, 2.0], + "sigma_y": [1.0, 2.0], + "gamma_x": [1.0, 2.0], + "gamma_y": [1.0, 2.0], + "amp": [10.0, 20.0], + "amp_err": [1.0, 2.0], + "fraction": [0.5, 0.5], + "fraction_x": [0.5, 0.5], + "fraction_y": [0.5, 0.5], + } + df = pd.DataFrame(data) + calculate_lineshape_specific_height_and_fwhm(Lineshape.G, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.L, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.V, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.PV, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.PV_PV, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.PV_G, df) + calculate_lineshape_specific_height_and_fwhm(Lineshape.PV_L, df) + + +def test_get_lineshape_function(): + assert get_lineshape_function(Lineshape.PV) == pvoigt2d + assert 
get_lineshape_function(Lineshape.L) == pvoigt2d + assert get_lineshape_function(Lineshape.G) == pvoigt2d + assert get_lineshape_function(Lineshape.G_L) == gaussian_lorentzian + assert get_lineshape_function(Lineshape.PV_G) == pv_g + assert get_lineshape_function(Lineshape.PV_L) == pv_l + assert get_lineshape_function(Lineshape.PV_PV) == pv_pv + assert get_lineshape_function(Lineshape.V) == voigt2d + + +def test_get_lineshape_function_exception(): + with pytest.raises(Exception): + get_lineshape_function("bla") + + +@pytest.fixture +def peakipy_data(): + test_data_path = Path("./test/test_protein_L/") + return Peaklist( + test_data_path / "test.tab", test_data_path / "test1.ft2", PeaklistFormat.pipe + ) + + +def test_calculate_peak_linewidths_in_hz(): + # Sample data for testing + data = { + "sigma_x": [1.0, 2.0, 3.0], + "sigma_y": [1.5, 2.5, 3.5], + "fwhm_x": [0.5, 1.5, 2.5], + "fwhm_y": [0.7, 1.7, 2.7], + } + df = pd.DataFrame(data) + + # Mock peakipy_data object + peakipy_data = Mock() + peakipy_data.ppm_per_pt_f2 = 0.01 + peakipy_data.ppm_per_pt_f1 = 0.02 + peakipy_data.hz_per_pt_f2 = 10.0 + peakipy_data.hz_per_pt_f1 = 20.0 + + # Expected results + expected_sigma_x_ppm = [0.01, 0.02, 0.03] + expected_sigma_y_ppm = [0.03, 0.05, 0.07] + expected_fwhm_x_ppm = [0.005, 0.015, 0.025] + expected_fwhm_y_ppm = [0.014, 0.034, 0.054] + expected_fwhm_x_hz = [5.0, 15.0, 25.0] + expected_fwhm_y_hz = [14.0, 34.0, 54.0] + + # Run the function + result_df = calculate_peak_linewidths_in_hz(df, peakipy_data) + + # Assertions + pd.testing.assert_series_equal( + result_df["sigma_x_ppm"], pd.Series(expected_sigma_x_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["sigma_y_ppm"], pd.Series(expected_sigma_y_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["fwhm_x_ppm"], pd.Series(expected_fwhm_x_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["fwhm_y_ppm"], pd.Series(expected_fwhm_y_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["fwhm_x_hz"], pd.Series(expected_fwhm_x_hz), check_names=False + ) + pd.testing.assert_series_equal( + result_df["fwhm_y_hz"], pd.Series(expected_fwhm_y_hz), check_names=False + ) + + +def test_calculate_peak_centers_in_ppm(): + # Sample data for testing + data = { + "center_x": [10, 20, 30], + "center_y": [15, 25, 35], + "init_center_x": [12, 22, 32], + "init_center_y": [18, 28, 38], + } + df = pd.DataFrame(data) + + # Mock peakipy_data object + peakipy_data = Mock() + peakipy_data.uc_f2.ppm = Mock(side_effect=lambda x: x * 0.1) + peakipy_data.uc_f1.ppm = Mock(side_effect=lambda x: x * 0.2) + + # Expected results + expected_center_x_ppm = [1.0, 2.0, 3.0] + expected_center_y_ppm = [3.0, 5.0, 7.0] + expected_init_center_x_ppm = [1.2, 2.2, 3.2] + expected_init_center_y_ppm = [3.6, 5.6, 7.6] + + # Run the function + result_df = calculate_peak_centers_in_ppm(df, peakipy_data) + + # Assertions + pd.testing.assert_series_equal( + result_df["center_x_ppm"], pd.Series(expected_center_x_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["center_y_ppm"], pd.Series(expected_center_y_ppm), check_names=False + ) + pd.testing.assert_series_equal( + result_df["init_center_x_ppm"], + pd.Series(expected_init_center_x_ppm), + check_names=False, + ) + pd.testing.assert_series_equal( + result_df["init_center_y_ppm"], + pd.Series(expected_init_center_y_ppm), + check_names=False, + ) diff --git a/test/test_utils.py b/test/test_utils.py new file mode 100644 index 00000000..baf9f8da 
--- /dev/null
+++ b/test/test_utils.py
@@ -0,0 +1,252 @@
+from unittest.mock import patch, mock_open, MagicMock
+from datetime import datetime
+import json
+import os
+import tempfile
+from pathlib import Path
+
+import pytest
+import pandas as pd
+
+# helper functions under test are imported from peakipy.utils
+from peakipy.utils import (
+    run_log,
+    update_args_with_values_from_config_file,
+    update_peak_positions_from_ppm_to_points,
+    update_linewidths_from_hz_to_points,
+    save_data,
+)
+
+
+@patch("peakipy.utils.open", new_callable=mock_open)
+@patch("peakipy.utils.datetime")
+@patch("peakipy.utils.sys")
+def test_run_log(mock_sys, mock_datetime, mock_open_file):
+    # Mocking sys.argv
+    mock_sys.argv = ["test_script.py", "arg1", "arg2"]
+
+    # Mocking datetime to return a fixed timestamp
+    fixed_timestamp = datetime(2024, 5, 20, 15, 45)
+    mock_datetime.now.return_value = fixed_timestamp
+
+    # Expected timestamp string
+    expected_time_stamp = fixed_timestamp.strftime("%A %d %B %Y at %H:%M")
+
+    # Run the function
+    run_log("mock_run_log.txt")
+
+    # Prepare the expected log content
+    expected_log_content = (
+        f"# Script run on {expected_time_stamp}:\ntest_script.py arg1 arg2\n"
+    )
+
+    # Assert that the file was opened correctly
+    mock_open_file.assert_called_once_with("mock_run_log.txt", "a")
+
+    # Assert that the correct content was written to the file
+    mock_open_file().write.assert_called_once_with(expected_log_content)
+
+    # Assert that the script name is correctly set to the basename
+    assert mock_sys.argv[0] == "test_script.py"
+
+
+# mock the config loader used by update_args_with_values_from_config_file
+@patch("peakipy.utils.load_config")
+@patch("peakipy.utils.Path.exists")
+def test_update_args_with_config(mock_path_exists, mock_load_config):
+    # Test setup
+    mock_path_exists.return_value = True  # Pretend the config file exists
+    mock_load_config.return_value = {
+        "dims": [1, 2, 3],
+        "noise": "0.05",
+        "colors": ["#ff0000", "#00ff00"],
+    }
+
+    args = {"dims": (0, 1, 2), "noise": False, "colors": ["#5e3c99", "#e66101"]}
+
+    # Run the function
+    updated_args, config = update_args_with_values_from_config_file(args)
+
+    # Check the updates to args
+    assert updated_args["dims"] == [1, 2, 3]
+    assert updated_args["noise"] == 0.05
+    assert updated_args["colors"] == ["#ff0000", "#00ff00"]
+
+    # Check the returned config
+    assert config == {
+        "dims": [1, 2, 3],
+        "noise": "0.05",
+        "colors": ["#ff0000", "#00ff00"],
+    }
+
+
+@patch("peakipy.utils.Path.exists")
+def test_update_args_with_no_config_file(mock_path_exists):
+    # Test setup
+    mock_path_exists.return_value = False  # Pretend the config file does not exist
+
+    args = {"dims": (0, 1, 2), "noise": False, "colors": ["#5e3c99", "#e66101"]}
+
+    # Run the function
+    updated_args, config = update_args_with_values_from_config_file(args)
+
+    # Check the updates to args
+    assert updated_args["dims"] == (0, 1, 2)
+    assert updated_args["noise"] == False
+    assert updated_args["colors"] == ["#5e3c99", "#e66101"]
+
+    # Check the returned config (should be empty)
+    assert config == {}
+
+
+@patch("peakipy.utils.load_config")
+@patch("peakipy.utils.Path.exists")
+def test_update_args_with_corrupt_config_file(mock_path_exists, mock_load_config):
+    # Test setup
+    mock_path_exists.return_value = True  # Pretend the config file exists
+    mock_load_config.side_effect = json.decoder.JSONDecodeError(
+        "Expecting value", "", 0
+    )  # Simulate corrupt JSON
+
+    args = {"dims": (0, 1, 2), 
"noise": False, "colors": ["#5e3c99", "#e66101"]} + + # Run the function + updated_args, config = update_args_with_values_from_config_file(args) + + # Check the updates to args + assert updated_args["dims"] == (0, 1, 2) + assert updated_args["noise"] == False + assert updated_args["colors"] == ["#5e3c99", "#e66101"] + + # Check the returned config (should be empty due to error) + assert config == {} + + # Mock class to simulate the peakipy_data object + + +class MockPeakipyData: + def __init__(self, df, pt_per_hz_f2, pt_per_hz_f1, uc_f2, uc_f1): + self.df = df + self.pt_per_hz_f2 = pt_per_hz_f2 + self.pt_per_hz_f1 = pt_per_hz_f1 + self.uc_f2 = uc_f2 + self.uc_f1 = uc_f1 + + +# Test data +@pytest.fixture +def mock_peakipy_data(): + df = pd.DataFrame( + { + "XW_HZ": [10, 20, 30], + "YW_HZ": [5, 15, 25], + "X_PPM": [1.0, 2.0, 3.0], + "Y_PPM": [0.5, 1.5, 2.5], + } + ) + + pt_per_hz_f2 = 2.0 + pt_per_hz_f1 = 3.0 + + uc_f2 = MagicMock() + uc_f1 = MagicMock() + uc_f2.side_effect = lambda x, unit: x * 100.0 if unit == "PPM" else x + uc_f1.side_effect = lambda x, unit: x * 200.0 if unit == "PPM" else x + uc_f2.f = MagicMock(side_effect=lambda x, unit: x * 1000.0 if unit == "PPM" else x) + uc_f1.f = MagicMock(side_effect=lambda x, unit: x * 2000.0 if unit == "PPM" else x) + + return MockPeakipyData(df, pt_per_hz_f2, pt_per_hz_f1, uc_f2, uc_f1) + + +def test_update_linewidths_from_hz_to_points(mock_peakipy_data): + peakipy_data = update_linewidths_from_hz_to_points(mock_peakipy_data) + + expected_XW = [20.0, 40.0, 60.0] + expected_YW = [15.0, 45.0, 75.0] + + pd.testing.assert_series_equal( + peakipy_data.df["XW"], pd.Series(expected_XW, name="XW") + ) + pd.testing.assert_series_equal( + peakipy_data.df["YW"], pd.Series(expected_YW, name="YW") + ) + + +def test_update_peak_positions_from_ppm_to_points(mock_peakipy_data): + peakipy_data = update_peak_positions_from_ppm_to_points(mock_peakipy_data) + + expected_X_AXIS = [100.0, 200.0, 300.0] + expected_Y_AXIS = [100.0, 300.0, 500.0] + expected_X_AXISf = [1000.0, 2000.0, 3000.0] + expected_Y_AXISf = [1000.0, 3000.0, 5000.0] + + pd.testing.assert_series_equal( + peakipy_data.df["X_AXIS"], pd.Series(expected_X_AXIS, name="X_AXIS") + ) + pd.testing.assert_series_equal( + peakipy_data.df["Y_AXIS"], pd.Series(expected_Y_AXIS, name="Y_AXIS") + ) + pd.testing.assert_series_equal( + peakipy_data.df["X_AXISf"], pd.Series(expected_X_AXISf, name="X_AXISf") + ) + pd.testing.assert_series_equal( + peakipy_data.df["Y_AXISf"], pd.Series(expected_Y_AXISf, name="Y_AXISf") + ) + + +@pytest.fixture +def sample_dataframe(): + data = {"A": [1, 2, 3], "B": [4.5678, 5.6789, 6.7890]} + return pd.DataFrame(data) + + +def test_save_data_csv(sample_dataframe): + with tempfile.NamedTemporaryFile(suffix=".csv", delete=False) as tmpfile: + output_name = Path(tmpfile.name) + + try: + save_data(sample_dataframe, output_name) + + assert output_name.exists() + + # Load the CSV and compare with the original dataframe + loaded_df = pd.read_csv(output_name) + pd.testing.assert_frame_equal( + loaded_df, sample_dataframe, check_exact=False, rtol=1e-4 + ) + finally: + os.remove(output_name) + + +def test_save_data_tab(sample_dataframe): + with tempfile.NamedTemporaryFile(suffix=".tab", delete=False) as tmpfile: + output_name = Path(tmpfile.name) + + try: + save_data(sample_dataframe, output_name) + + assert output_name.exists() + + # Load the tab-separated file and compare with the original dataframe + loaded_df = pd.read_csv(output_name, sep="\t") + pd.testing.assert_frame_equal( + loaded_df, 
sample_dataframe, check_exact=False, rtol=1e-4 + ) + finally: + os.remove(output_name) + + +def test_save_data_pickle(sample_dataframe): + with tempfile.NamedTemporaryFile(suffix=".pkl", delete=False) as tmpfile: + output_name = Path(tmpfile.name) + + try: + save_data(sample_dataframe, output_name) + + assert output_name.exists() + + # Load the pickle file and compare with the original dataframe + loaded_df = pd.read_pickle(output_name) + pd.testing.assert_frame_equal(loaded_df, sample_dataframe) + finally: + os.remove(output_name) From 1ceb4fb87fa21f16a8d6de538e3c4eafdf256925 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 26 May 2024 21:24:05 -0400 Subject: [PATCH 33/37] added more tests --- Makefile | 8 +- peakipy/cli/check_panel.py | 3 + peakipy/cli/edit_panel.py | 11 +- peakipy/cli/main.py | 401 +++++++------------------------- test/test_cli.py | 98 +++++++- test/test_fitting.py | 452 +++++++++++++++++++++++++++++++++++++ test/test_io.py | 8 - test/test_plotting.py | 104 +++++++++ 8 files changed, 745 insertions(+), 340 deletions(-) create mode 100644 test/test_plotting.py diff --git a/Makefile b/Makefile index 431618da..386f86c3 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,13 @@ coverage: #coverage run -m pytest test/test_core.py test/test_main.py test/test_fit.py test/test_cli.py - coverage run -m pytest test/test_fitting.py test/test_lineshapes.py test/test_io.py test/test_utils.py test/test_main.py test/test_cli.py + coverage run -m pytest test/test_fitting.py \ + test/test_lineshapes.py \ + test/test_io.py \ + test/test_utils.py \ + test/test_main.py \ + test/test_cli.py \ + test/test_plotting.py coverage-html: coverage html diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py index 2b11d131..5dc06cd9 100644 --- a/peakipy/cli/check_panel.py +++ b/peakipy/cli/check_panel.py @@ -88,6 +88,7 @@ def create_check_panel( data_path: Path, config_path: Path = Path("./peakipy.config"), edit_panel: bool = False, + test: bool = False, ): data = data_singleton() data.fits_path = fits_path @@ -126,6 +127,8 @@ def create_check_panel( ) if edit_panel: return check_pane + elif test: + return else: check_pane.show() diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py index f74f5b3e..f44b5faa 100644 --- a/peakipy/cli/edit_panel.py +++ b/peakipy/cli/edit_panel.py @@ -42,7 +42,7 @@ def update_peakipy_data_on_edit_of_table(event): data.bs.update_memcnt() -def panel_app(): +def panel_app(test=False): data = data_singleton() bs = data.bs bokeh_pane = pn.pane.Bokeh(bs.p) @@ -129,16 +129,19 @@ def update_source_selected_indices(event): ) check_app = pn.Card(title="Peakipy check") template.main.append(pn.Column(check_app, spectrum)) - template.show() + if test: + return + else: + template.show() @app.command() -def main(peaklist_path: Path, data_path: Path): +def main(peaklist_path: Path, data_path: Path, test: bool = False): data = data_singleton() data.peaklist_path = peaklist_path data.data_path = data_path data.load_data() - panel_app() + panel_app(test=test) if __name__ == "__main__": diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 79613f42..6b3b0aed 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -3,7 +3,6 @@ import json import shutil from pathlib import Path -from dataclasses import dataclass, field from enum import Enum from typing import Optional, Tuple, List, Annotated from multiprocessing import Pool, cpu_count @@ -16,15 +15,11 @@ from tqdm import tqdm from rich import print from skimage.filters import threshold_otsu -from 
pydantic import BaseModel -import matplotlib -import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import axes3d from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages -import yaml import plotly.io as pio pio.templates.default = "plotly_dark" @@ -71,13 +66,11 @@ simulate_lineshapes_from_fitted_peak_parameters, simulate_pv_pv_lineshapes_from_fitted_peak_parameters, validate_fit_dataframe, -) - -from .fit import ( fit_peak_clusters, FitPeaksInput, FitPeaksArgs, ) + from peakipy.plotting import ( PlottingDataForPlane, validate_sample_count, @@ -86,7 +79,6 @@ create_residual_figure, create_matplotlib_figure, ) -from .spec import yaml_file app = typer.Typer() tmp_path = Path("tmp") @@ -119,7 +111,6 @@ def read( y_ppm_column_name: str = "Position F2", dims: Annotated[List[int], typer.Option(help=dims_help)] = [0, 1, 2], outfmt: OutFmt = OutFmt.csv, - show: bool = False, fuda: bool = False, ): """Read NMRPipe/Analysis peaklist into pandas dataframe @@ -163,9 +154,6 @@ def read( peak positions [default: "Position F2"] outfmt : OutFmt Format of output peaklist [default: csv] - show : bool - Show the clusters on the spectrum color coded using matplotlib - fuda : bool Create a parameter file for running fuda (params.fuda) @@ -302,39 +290,6 @@ def read( run_log() - yaml = f""" - ########################################################################################################## - # This first block is global parameters which can be overridden by adding the desired argument # - # to your list of spectra. One exception is "colors" which if set in global params overrides the # - # color option set for individual spectra as the colors will now cycle through the chosen matplotlib # - # colormap # - ########################################################################################################## - - cs: {thres} # contour start - contour_num: 10 # number of contours - contour_factor: 1.2 # contour factor - colors: tab20 # must be matplotlib.cm colormap - show_cs: True - - outname: ["clusters.pdf","clusters.png"] # either single value or list of output names - ncol: 1 # tells matplotlib how many columns to give the figure legend - if not set defaults to 2 - clusters: {outname} - dims: {dims} - - # Here is where your list of spectra to plot goes - spectra: - - - fname: {data_path} - label: "" - contour_num: 20 - linewidths: 0.5 - """ - - if show: - with open("show_clusters.yml", "w") as out: - out.write(yaml) - os.system("peakipy spec show_clusters.yml") - print( f"""[green] @@ -536,10 +491,13 @@ def fit( run_log() +fits_help = "CSV file containing peakipy fits" + + @app.command(help="Interactive plots for checking fits") def check( - fits: Path, - data_path: Path, + fits: Annotated[Path, typer.Argument(help=fits_help)], + data_path: Annotated[Path, typer.Argument(help=data_path_help)], clusters: Optional[List[int]] = None, plane: Optional[List[int]] = None, outname: Path = Path("plots.pdf"), @@ -551,8 +509,8 @@ def check( rcount: int = 50, ccount: int = 50, colors: Tuple[str, str] = ("#5e3c99", "#e66101"), - verb: bool = False, plotly: bool = False, + test: bool = False, ): """Interactive plots for checking fits @@ -635,288 +593,87 @@ def check( XY = np.meshgrid(x, y) X, Y = XY - with PdfPages(outname) as pdf: - for _, peak_cluster in peak_clusters: - table = df_to_rich_table( - peak_cluster, - title="", - columns=columns_to_print, - styles=["blue" for _ in columns_to_print], - ) - print(table) + all_plot_data = [] + for _, peak_cluster in peak_clusters: + table = 
df_to_rich_table( + peak_cluster, + title="", + columns=columns_to_print, + styles=["blue" for _ in columns_to_print], + ) + print(table) - x_radius = peak_cluster.x_radius.max() - y_radius = peak_cluster.y_radius.max() - max_x, min_x = get_limits_for_axis_in_points( - group_axis_points=peak_cluster.center_x, mask_radius_in_points=x_radius - ) - max_y, min_y = get_limits_for_axis_in_points( - group_axis_points=peak_cluster.center_y, mask_radius_in_points=y_radius - ) - max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( - pseudo3D.data.shape, max_x, min_x, max_y, min_y - ) + x_radius = peak_cluster.x_radius.max() + y_radius = peak_cluster.y_radius.max() + max_x, min_x = get_limits_for_axis_in_points( + group_axis_points=peak_cluster.center_x, mask_radius_in_points=x_radius + ) + max_y, min_y = get_limits_for_axis_in_points( + group_axis_points=peak_cluster.center_y, mask_radius_in_points=y_radius + ) + max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( + pseudo3D.data.shape, max_x, min_x, max_y, min_y + ) - empty_mask_array = np.zeros( - (pseudo3D.f1_size, pseudo3D.f2_size), dtype=bool - ) - first_plane = peak_cluster[peak_cluster.plane == selected_planes[0]] - individual_masks, mask = make_masks_from_plane_data( - empty_mask_array, first_plane - ) + empty_mask_array = np.zeros((pseudo3D.f1_size, pseudo3D.f2_size), dtype=bool) + first_plane = peak_cluster[peak_cluster.plane == selected_planes[0]] + individual_masks, mask = make_masks_from_plane_data( + empty_mask_array, first_plane + ) - # generate simulated data - for plane_id, plane in peak_cluster.groupby("plane"): - sim_data_singles = [] - sim_data = np.zeros((pseudo3D.f1_size, pseudo3D.f2_size)) - try: - ( - sim_data, - sim_data_singles, - ) = simulate_pv_pv_lineshapes_from_fitted_peak_parameters( - plane, XY, sim_data, sim_data_singles - ) - except: - ( - sim_data, - sim_data_singles, - ) = simulate_lineshapes_from_fitted_peak_parameters( - plane, XY, sim_data, sim_data_singles - ) - - plot_data = PlottingDataForPlane( - pseudo3D, - plane_id, - plane, - X, - Y, - mask, - individual_masks, + # generate simulated data + for plane_id, plane in peak_cluster.groupby("plane"): + sim_data_singles = [] + sim_data = np.zeros((pseudo3D.f1_size, pseudo3D.f2_size)) + try: + ( sim_data, sim_data_singles, - min_x, - max_x, - min_y, - max_y, - fit_color, - data_color, - rcount, - ccount, + ) = simulate_pv_pv_lineshapes_from_fitted_peak_parameters( + plane, XY, sim_data, sim_data_singles + ) + except: + ( + sim_data, + sim_data_singles, + ) = simulate_lineshapes_from_fitted_peak_parameters( + plane, XY, sim_data, sim_data_singles ) - if ccpn_flag: - plt = PlotterWidget() - # fig = create_plotly_figure(plot_data) - if plotly: - fig = create_plotly_figure(plot_data) - residual_fig = create_residual_figure(plot_data) - return fig, residual_fig - else: - plt = matplotlib.pyplot - create_matplotlib_figure( - plot_data, pdf, individual, label, ccpn_flag, show - ) - if first: - break - - run_log() - - -def make_yaml_file(name, yaml_file=yaml_file): - if os.path.exists(name): - print(f"Copying {name} to {name}.bak") - shutil.copy(name, f"{name}.bak") - - print(f"Making yaml file ... 
{name}") - with open(name, "w") as new_yaml_file: - new_yaml_file.write(yaml_file) - - -@app.command(help="Show first plane with clusters") -def spec(yaml_file: Path, new: bool = False): - if new: - make_yaml_file(name=yaml_file) - exit() - - if yaml_file.exists(): - params = yaml.load(open(yaml_file, "r"), Loader=yaml.FullLoader) - else: - print( - f"[red]{yaml_file} does not exist. Use 'peakipy spec --new' to create one[/red]" - ) - exit() - - fig = plt.figure() - ax = fig.add_subplot(111) - - cs_g = float(params["cs"]) - spectra = params["spectra"] - contour_num_g = params.get("contour_num", 10) - contour_factor_g = params.get("contour_factor", 1.2) - nspec = len(spectra) - notes = [] - legends = 0 - for num, spec in enumerate(spectra): - # unpack spec specific parameters - fname = spec["fname"] - - if params.get("colors"): - # currently overrides color option - color = np.linspace(0, 1, nspec)[num] - colors = cm.get_cmap(params.get("colors"))(color) - # print("Colors set to cycle though %s from Matplotlib"%params.get("colors")) - # print(colors) - colors = colors[:-1] - - else: - colors = spec["colors"] - - neg_colors = spec.get("neg_colors") - label = spec.get("label") - cs = float(spec.get("cs", cs_g)) - contour_num = spec.get("contour_num", contour_num_g) - contour_factor = spec.get("contour_factor", contour_factor_g) - #  append cs and colors to notes - notes.append((cs, colors)) - - # read spectra - dic, data = ng.pipe.read(fname) - udic = ng.pipe.guess_udic(dic, data) - - ndim = udic["ndim"] - - if ndim == 1: - uc_f1 = ng.pipe.make_uc(dic, data, dim=0) - - elif ndim == 2: - f1, f2 = params.get("dims", [0, 1]) - uc_f1 = ng.pipe.make_uc(dic, data, dim=f1) - uc_f2 = ng.pipe.make_uc(dic, data, dim=f2) - - ppm_f1 = uc_f1.ppm_scale() - ppm_f2 = uc_f2.ppm_scale() - - ppm_f1_0, ppm_f1_1 = uc_f1.ppm_limits() # max,min - ppm_f2_0, ppm_f2_1 = uc_f2.ppm_limits() # max,min - - elif ndim == 3: - dims = params.get("dims", [0, 1, 2]) - f1, f2, f3 = dims - uc_f1 = ng.pipe.make_uc(dic, data, dim=f1) - uc_f2 = ng.pipe.make_uc(dic, data, dim=f2) - uc_f3 = ng.pipe.make_uc(dic, data, dim=f3) - #  need to make more robust - ppm_f1 = uc_f2.ppm_scale() - ppm_f2 = uc_f3.ppm_scale() - - ppm_f1_0, ppm_f1_1 = uc_f2.ppm_limits() # max,min - ppm_f2_0, ppm_f2_1 = uc_f3.ppm_limits() # max,min - - # if f1 == 0: - # data = data[f1] - if dims != [1, 2, 3]: - data = np.transpose(data, dims) - data = data[0] - # x and y are set to f2 and f1 - f1, f2 = f2, f3 - # elif f1 == 1: - # data = data[:,0,:] - # else: - # data = data[:,:,0] - - # plot parameters - contour_start = cs # contour level start value - contour_num = contour_num # number of contour levels - contour_factor = contour_factor # scaling factor between contour levels - - # calculate contour levels - cl = contour_start * contour_factor ** np.arange(contour_num) - if len(cl) > 1 and np.min(np.diff(cl)) <= 0.0: - print(f"Setting contour levels to np.abs({cl})") - cl = np.abs(cl) - - ax.contour( - data, - cl, - colors=[colors for _ in cl], - linewidths=spec.get("linewidths", 0.5), - extent=(ppm_f2_0, ppm_f2_1, ppm_f1_0, ppm_f1_1), - ) - - if neg_colors: - ax.contour( - data * -1, - cl, - colors=[neg_colors for _ in cl], - linewidths=spec.get("linewidths", 0.5), - extent=(ppm_f2_0, ppm_f2_1, ppm_f1_0, ppm_f1_1), - ) - - else: # if no neg color given then plot with 0.5 alpha - ax.contour( - data * -1, - cl, - colors=[colors for _ in cl], - linewidths=spec.get("linewidths", 0.5), - extent=(ppm_f2_0, ppm_f2_1, ppm_f1_0, ppm_f1_1), - alpha=0.5, + plot_data = 
PlottingDataForPlane( + pseudo3D, + plane_id, + plane, + X, + Y, + mask, + individual_masks, + sim_data, + sim_data_singles, + min_x, + max_x, + min_y, + max_y, + fit_color, + data_color, + rcount, + ccount, ) + all_plot_data.append(plot_data) + if plotly: + fig = create_plotly_figure(plot_data) + residual_fig = create_residual_figure(plot_data) + return fig, residual_fig + if first: + break + + with PdfPages(outname) as pdf: + for plot_data in all_plot_data: + create_matplotlib_figure( + plot_data, pdf, individual, label, ccpn_flag, show, test + ) - # make legend - if label: - legends += 1 - # hack for legend - ax.plot([], [], c=colors, label=label) - - # plt.xlim(ppm_f2_0, ppm_f2_1) - ax.invert_xaxis() - ax.set_xlabel(udic[f2]["label"] + " ppm") - if params.get("xlim"): - ax.set_xlim(*params.get("xlim")) - - # plt.ylim(ppm_f1_0, ppm_f1_1) - ax.invert_yaxis() - ax.set_ylabel(udic[f1]["label"] + " ppm") - - if legends > 0: - plt.legend( - loc="upper center", bbox_to_anchor=(0.5, 1.20), ncol=params.get("ncol", 2) - ) - - plt.tight_layout() - - #  add a list of outfiles - y = 0.025 - # only write cs levels if show_cs: True in yaml file - if params.get("show_cs"): - for num, j in enumerate(notes): - col = j[1] - con_strt = j[0] - ax.text(0.025, y, "cs=%.2e" % con_strt, color=col, transform=ax.transAxes) - y += 0.05 - - if params.get("clusters"): - peaklist = params.get("clusters") - if os.path.splitext(peaklist)[-1] == ".csv": - clusters = pd.read_csv(peaklist) - else: - clusters = pd.read_pickle(peaklist) - groups = clusters.groupby("CLUSTID") - for ind, group in groups: - if len(group) == 1: - ax.plot(group.X_PPM, group.Y_PPM, "ko", markersize=1) # , picker=5) - else: - ax.plot(group.X_PPM, group.Y_PPM, "o", markersize=1) # , picker=5) - - if params.get("outname") and (type(params.get("outname")) == list): - for i in params.get("outname"): - plt.savefig(i, bbox_inches="tight", dpi=300) - else: - plt.savefig(params.get("outname", "test.pdf"), bbox_inches="tight") - - # fig.canvas.mpl_connect("pick_event", onpick) - # line, = ax.plot(np.random.rand(100), 'o', picker=5) # 5 points tolerance - plt.show() + run_log() if __name__ == "__main__": diff --git a/test/test_cli.py b/test/test_cli.py index d30404c4..3c14cd4b 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -4,6 +4,8 @@ import pytest import peakipy.cli.main +import peakipy.cli.check_panel +import peakipy.cli.edit_panel from peakipy.cli.main import PeaklistFormat, Lineshape @@ -49,6 +51,37 @@ def test_fit_main_with_default(protein_L): peakipy.cli.main.fit(**args) +def test_fit_main_with_centers_floated(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + output_path=protein_L / Path("fits_PV_centers_floated.csv"), + fix=["fraction", "sigma"], + ) + peakipy.cli.main.fit(**args) + + +def test_fit_main_with_centers_bounded(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + output_path=protein_L / Path("fits_PV_centers_bounded.csv"), + fix=["fraction", "sigma"], + xy_bounds=[0.01, 0.1], + ) + peakipy.cli.main.fit(**args) + + +def test_fit_main_with_sigmas_floated(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + output_path=protein_L / Path("fits_PV_sigmas_floated.csv"), + fix=["fraction", "center"], + ) + peakipy.cli.main.fit(**args) + + def test_fit_main_with_vclist(protein_L): args = dict( peaklist_path=protein_L / Path("test.csv"), @@ 
-106,7 +139,8 @@ def test_check_main_with_default(protein_L): clusters=[1], first=True, label=True, - show=False, + show=True, + test=True, individual=True, ) peakipy.cli.main.check(**args) @@ -119,7 +153,8 @@ def test_check_main_with_gaussian(protein_L): clusters=[1], first=True, label=True, - show=False, + show=True, + test=True, individual=True, ) peakipy.cli.main.check(**args) @@ -132,7 +167,8 @@ def test_check_main_with_lorentzian(protein_L): clusters=[1], first=True, label=True, - show=False, + show=True, + test=True, individual=True, ) peakipy.cli.main.check(**args) @@ -145,7 +181,8 @@ def test_check_main_with_voigt(protein_L): clusters=[1], first=True, label=True, - show=False, + show=True, + test=True, individual=True, ) peakipy.cli.main.check(**args) @@ -158,7 +195,58 @@ def test_check_main_with_pv_pv(protein_L): clusters=[1], first=True, label=True, - show=False, + show=True, + test=True, individual=True, ) peakipy.cli.main.check(**args) + + +def test_check_panel_PVPV(protein_L): + args = dict( + fits_path=protein_L / Path("fits_PV_PV.csv"), + data_path=protein_L / Path("test1.ft2"), + config_path=protein_L / Path("peakipy.config"), + test=True, + ) + peakipy.cli.check_panel.create_check_panel(**args) + + +def test_check_panel_PV(protein_L): + args = dict( + fits_path=protein_L / Path("fits_PV.csv"), + data_path=protein_L / Path("test1.ft2"), + config_path=protein_L / Path("peakipy.config"), + test=True, + ) + peakipy.cli.check_panel.create_check_panel(**args) + + +def test_check_panel_V(protein_L): + args = dict( + fits_path=protein_L / Path("fits_V.csv"), + data_path=protein_L / Path("test1.ft2"), + config_path=protein_L / Path("peakipy.config"), + test=True, + ) + peakipy.cli.check_panel.create_check_panel(**args) + + +def test_check_panel_edit(protein_L): + args = dict( + fits_path=protein_L / Path("fits_V.csv"), + data_path=protein_L / Path("test1.ft2"), + config_path=protein_L / Path("peakipy.config"), + edit_panel=True, + test=True, + ) + peakipy.cli.check_panel.create_check_panel(**args) + + +def test_edit_panel(protein_L): + args = dict( + peaklist_path=protein_L / Path("test.csv"), + data_path=protein_L / Path("test1.ft2"), + test=True, + ) + peakipy.cli.edit_panel.main(**args) diff --git a/test/test_fitting.py b/test/test_fitting.py index 01c9ed38..7291885d 100644 --- a/test/test_fitting.py +++ b/test/test_fitting.py @@ -5,9 +5,11 @@ import numpy as np import pandas as pd import pytest +from pytest import fixture import nmrglue as ng from numpy.testing import assert_array_equal from lmfit import Model, Parameters +from lmfit.model import ModelResult from peakipy.io import Pseudo3D from peakipy.fitting import ( @@ -31,6 +33,24 @@ PeakLimits, update_params, make_masks_from_plane_data, + get_fit_peaks_result_validation_model, + FitPeaksResultRowPVPV, + FitPeaksResultRowVoigt, + FitPeaksResultRowGLPV, + filter_peak_clusters_by_max_cluster_size, + set_parameters_to_fix_during_fit, + unpack_fitted_parameters_for_lineshape, + get_default_lineshape_param_names, + split_parameter_sets_by_peak, + get_prefix_from_parameter_names, + create_parameter_dict, + perform_initial_lineshape_fit_on_cluster_of_peaks, + merge_unpacked_parameters_with_metadata, + add_vclist_to_df, + update_cluster_df_with_fit_statistics, + rename_columns_for_compatibility, + FitPeaksArgs, + FitPeaksInput, ) from peakipy.lineshapes import Lineshape, pvoigt2d, pv_pv @@ -690,3 +710,435 @@ def test_Pseudo3D(self): self.assertEqual(pseudo3D.f1_size, 256) self.assertEqual(pseudo3D.f2_size, 546) test_nu += 1 + + 
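Note on the new fitting tests that follow: they exercise helpers this patch series moves into peakipy/fitting.py. As a minimal sketch of the fix/float behaviour they cover, assuming only the lmfit Parameters API and the (parameters, message) return value shown in the tests below:

from lmfit import Parameters
from peakipy.fitting import set_parameters_to_fix_during_fit

# prefixed names mirror how peakipy labels per-peak parameters
params = Parameters()
params.add("p1_center_x", value=5.0, vary=True)
params.add("p1_sigma_x", value=1.0, vary=True)

# naming parameters clamps them (vary=False) for the per-plane refits;
# passing None or ["None"] leaves everything floating
fixed_params, message = set_parameters_to_fix_during_fit(
    params, ["p1_center_x", "p1_sigma_x"]
)
assert not fixed_params["p1_center_x"].vary
assert not fixed_params["p1_sigma_x"].vary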
+def test_get_fit_peaks_result_validation_model_PVPV(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.PV_PV) + assert validation_model == FitPeaksResultRowPVPV + + +def test_get_fit_peaks_result_validation_model_G(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.G) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_L(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.L) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_PV(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.PV) + assert validation_model == FitPeaksResultRowGLPV + + +def test_get_fit_peaks_result_validation_model_V(): + validation_model = get_fit_peaks_result_validation_model(Lineshape.V) + assert validation_model == FitPeaksResultRowVoigt + + +def test_filter_groups_by_max_cluster_size(): + groups = pd.DataFrame( + dict( + col1=[1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 5, 6, 7], + col2=[1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7], + ) + ).groupby("col1") + max_cluster_size = 3 + filtered_groups = filter_peak_clusters_by_max_cluster_size(groups, max_cluster_size) + filtered_group_names = filtered_groups.col1.unique() + expected_group_names = np.array([3, 4, 5, 6, 7]) + np.testing.assert_array_equal(filtered_group_names, expected_group_names) + + +def test_set_parameters_to_fix_during_fit(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["test1"] + ) + assert modified_parameter_set["test1"].vary == False + + +@fixture +def parameters_set_with_two_variables(): + parameter_set = Parameters() + parameter_set.add("prefix1_test1", vary=True) + parameter_set.add("prefix1_test2", vary=True) + return parameter_set + + +def test_set_parameters_to_fix_during_fit_2(parameters_set_with_two_variables): + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameters_set_with_two_variables, ["prefix1_test1", "prefix1_test2"] + ) + assert ( + modified_parameter_set["prefix1_test2"].vary + == modified_parameter_set["prefix1_test1"].vary + == False + ) + + +def test_set_parameters_to_fix_during_fit_3(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["test2"] + ) + assert ( + modified_parameter_set["test1"].vary + != modified_parameter_set["test2"].vary + == False + ) + + +def test_set_parameters_to_fix_during_fit_None(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, None + ) + assert ( + modified_parameter_set["test1"].vary + == modified_parameter_set["test2"].vary + == True + ) + + +def test_set_parameters_to_fix_during_fit_None_str(): + parameter_set = Parameters() + parameter_set.add("test1", vary=True) + parameter_set.add("test2", vary=True) + modified_parameter_set, float_str = set_parameters_to_fix_during_fit( + parameter_set, ["None"] + ) + assert ( + modified_parameter_set["test1"].vary + == modified_parameter_set["test2"].vary + == True + ) + + +def test_update_cluster_df_with_fit_statistics(): + result = ModelResult(Model(pvoigt2d), None, None) + result.aic = None + result.bic = None + data = [ + dict( + 
chisqr=None, + redchi=None, + residual_sum=None, + aic=None, + bic=None, + nfev=0, + ndata=0, + ) + ] + expected_cluster_df = pd.DataFrame(data) + actual_cluster_df = update_cluster_df_with_fit_statistics( + expected_cluster_df, result + ) + pd.testing.assert_frame_equal(actual_cluster_df, expected_cluster_df) + + +def test_rename_columns_for_compatibility(): + df = pd.DataFrame( + [ + dict( + amplitude=1, + amplitude_stderr=1, + X_AXIS=1, + Y_AXIS=1, + ASS="None", + MEMCNT=1, + X_RADIUS=1, + Y_RADIUS=1, + ) + ] + ) + expected_columns = [ + "amp", + "amp_err", + "init_center_x", + "init_center_y", + "assignment", + "memcnt", + "x_radius", + "y_radius", + ] + actual_columns = rename_columns_for_compatibility(df).columns + assert all([i == j for i, j in zip(actual_columns, expected_columns)]) + + +def test_get_default_param_names_pseudo_voigt(): + assert get_default_lineshape_param_names(Lineshape.PV) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_gaussian(): + assert get_default_lineshape_param_names(Lineshape.G) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_lorentzian(): + assert get_default_lineshape_param_names(Lineshape.L) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction", + ] + + +def test_get_default_param_names_pv_pv(): + assert get_default_lineshape_param_names(Lineshape.PV_PV) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "fraction_x", + "fraction_y", + ] + + +def test_get_default_param_names_voigt(): + assert get_default_lineshape_param_names(Lineshape.V) == [ + "amplitude", + "center_x", + "center_y", + "sigma_x", + "sigma_y", + "gamma_x", + "gamma_y", + "fraction", + ] + + +def test_split_parameter_sets_by_peak(default_pseudo_voigt_parameter_names): + # the second element of each tuple actually contains an + # lmfit.Parameter object + params = [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ("p2_amplitude", "amplitude"), + ("p2_center_x", "center_x"), + ("p2_center_y", "center_y"), + ("p2_sigma_x", "sigma_x"), + ("p2_sigma_y", "sigma_y"), + ("p2_fraction", "fraction"), + ("p3_amplitude", "amplitude"), + ("p3_center_x", "center_x"), + ("p3_center_y", "center_y"), + ("p3_sigma_x", "sigma_x"), + ("p3_sigma_y", "sigma_y"), + ("p3_fraction", "fraction"), + ] + expected_result = [ + [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ], + [ + ("p2_amplitude", "amplitude"), + ("p2_center_x", "center_x"), + ("p2_center_y", "center_y"), + ("p2_sigma_x", "sigma_x"), + ("p2_sigma_y", "sigma_y"), + ("p2_fraction", "fraction"), + ], + [ + ("p3_amplitude", "amplitude"), + ("p3_center_x", "center_x"), + ("p3_center_y", "center_y"), + ("p3_sigma_x", "sigma_x"), + ("p3_sigma_y", "sigma_y"), + ("p3_fraction", "fraction"), + ], + ] + expected_result_parameter_names = [[j[0] for j in i] for i in expected_result] + split_parameter_names = [ + [j[0] for j in i] + for i in split_parameter_sets_by_peak( + default_pseudo_voigt_parameter_names, params + ) + ] + assert split_parameter_names == expected_result_parameter_names + + +@fixture +def default_pseudo_voigt_parameter_names(): + return 
Model(pvoigt2d).param_names + + +def test_get_prefix_from_parameter_names(default_pseudo_voigt_parameter_names): + parameter_items_with_prefixes = [ + ("p1_amplitude", "amplitude"), + ("p1_center_x", "center_x"), + ("p1_center_y", "center_y"), + ("p1_sigma_x", "sigma_x"), + ("p1_sigma_y", "sigma_y"), + ("p1_fraction", "fraction"), + ] + expected_result = "p1_" + actual_result = get_prefix_from_parameter_names( + default_pseudo_voigt_parameter_names, parameter_items_with_prefixes + ) + assert expected_result == actual_result + + +@fixture +def pseudo_voigt_model_result(): + m1 = Model(pvoigt2d, prefix="p1_") + m2 = Model(pvoigt2d, prefix="p2_") + model = m1 + m2 + params = model.make_params() + model_result = ModelResult(model, params) + return model_result + + +def test_create_parameter_dict(pseudo_voigt_model_result): + prefix = "p1_" + params = list(pseudo_voigt_model_result.params.items())[:6] + expected_result = dict( + prefix="p1_", + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ) + actual_result = create_parameter_dict(prefix, params) + assert expected_result == actual_result + + +def test_unpack_fitted_parameters_for_lineshape_PV(pseudo_voigt_model_result): + expected_params = [ + dict( + prefix="p1_", + plane=0, + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ), + dict( + prefix="p2_", + plane=0, + amplitude=1.0, + amplitude_stderr=None, + center_x=0.5, + center_x_stderr=None, + center_y=0.5, + center_y_stderr=None, + sigma_x=1.0, + sigma_x_stderr=None, + sigma_y=1.0, + sigma_y_stderr=None, + fraction=0.5, + fraction_stderr=None, + ), + ] + unpacked_params = unpack_fitted_parameters_for_lineshape( + Lineshape.PV, list(pseudo_voigt_model_result.params.items()), plane_number=0 + ) + assert expected_params == unpacked_params + + +def test_merge_unpacked_parameters_with_metadata(): + cluster_fit_df = pd.DataFrame( + dict( + plane=[0, 1, 2, 3, 0, 1, 2, 3], + prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], + ) + ) + peak_df = pd.DataFrame(dict(ASS=["p1", "p2"], data=["p1_data", "p2_data"])) + expected_result = pd.DataFrame( + dict( + plane=[0, 1, 2, 3, 0, 1, 2, 3], + prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], + ASS=["p1", "p1", "p1", "p1", "p2", "p2", "p2", "p2"], + data=[ + "p1_data", + "p1_data", + "p1_data", + "p1_data", + "p2_data", + "p2_data", + "p2_data", + "p2_data", + ], + ) + ) + actual_result = merge_unpacked_parameters_with_metadata(cluster_fit_df, peak_df) + assert expected_result.equals(actual_result) + + +def test_add_vclist_to_df(): + args = FitPeaksArgs( + noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) + ) + fit_peaks_input = FitPeaksInput( + args=args, data=None, config=None, plane_numbers=None + ) + df = pd.DataFrame(dict(plane=[0, 1, 2])) + expected_df = pd.DataFrame(dict(plane=[0, 1, 2], vclist=[1, 2, 3])) + actual_df = add_vclist_to_df(fit_peaks_input, df) + assert actual_df.equals(expected_df) + + +def test_add_vclist_to_df_plane_order(): + args = FitPeaksArgs( + noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) + ) + fit_peaks_input = FitPeaksInput( + args=args, data=None, 
config=None, plane_numbers=None + ) + df = pd.DataFrame(dict(plane=[2, 1, 0])) + expected_df = pd.DataFrame(dict(plane=[2, 1, 0], vclist=[3, 2, 1])) + actual_df = add_vclist_to_df(fit_peaks_input, df) + assert actual_df.equals(expected_df) + + +# def test_perform_initial_lineshape_fit_on_cluster_of_peaks(pseudo_voigt_model_result): +# expected_result = pseudo_voigt_model_result +# actual_result = perform_initial_lineshape_fit_on_cluster_of_peaks() +# assert expected_result == actual_result diff --git a/test/test_io.py b/test/test_io.py index a945852d..313ed0e3 100644 --- a/test/test_io.py +++ b/test/test_io.py @@ -79,14 +79,6 @@ def test_read_pipe_peaklist(self): # self.assertEqual(peaklist.df.ASS.iloc[1], "None_dummy_1") -class TestSpecScript(unittest.TestCase): - @patch("peakipy.cli.main.spec") - def test_main(self, MockSpec): - args = {"": "hello", "": "data"} - spec = MockSpec(args) - self.assertIsNotNone(spec) - - def test_load_config_existing(): config_path = Path("test_config.json") # Create a dummy existing config file diff --git a/test/test_plotting.py b/test/test_plotting.py new file mode 100644 index 00000000..9a0182ed --- /dev/null +++ b/test/test_plotting.py @@ -0,0 +1,104 @@ +from dataclasses import dataclass + +import numpy as np + +from peakipy.plotting import ( + construct_surface_legend_string, + plot_data_is_valid, + df_to_rich_table, +) + + +@dataclass +class Row: + assignment: str + + +def test_construct_surface_legend_string(): + row = Row("assignment") + expected_legend = "assignment" + actual_legend = construct_surface_legend_string(row) + assert expected_legend == actual_legend + + +import pytest +from unittest.mock import MagicMock, patch + + +# Mock PlottingDataForPlane class for testing purposes +class PlottingDataForPlane: + def __init__( + self, x_plot, y_plot, masked_data, plane_lineshape_parameters, pseudo3D, plane + ): + self.x_plot = x_plot + self.y_plot = y_plot + self.masked_data = masked_data + self.plane_lineshape_parameters = plane_lineshape_parameters + self.pseudo3D = pseudo3D + self.plane = plane + + +@pytest.fixture +def valid_plot_data(): + return PlottingDataForPlane( + x_plot=np.array([[1, 2, 3]]), + y_plot=np.array([[4, 5, 6]]), + masked_data=np.array([[7, 8, 9]]), + plane_lineshape_parameters=MagicMock(clustid=1), + pseudo3D=MagicMock( + f1_ppm_limits=[0, 1], f2_ppm_limits=[0, 1], f1_label="F1", f2_label="F2" + ), + plane=MagicMock(clustid=1), + ) + + +@pytest.fixture +def invalid_plot_data_empty_x(): + return PlottingDataForPlane( + x_plot=np.array([]), + y_plot=np.array([[4, 5, 6]]), + masked_data=np.array([[7, 8, 9]]), + plane_lineshape_parameters=MagicMock(clustid=1), + pseudo3D=MagicMock( + f1_ppm_limits=[0, 1], f2_ppm_limits=[0, 1], f1_label="F1", f2_label="F2" + ), + plane=MagicMock(clustid=1), + ) + + +@pytest.fixture +def invalid_plot_data_empty_masked(): + return PlottingDataForPlane( + x_plot=np.array([[1, 2, 3]]), + y_plot=np.array([[4, 5, 6]]), + masked_data=np.array([[]]), + plane_lineshape_parameters=MagicMock(clustid=1), + pseudo3D=MagicMock( + f1_ppm_limits=[0, 1], f2_ppm_limits=[0, 1], f1_label="F1", f2_label="F2" + ), + plane=MagicMock(clustid=1), + ) + + +def test_plot_data_is_valid(valid_plot_data): + assert plot_data_is_valid(valid_plot_data) == True + + +@patch("peakipy.plotting.print") +@patch("peakipy.plotting.plt.close") +def test_plot_data_is_invalid_empty_x( + mock_close, mock_print, invalid_plot_data_empty_x +): + assert plot_data_is_valid(invalid_plot_data_empty_x) == False + assert mock_print.call_count == 3 + 
mock_close.assert_called_once() + + +@patch("peakipy.plotting.print") +@patch("peakipy.plotting.plt.close") +def test_plot_data_is_invalid_empty_masked( + mock_close, mock_print, invalid_plot_data_empty_masked +): + assert plot_data_is_valid(invalid_plot_data_empty_masked) == False + assert mock_print.call_count == 4 + mock_close.assert_called_once() From 2a5e97fcc97d95c59089b2a7b4a288d7a76c3d3a Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Sun, 26 May 2024 21:33:09 -0400 Subject: [PATCH 34/37] more tests and refactoring --- Makefile | 1 - peakipy/cli/edit_fits_app/main.py | 628 ------ .../cli/edit_fits_app/templates/index.html | 16 - peakipy/cli/edit_fits_app/theme.yaml | 18 - peakipy/cli/fit.py | 563 ----- peakipy/cli/spec.py | 92 - peakipy/core.py | 1950 ----------------- peakipy/fitting.py | 538 ++++- peakipy/plotting.py | 3 + test/test_core.py | 570 ----- test/test_fit.py | 460 ---- 11 files changed, 538 insertions(+), 4301 deletions(-) delete mode 100755 peakipy/cli/edit_fits_app/main.py delete mode 100644 peakipy/cli/edit_fits_app/templates/index.html delete mode 100644 peakipy/cli/edit_fits_app/theme.yaml delete mode 100644 peakipy/cli/fit.py delete mode 100644 peakipy/cli/spec.py delete mode 100644 peakipy/core.py delete mode 100644 test/test_core.py delete mode 100644 test/test_fit.py diff --git a/Makefile b/Makefile index 386f86c3..432dee0d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,6 @@ .PHONY: coverage coverage: - #coverage run -m pytest test/test_core.py test/test_main.py test/test_fit.py test/test_cli.py coverage run -m pytest test/test_fitting.py \ test/test_lineshapes.py \ test/test_io.py \ diff --git a/peakipy/cli/edit_fits_app/main.py b/peakipy/cli/edit_fits_app/main.py deleted file mode 100755 index de0d67fe..00000000 --- a/peakipy/cli/edit_fits_app/main.py +++ /dev/null @@ -1,628 +0,0 @@ -#!/usr/bin/env python3 -""" Script for checking fits and editing fit params - - Usage: - edit_fits_script.py [options] - - Arguments: - peaklist output from read_peaklist.py (csv, tab or pkl) - NMRPipe data - - Options: - --dims= order of dimensions [default: 0,1,2] - - - peakipy - deconvolute overlapping NMR peaks - Copyright (C) 2019 Jacob Peter Brady - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . 
- -""" -import os -import json - -from docopt import docopt -from pathlib import Path - -import pandas as pd -import nmrglue as ng -import numpy as np -import matplotlib.pyplot as plt -from matplotlib.cm import magma, autumn - -from scipy import ndimage - -# from scipy.ndimage.morphology import binary_dilation -from skimage.filters import threshold_otsu -from skimage.morphology import binary_closing, square, rectangle, disk - -from bokeh.events import ButtonClick, DoubleTap -from bokeh.layouts import row, column, widgetbox -from bokeh.models import ColumnDataSource -from bokeh.models.tools import HoverTool -from bokeh.models.widgets import ( - Slider, - Select, - Button, - DataTable, - TableColumn, - NumberFormatter, - NumberEditor, - IntEditor, - StringEditor, - StringFormatter, - SelectEditor, - TextInput, - RadioButtonGroup, - Div, -) -from bokeh.plotting import figure -from bokeh.io import curdoc -from bokeh.palettes import PuBuGn9, Category20 - -from peakipy.core import Pseudo3D - - -def clusters( - df, data, thres=None, struc_el="square", struc_size=(3,), iterations=1, l_struc=None -): - """Find clusters of peaks - - Need to update these docs. - - thres : float - threshold for signals above which clusters are selected - ndil: int - number of iterations of ndimage.binary_dilation function if set to 0 then function not used - """ - - peaks = [[y, x] for y, x in zip(df.Y_AXIS, df.X_AXIS)] - - if thres == None: - thresh = threshold_otsu(data[0]) - else: - thresh = thres - - thresh_data = np.bitwise_or(data[0] < (thresh * -1.0), data[0] > thresh) - - if struc_el == "disk": - radius = struc_size[0] - print(f"using disk with {radius}") - closed_data = binary_closing(thresh_data, disk(int(radius))) - # closed_data = binary_dilation(thresh_data, disk(radius), iterations=iterations) - - elif struc_el == "square": - width = struc_size[0] - print(f"using square with {width}") - closed_data = binary_closing(thresh_data, square(int(width))) - # closed_data = binary_dilation(thresh_data, square(width), iterations=iterations) - - elif struc_el == "rectangle": - width, height = struc_size - print(f"using rectangle with {width} and {height}") - closed_data = binary_closing(thresh_data, rectangle(int(width), int(height))) - # closed_data = binary_dilation(thresh_data, rectangle(width, height), iterations=iterations) - - else: - closed_data = thresh_data - print(f"Not using any closing function") - - labeled_array, num_features = ndimage.label(closed_data, l_struc) - # print(labeled_array, num_features) - - df["CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] - - #  renumber "0" clusters - max_clustid = df["CLUSTID"].max() - n_of_zeros = len(df[df["CLUSTID"] == 0]["CLUSTID"]) - df.loc[df[df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( - max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int - ) - - for ind, group in df.groupby("CLUSTID"): - df.loc[group.index, "MEMCNT"] = len(group) - - df["color"] = df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", - axis=1, - ) - - source.data = {col: df[col] for col in df.columns} - - return df - - -def recluster_peaks(event): - struc_size = tuple([int(i) for i in struct_el_size.value.split(",")]) - - print(struc_size) - clusters( - df, - data, - thres=eval(contour_start.value), - struc_el=struct_el.value, - struc_size=struc_size, - # iterations=int(iterations.value) - ) - # print("struct", struct_el.value) - # print("struct size", struct_el_size.value ) - # print(type(struct_el_size.value) ) - # 
print(type(eval(struct_el_size.value)) ) - # print(type([].extend(eval(struct_el_size.value))) - - -def update_memcnt(df): - for ind, group in df.groupby("CLUSTID"): - df.loc[group.index, "MEMCNT"] = len(group) - - # set cluster colors (set to black if singlet peaks) - df["color"] = df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", - axis=1, - ) - # update source data - source.data = {col: df[col] for col in df.columns} - return df - - -def fit_selected(event): - selectionIndex = source.selected.indices - current = df.iloc[selectionIndex] - - df.loc[selectionIndex, "X_RADIUS_PPM"] = slider_X_RADIUS.value - df.loc[selectionIndex, "Y_RADIUS_PPM"] = slider_Y_RADIUS.value - - df.loc[selectionIndex, "X_DIAMETER_PPM"] = current["X_RADIUS_PPM"] * 2.0 - df.loc[selectionIndex, "Y_DIAMETER_PPM"] = current["Y_RADIUS_PPM"] * 2.0 - - selected_df = df[df.CLUSTID.isin(list(current.CLUSTID))] - - selected_df.to_csv("~tmp.csv") - - lineshape = lineshapes[radio_button_group.active] - print("Using LS = ", lineshape) - print( - f"fit_peaks ~tmp.csv {data_path} ~tmp_out.csv --plot=out --show --lineshape={lineshape} --dims={_dims}" - ) - os.system( - f"fit_peaks ~tmp.csv {data_path} ~tmp_out.csv --plot=out --show --lineshape={lineshape} --dims={_dims}" - ) - - -def save_peaks(event): - if savefilename.value: - to_save = Path(savefilename.value) - else: - to_save = Path(savefilename.placeholder) - - if to_save.exists(): - os.system(f"cp {to_save} {to_save}.bak") - print(f"Making backup {to_save}.bak") - - print(f"Saving peaks to {to_save}") - if to_save.suffix == ".csv": - df.to_csv(to_save, float_format="%.4f", index=False) - else: - df.to_pickle(to_save) - - -def select_callback(attrname, old, new): - # print("Calling Select Callback") - selectionIndex = source.selected.indices - current = df.iloc[selectionIndex] - - # update memcnt - update_memcnt(df) - - -def peak_pick_callback(event): - print(event.x, event.y) - - -def slider_callback(attrname, old, new): - selectionIndex = source.selected.indices - current = df.iloc[selectionIndex] - - df.loc[selectionIndex, "X_RADIUS"] = slider_X_RADIUS.value * pt_per_ppm_f2 - df.loc[selectionIndex, "Y_RADIUS"] = slider_Y_RADIUS.value * pt_per_ppm_f1 - df.loc[selectionIndex, "X_RADIUS_PPM"] = slider_X_RADIUS.value - df.loc[selectionIndex, "Y_RADIUS_PPM"] = slider_Y_RADIUS.value - - df.loc[selectionIndex, "X_DIAMETER_PPM"] = current["X_RADIUS_PPM"] * 2.0 - df.loc[selectionIndex, "Y_DIAMETER_PPM"] = current["Y_RADIUS_PPM"] * 2.0 - df.loc[selectionIndex, "X_DIAMETER"] = current["X_RADIUS"] * 2.0 - df.loc[selectionIndex, "Y_DIAMETER"] = current["Y_RADIUS"] * 2.0 - - # set edited rows to True - df.loc[selectionIndex, "Edited"] = True - - # selected_df = df[df.CLUSTID.isin(list(current.CLUSTID))] - # print(list(selected_df)) - source.data = {col: df[col] for col in df.columns} - - -def get_contour_data(data, levels, **kwargs): - cs = plt.contour(data, levels, **kwargs) - xs = [] - ys = [] - xt = [] - yt = [] - col = [] - text = [] - isolevelid = 0 - for isolevel in cs.collections: - isocol = isolevel.get_color()[0] - thecol = 3 * [None] - theiso = str(cs.get_array()[isolevelid]) - isolevelid += 1 - for i in range(3): - thecol[i] = int(255 * isocol[i]) - thecol = "#%02x%02x%02x" % (thecol[0], thecol[1], thecol[2]) - - for path in isolevel.get_paths(): - v = path.vertices - x = v[:, 0] - y = v[:, 1] - xs.append(x.tolist()) - ys.append(y.tolist()) - indx = int(len(x) / 2) - indy = int(len(y) / 2) - xt.append(x[indx]) - yt.append(y[indy]) - 
text.append(theiso) - col.append(thecol) - - source = ColumnDataSource( - data={"xs": xs, "ys": ys, "line_color": col, "xt": xt, "yt": yt, "text": text} - ) - return source - - -def update_contour(attrname, old, new): - new_cs = eval(contour_start.value) - cl = new_cs * contour_factor ** np.arange(contour_num) - spec_source.data = get_contour_data(data[0], cl, extent=extent).data - - -def exit_edit_peaks(event): - exit() - - -#  Script starts here - -args = docopt(__doc__) -path = Path(args.get("")) - -if path.suffix == ".csv": - df = pd.read_csv(path) # , comment="#") -elif path.suffix == ".tab": - df = pd.read_csv(path, sep="\t") # comment="#") -else: - df = pd.read_pickle(path) - -# make diameter columns -if "X_DIAMETER_PPM" in df.columns: - pass -else: - df["X_DIAMETER_PPM"] = df["X_RADIUS_PPM"] * 2.0 - df["Y_DIAMETER_PPM"] = df["Y_RADIUS_PPM"] * 2.0 - -#  make a column to track edited peaks -if "Edited" in df.columns: - pass -else: - df["Edited"] = np.zeros(len(df), dtype=bool) - -if "include" in df.columns: - pass -else: - df["include"] = df.apply(lambda _: "yes", axis=1) -# df["color"] = df.Edited.apply(lambda x: 'red' if x else 'black') - -# color clusters -df["color"] = df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", axis=1 -) - -# make datasource -source = ColumnDataSource(data=dict()) -source.data = {col: df[col] for col in df.columns} - - -#  read dims from config -config_path = Path("peakipy.config") -if config_path.exists(): - config = json.load(open(config_path)) - print(f"Using config file with --dims={config.get('--dims')}") - dims = config.get("--dims", [0, 1, 2]) - _dims = ",".join(str(i) for i in dims) - -else: - # get dim numbers from commandline - _dims = args.get("--dims") - dims = [int(i) for i in _dims.split(",")] - - -# read pipe data -data_path = args.get("") -dic, data = ng.pipe.read(data_path) -pseudo3D = Pseudo3D(dic, data, dims) -data = pseudo3D.data -udic = pseudo3D.udic - -dims = pseudo3D.dims -planes, f1, f2 = dims -# size of f1 and f2 in points -f2pts = pseudo3D.f2_size -f1pts = pseudo3D.f1_size - -#  points per ppm -pt_per_ppm_f1 = pseudo3D.pt_per_ppm_f1 -pt_per_ppm_f2 = pseudo3D.pt_per_ppm_f2 - -# get ppm limits for ppm scales -uc_f1 = pseudo3D.uc_f1 -ppm_f1 = uc_f1.ppm_scale() -ppm_f1_0, ppm_f1_1 = uc_f1.ppm_limits() - -uc_f2 = pseudo3D.uc_f2 -ppm_f2 = uc_f2.ppm_scale() -ppm_f2_0, ppm_f2_1 = uc_f2.ppm_limits() - -f2_label = pseudo3D.f2_label -f1_label = pseudo3D.f1_label -#  make bokeh figure -tools = [ - "redo", - "undo", - "tap", - "box_zoom", - "lasso_select", - "box_select", - "wheel_zoom", - "pan", - "reset", -] -p = figure( - x_range=(ppm_f2_0, ppm_f2_1), - y_range=(ppm_f1_0, ppm_f1_1), - x_axis_label=f"{f2_label} - ppm", - y_axis_label=f"{f1_label} - ppm", - title="Check fits", - tools=tools, - active_drag="pan", - active_scroll="wheel_zoom", - active_tap=None, -) - -thres = threshold_otsu(data[0]) -contour_start = thres # contour level start value -contour_num = 20 # number of contour levels -contour_factor = 1.20 # scaling factor between contour levels -cl = contour_start * contour_factor ** np.arange(contour_num) -extent = (ppm_f2_0, ppm_f2_1, ppm_f1_0, ppm_f1_1) -spec_source = get_contour_data(data[0], cl, extent=extent) -#  negative contours -spec_source_neg = get_contour_data(data[0] * -1.0, cl, extent=extent, cmap=autumn) -p.multi_line(xs="xs", ys="ys", line_color="line_color", source=spec_source) -p.multi_line(xs="xs", ys="ys", line_color="line_color", source=spec_source_neg) -# contour_num = 
Slider(title="contour number", value=20, start=1, end=50,step=1) -# contour_start = Slider(title="contour start", value=100000, start=1000, end=10000000,step=1000) -contour_start = TextInput(value="%.2e" % thres, title="Contour level:") -# contour_factor = Slider(title="contour factor", value=1.20, start=1., end=2.,step=0.05) -contour_start.on_change("value", update_contour) -# for w in [contour_num,contour_start,contour_factor]: -# w.on_change("value",update_contour) - -#  plot mask outlines -el = p.ellipse( - x="X_PPM", - y="Y_PPM", - width="X_DIAMETER_PPM", - height="Y_DIAMETER_PPM", - source=source, - fill_color="color", - fill_alpha=0.1, - line_dash="dotted", - line_color="red", -) - -p.add_tools( - HoverTool( - tooltips=[ - ("Index", "$index"), - ("Assignment", "@ASS"), - ("CLUSTID", "@CLUSTID"), - ("RADII", "@X_RADIUS_PPM{0.000}, @Y_RADIUS_PPM{0.000}"), - (f"{f2_label},{f1_label}", "$x{0.000} ppm, $y{0.000} ppm"), - ], - mode="mouse", - # add renderers - renderers=[el], - ) -) -# p.toolbar.active_scroll = "auto" - -p.circle(x="X_PPM", y="Y_PPM", source=source, color="color") -# plot cluster numbers -p.text( - x="X_PPM", - y="Y_PPM", - text="CLUSTID", - text_color="color", - source=source, - text_font_size="8pt", - text_font_style="bold", -) - -p.on_event(DoubleTap, peak_pick_callback) - -# configure sliders -slider_X_RADIUS = Slider( - title="X_RADIUS - ppm", - start=0.001, - end=0.200, - value=0.040, - step=0.001, - format="0[.]000", -) -slider_Y_RADIUS = Slider( - title="Y_RADIUS - ppm", - start=0.010, - end=2.000, - value=0.400, - step=0.001, - format="0[.]000", -) - -slider_X_RADIUS.on_change( - "value", lambda attr, old, new: slider_callback(attr, old, new) -) -slider_Y_RADIUS.on_change( - "value", lambda attr, old, new: slider_callback(attr, old, new) -) - -# save file -savefilename = TextInput( - title="Save file as (.csv or .pkl)", placeholder="edited_peaks.csv" -) -button = Button(label="Save", button_type="success") -button.on_event(ButtonClick, save_peaks) -# call fit_peaks -fit_button = Button(label="Fit selected cluster", button_type="primary") -radio_button_group = RadioButtonGroup( - labels=["PV", "G", "L", "PV_L", "PV_G", "PV_PV", "G_L"], active=0 -) -lineshapes = {0: "PV", 1: "G", 2: "L", 3: "PV_L", 4: "PV_G", 5: "PV_PV", 6: "G_L"} -ls_div = Div( - text="Choose lineshape you wish to fit. This can be Pseudo-voigt (PV), Gaussian (G), Lorentzian (L), PV/G, PV/L, PV_PV, G/L. PV/G fits a PV lineshape to the direct dimension and a G lineshape to the indirect." -) -clust_div = Div( - text="""If you want to adjust how the peaks are automatically clustered then try changing the - width/diameter/height (integer values) of the structuring element used during the binary dilation step - (you can also remove it by selecting 'None'). 
Increasing the size of the structuring element will cause - peaks to be more readily incorporated into clusters.""" -) - -#  not sure this is needed -selected_df = df.copy() - -fit_button.on_event(ButtonClick, fit_selected) - -# selected_columns = [ -# "ASS", -# "CLUSTID", -# "X_PPM", -# "Y_PPM", -# "X_RADIUS_PPM", -# "Y_RADIUS_PPM", -# "XW_HZ", -# "YW_HZ", -# "VOL", -# "include", -# "MEMCNT", -# ] -# -# columns = [TableColumn(field=field, title=field) for field in selected_columns] -columns = [ - TableColumn(field="ASS", title="Assignment"), - TableColumn(field="CLUSTID", title="Cluster", editor=IntEditor()), - TableColumn( - field="X_PPM", - title=f"{f2_label}", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="Y_PPM", - title=f"{f1_label}", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="X_RADIUS_PPM", - title=f"{f2_label} radius (ppm)", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="Y_RADIUS_PPM", - title=f"{f1_label} radius (ppm)", - editor=NumberEditor(step=0.0001), - formatter=NumberFormatter(format="0.0000"), - ), - TableColumn( - field="XW_HZ", - title=f"{f2_label} LW (Hz)", - editor=NumberEditor(step=0.01), - formatter=NumberFormatter(format="0.00"), - ), - TableColumn( - field="YW_HZ", - title=f"{f1_label} LW (Hz)", - editor=NumberEditor(step=0.01), - formatter=NumberFormatter(format="0.00"), - ), - TableColumn(field="VOL", title="Volume", formatter=NumberFormatter(format="0.0")), - TableColumn( - field="include", title="Include", editor=SelectEditor(options=["yes", "no"]) - ), - TableColumn(field="MEMCNT", title="MEMCNT", editor=IntEditor()), -] - -data_table = DataTable( - source=source, columns=columns, editable=True, fit_columns=True, width=800 -) - -# callback for adding -# source.selected.on_change('indices', callback) -source.selected.on_change("indices", select_callback) - -# controls = column(slider, button) -exit_button = Button(label="Quit", button_type="warning") -exit_button.on_event(ButtonClick, exit_edit_peaks) - -controls = column( - row(slider_X_RADIUS, slider_Y_RADIUS), - row( - column(contour_start, fit_button, widgetbox(ls_div), radio_button_group), - column(savefilename, button, exit_button), - ), -) - -# widgetbox(radio_button_group) -struct_el = Select( - title="Structuring element:", - value="disk", - options=["square", "disk", "rectangle", "None"], -) - -struct_el_size = TextInput( - value="3", title="Size(width/radius or width,height for rectangle):" -) -# iterations = TextInput(value="1", title="Number of iterations of binary dilation") -recluster = Button(label="Re-cluster", button_type="warning") -recluster.on_event(ButtonClick, recluster_peaks) - -# cluster_widget = widgetbox(struct_el, struct_el_size) -# recluster) -curdoc().add_root( - row( - column(p, widgetbox(clust_div), row(struct_el, struct_el_size), recluster), - column(data_table, controls), - ) -) -curdoc().title = "peakipy: Edit Fits" -# curdoc().theme = 'dark_minimal' -# update() diff --git a/peakipy/cli/edit_fits_app/templates/index.html b/peakipy/cli/edit_fits_app/templates/index.html deleted file mode 100644 index ce7d5b89..00000000 --- a/peakipy/cli/edit_fits_app/templates/index.html +++ /dev/null @@ -1,16 +0,0 @@ -{% extends base %} - -{% block title %}Bokeh Crossfilter Example{% endblock %} - -{% block preamble %} - -{% endblock %} diff --git a/peakipy/cli/edit_fits_app/theme.yaml 
b/peakipy/cli/edit_fits_app/theme.yaml deleted file mode 100644 index b972fba4..00000000 --- a/peakipy/cli/edit_fits_app/theme.yaml +++ /dev/null @@ -1,18 +0,0 @@ -attrs: - Figure: - background_fill_color: 'white' - border_fill_color: '#2F2F2F' - outline_line_color: '#444444' - - Axis: - axis_line_color: "white" - axis_label_text_color: "white" - major_label_text_color: "white" - major_tick_line_color: "white" - minor_tick_line_color: "white" - minor_tick_line_color: "white" - Grid: - grid_line_dash: [6, 4] - grid_line_alpha: .3 - -text_color: "white" diff --git a/peakipy/cli/fit.py b/peakipy/cli/fit.py deleted file mode 100644 index 8ffc73e1..00000000 --- a/peakipy/cli/fit.py +++ /dev/null @@ -1,563 +0,0 @@ -#!/usr/bin/env python3 -"""Fit and deconvolute NMR peaks: Functions used for running peakipy fit -""" -from pathlib import Path -from typing import Optional, List, Tuple -from dataclasses import dataclass, field - -import numpy as np -import pandas as pd - -from rich import print -from rich.console import Console -from pydantic import BaseModel -from lmfit import Model, Parameter, Parameters -from lmfit.model import ModelResult - -from peakipy.lineshapes import ( - Lineshape, - pvoigt2d, - voigt2d, - pv_pv, - get_lineshape_function, -) -from peakipy.fitting import ( - fix_params, - to_prefix, - get_limits_for_axis_in_points, - deal_with_peaks_on_edge_of_spectrum, - select_planes_above_threshold_from_masked_data, - select_reference_planes_using_indices, - make_models, - make_meshgrid, - slice_peaks_from_data_using_mask, - make_mask_from_peak_cluster, -) - -console = Console() -π = np.pi -sqrt2 = np.sqrt(2.0) - -tmp_path = Path("tmp") -tmp_path.mkdir(exist_ok=True) -log_path = Path("log.txt") - - -@dataclass -class FitPeaksArgs: - noise: float - uc_dics: dict - lineshape: Lineshape - dims: List[int] = field(default_factory=lambda: [0, 1, 2]) - colors: Tuple[str] = ("#5e3c99", "#e66101") - max_cluster_size: Optional[int] = None - to_fix: List[str] = field(default_factory=lambda: ["fraction", "sigma", "center"]) - xy_bounds: Tuple[float, float] = ((0, 0),) - vclist: Optional[Path] = (None,) - plane: Optional[List[int]] = (None,) - exclude_plane: Optional[List[int]] = (None,) - reference_plane_indices: List[int] = ([],) - initial_fit_threshold: Optional[float] = (None,) - jack_knife_sample_errors: bool = False - mp: bool = (True,) - verbose: bool = (False,) - vclist_data: Optional[np.array] = None - - -@dataclass -class Config: - fit_method: str = "leastsq" - - -@dataclass -class FitPeaksInput: - """input data for the fit_peaks function""" - - args: FitPeaksArgs - data: np.array - config: Config - plane_numbers: list - - -@dataclass -class FitPeakClusterInput: - args: FitPeaksArgs - data: np.array - config: Config - plane_numbers: list - clustid: int - group: pd.DataFrame - last_peak: pd.DataFrame - mask: np.array - mod: Model - p_guess: Parameters - XY: np.array - peak_slices: np.array - XY_slices: np.array - min_x: float - max_x: float - min_y: float - max_y: float - uc_dics: dict - first_plane_data: np.array - weights: np.array - fit_method: str = "leastsq" - verbose: bool = False - masked_plane_data: np.array = field(init=False) - - def __post_init__(self): - self.masked_plane_data = np.array([d[self.mask] for d in self.data]) - - -@dataclass -class FitResult: - out: ModelResult - mask: np.array - fit_str: str - log: str - group: pd.core.groupby.generic.DataFrameGroupBy - uc_dics: dict - min_x: float - min_y: float - max_x: float - max_y: float - X: np.array - Y: np.array - Z: np.array 
- Z_sim: np.array - peak_slices: np.array - XY_slices: np.array - weights: np.array - mod: Model - - def check_shifts(self): - """Calculate difference between initial peak positions - and check whether they moved too much from original - position - - """ - pass - - -@dataclass -class FitPeaksResult: - df: pd.DataFrame - log: str - - -class FitPeaksResultDfRow(BaseModel): - fit_prefix: str - assignment: str - amp: float - amp_err: float - center_x: float - init_center_x: float - center_y: float - init_center_y: float - sigma_x: float - sigma_y: float - clustid: int - memcnt: int - plane: int - x_radius: float - y_radius: float - x_radius_ppm: float - y_radius_ppm: float - lineshape: str - aic: float - chisqr: float - redchi: float - residual_sum: float - height: float - height_err: float - fwhm_x: float - fwhm_y: float - center_x_ppm: float - center_y_ppm: float - init_center_x_ppm: float - init_center_y_ppm: float - sigma_x_ppm: float - sigma_y_ppm: float - fwhm_x_ppm: float - fwhm_y_ppm: float - fwhm_x_hz: float - fwhm_y_hz: float - jack_knife_sample_index: Optional[int] - - -class FitPeaksResultRowGLPV(FitPeaksResultDfRow): - fraction: float - - -class FitPeaksResultRowPVPV(FitPeaksResultDfRow): - fraction_x: float # for PV_PV model - fraction_y: float # for PV_PV model - - -class FitPeaksResultRowVoigt(FitPeaksResultDfRow): - gamma_x_ppm: float # for voigt - gamma_y_ppm: float # for voigt - - -def get_fit_peaks_result_validation_model(lineshape): - match lineshape: - case lineshape.V: - validation_model = FitPeaksResultRowVoigt - case lineshape.PV_PV: - validation_model = FitPeaksResultRowPVPV - case _: - validation_model = FitPeaksResultRowGLPV - return validation_model - - -def filter_peak_clusters_by_max_cluster_size(grouped_peak_clusters, max_cluster_size): - filtered_peak_clusters = grouped_peak_clusters.filter( - lambda x: len(x) <= max_cluster_size - ) - return filtered_peak_clusters - - -def set_parameters_to_fix_during_fit(first_plane_fit_params, to_fix): - # fix sigma center and fraction parameters - # could add an option to select params to fix - match to_fix: - case None | () | []: - float_str = "Floating all parameters" - parameter_set = first_plane_fit_params - case ["None"] | ["none"]: - float_str = "Floating all parameters" - parameter_set = first_plane_fit_params - case _: - float_str = f"Fixing parameters: {to_fix}" - parameter_set = fix_params(first_plane_fit_params, to_fix) - return parameter_set, float_str - - -def get_default_lineshape_param_names(lineshape: Lineshape): - match lineshape: - case Lineshape.PV | Lineshape.G | Lineshape.L: - param_names = Model(pvoigt2d).param_names - case Lineshape.V: - param_names = Model(voigt2d).param_names - case Lineshape.PV_PV: - param_names = Model(pv_pv).param_names - return param_names - - -def split_parameter_sets_by_peak( - default_param_names: List, params: List[Tuple[str, Parameter]] -): - """params is a list of tuples where the first element of each tuple is a - prefixed parameter name and the second element is the corresponding - Parameter object. 
This is created by calling .items() on a Parameters - object - """ - number_of_fitted_parameters = len(params) - number_of_default_params = len(default_param_names) - number_of_fitted_peaks = int(number_of_fitted_parameters / number_of_default_params) - split_param_items = [ - params[i : (i + number_of_default_params)] - for i in range(0, number_of_fitted_parameters, number_of_default_params) - ] - assert len(split_param_items) == number_of_fitted_peaks - return split_param_items - - -def create_parameter_dict(prefix, parameters: List[Tuple[str, Parameter]]): - parameter_dict = dict(prefix=prefix) - parameter_dict.update({k.replace(prefix, ""): v.value for k, v in parameters}) - parameter_dict.update( - {f"{k.replace(prefix,'')}_stderr": v.stderr for k, v in parameters} - ) - return parameter_dict - - -def get_prefix_from_parameter_names( - default_param_names: List, parameters: List[Tuple[str, Parameter]] -): - prefixes = [ - param_key_val[0].replace(default_param_name, "") - for param_key_val, default_param_name in zip(parameters, default_param_names) - ] - assert len(set(prefixes)) == 1 - return prefixes[0] - - -def unpack_fitted_parameters_for_lineshape( - lineshape: Lineshape, params: List[dict], plane_number: int -): - default_param_names = get_default_lineshape_param_names(lineshape) - split_parameter_names = split_parameter_sets_by_peak(default_param_names, params) - prefixes = [ - get_prefix_from_parameter_names(default_param_names, i) - for i in split_parameter_names - ] - unpacked_params = [] - for parameter_names, prefix in zip(split_parameter_names, prefixes): - parameter_dict = create_parameter_dict(prefix, parameter_names) - parameter_dict.update({"plane": plane_number}) - unpacked_params.append(parameter_dict) - return unpacked_params - - -def perform_initial_lineshape_fit_on_cluster_of_peaks( - fit_peak_cluster_input: FitPeakClusterInput, -) -> FitResult: - mod = fit_peak_cluster_input.mod - peak_slices = fit_peak_cluster_input.peak_slices - XY_slices = fit_peak_cluster_input.XY_slices - p_guess = fit_peak_cluster_input.p_guess - weights = fit_peak_cluster_input.weights - fit_method = fit_peak_cluster_input.fit_method - mask = fit_peak_cluster_input.mask - XY = fit_peak_cluster_input.XY - X, Y = XY - first_plane_data = fit_peak_cluster_input.first_plane_data - peak = fit_peak_cluster_input.last_peak - group = fit_peak_cluster_input.group - min_x = fit_peak_cluster_input.min_x - min_y = fit_peak_cluster_input.min_y - max_x = fit_peak_cluster_input.max_x - max_y = fit_peak_cluster_input.max_y - verbose = fit_peak_cluster_input.verbose - uc_dics = fit_peak_cluster_input.uc_dics - - out = mod.fit( - peak_slices, XY=XY_slices, params=p_guess, weights=weights, method=fit_method - ) - - if verbose: - console.print(out.fit_report(), style="bold") - - z_sim = mod.eval(XY=XY, params=out.params) - z_sim[~mask] = np.nan - z_plot = first_plane_data.copy() - z_plot[~mask] = np.nan - fit_str = "" - log = "" - - return FitResult( - out=out, - mask=mask, - fit_str=fit_str, - log=log, - group=group, - uc_dics=uc_dics, - min_x=min_x, - min_y=min_y, - max_x=max_x, - max_y=max_y, - X=X, - Y=Y, - Z=z_plot, - Z_sim=z_sim, - peak_slices=peak_slices, - XY_slices=XY_slices, - weights=weights, - mod=mod, - ) - - -def refit_peak_cluster_with_constraints( - fit_input: FitPeakClusterInput, fit_result: FitPeaksResult -): - fit_results = [] - for num, d in enumerate(fit_input.masked_plane_data): - plane_number = fit_input.plane_numbers[num] - fit_result.out.fit( - data=d, - params=fit_result.out.params, 
- weights=fit_result.weights, - ) - fit_results.extend( - unpack_fitted_parameters_for_lineshape( - fit_input.args.lineshape, - list(fit_result.out.params.items()), - plane_number, - ) - ) - return fit_results - - -def merge_unpacked_parameters_with_metadata(cluster_fit_df, group_of_peaks_df): - group_of_peaks_df["prefix"] = group_of_peaks_df.ASS.apply(to_prefix) - merged_cluster_fit_df = cluster_fit_df.merge(group_of_peaks_df, on="prefix") - return merged_cluster_fit_df - - -def update_cluster_df_with_fit_statistics(cluster_df, fit_result: ModelResult): - cluster_df["chisqr"] = fit_result.chisqr - cluster_df["redchi"] = fit_result.redchi - cluster_df["residual_sum"] = np.sum(fit_result.residual) - cluster_df["aic"] = fit_result.aic - cluster_df["bic"] = fit_result.bic - cluster_df["nfev"] = fit_result.nfev - cluster_df["ndata"] = fit_result.ndata - return cluster_df - - -def rename_columns_for_compatibility(df): - mapping = { - "amplitude": "amp", - "amplitude_stderr": "amp_err", - "X_AXIS": "init_center_x", - "Y_AXIS": "init_center_y", - "ASS": "assignment", - "MEMCNT": "memcnt", - "X_RADIUS": "x_radius", - "Y_RADIUS": "y_radius", - } - df = df.rename(columns=mapping) - return df - - -def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): - vclist_data = fit_input.args.vclist_data - df["vclist"] = df.plane.apply(lambda x: vclist_data[x]) - return df - - -def prepare_group_of_peaks_for_fitting(clustid, group, fit_peaks_input: FitPeaksInput): - lineshape_function = get_lineshape_function(fit_peaks_input.args.lineshape) - - first_plane_data = fit_peaks_input.data[0] - mask, peak = make_mask_from_peak_cluster(group, first_plane_data) - - x_radius = group.X_RADIUS.max() - y_radius = group.Y_RADIUS.max() - - max_x, min_x = get_limits_for_axis_in_points( - group_axis_points=group.X_AXISf, mask_radius_in_points=x_radius - ) - max_y, min_y = get_limits_for_axis_in_points( - group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius - ) - max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( - fit_peaks_input.data.shape, max_x, min_x, max_y, min_y - ) - selected_data = select_reference_planes_using_indices( - fit_peaks_input.data, fit_peaks_input.args.reference_plane_indices - ).sum(axis=0) - mod, p_guess = make_models( - lineshape_function, - group, - selected_data, - lineshape=fit_peaks_input.args.lineshape, - xy_bounds=fit_peaks_input.args.xy_bounds, - ) - peak_slices = slice_peaks_from_data_using_mask(fit_peaks_input.data, mask) - peak_slices = select_reference_planes_using_indices( - peak_slices, fit_peaks_input.args.reference_plane_indices - ) - peak_slices = select_planes_above_threshold_from_masked_data( - peak_slices, fit_peaks_input.args.initial_fit_threshold - ) - peak_slices = peak_slices.sum(axis=0) - - XY = make_meshgrid(fit_peaks_input.data.shape) - X, Y = XY - - XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) - weights = 1.0 / np.array([fit_peaks_input.args.noise] * len(np.ravel(peak_slices))) - return FitPeakClusterInput( - args=fit_peaks_input.args, - data=fit_peaks_input.data, - config=fit_peaks_input.config, - plane_numbers=fit_peaks_input.plane_numbers, - clustid=clustid, - group=group, - last_peak=peak, - mask=mask, - mod=mod, - p_guess=p_guess, - XY=XY, - peak_slices=peak_slices, - XY_slices=XY_slices, - weights=weights, - fit_method=Config.fit_method, - first_plane_data=first_plane_data, - uc_dics=fit_peaks_input.args.uc_dics, - min_x=min_x, - min_y=min_y, - max_x=max_x, - max_y=max_y, - verbose=fit_peaks_input.args.verbose, - ) - - -def 
fit_cluster_of_peaks(data_for_fitting: FitPeakClusterInput) -> pd.DataFrame: - fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks(data_for_fitting) - fit_result.out.params, float_str = set_parameters_to_fix_during_fit( - fit_result.out.params, data_for_fitting.args.to_fix - ) - fit_results = refit_peak_cluster_with_constraints(data_for_fitting, fit_result) - cluster_df = pd.DataFrame(fit_results) - cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out) - cluster_df["clustid"] = data_for_fitting.clustid - cluster_df = merge_unpacked_parameters_with_metadata( - cluster_df, data_for_fitting.group - ) - return cluster_df - - -def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult: - """Fit set of peak clusters to lineshape model - - :param peaks: peaklist with generated by peakipy read or edit - :type peaks: pd.DataFrame - - :param fit_input: Data structure containing input parameters (args, config and NMR data) - :type fit_input: FitPeaksInput - - :returns: Data structure containing pd.DataFrame with the fitted results and a log - :rtype: FitPeaksResult - """ - peak_clusters = peaks.groupby("CLUSTID") - filtered_peaks = filter_peak_clusters_by_max_cluster_size( - peak_clusters, fit_input.args.max_cluster_size - ) - peak_clusters = filtered_peaks.groupby("CLUSTID") - out_str = "" - cluster_dfs = [] - for clustid, peak_cluster in peak_clusters: - data_for_fitting = prepare_group_of_peaks_for_fitting( - clustid, - peak_cluster, - fit_input, - ) - if fit_input.args.jack_knife_sample_errors: - cluster_df = jack_knife_sample_errors(data_for_fitting) - else: - cluster_df = fit_cluster_of_peaks(data_for_fitting) - cluster_dfs.append(cluster_df) - df = pd.concat(cluster_dfs, ignore_index=True) - - df["lineshape"] = fit_input.args.lineshape.value - - if fit_input.args.vclist: - df = add_vclist_to_df(fit_input, df) - df = rename_columns_for_compatibility(df) - return FitPeaksResult(df=df, log=out_str) - - -def jack_knife_sample_errors(fit_input: FitPeakClusterInput) -> pd.DataFrame: - peak_slices = fit_input.peak_slices.copy() - XY_slices = fit_input.XY_slices.copy() - weights = fit_input.weights.copy() - masked_plane_data = fit_input.masked_plane_data.copy() - jk_results = [] - # first fit without jackknife - jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input) - jk_result["jack_knife_sample_index"] = 0 - jk_results.append(jk_result) - for i in np.arange(0, len(peak_slices), 10, dtype=int): - fit_input.peak_slices = np.delete(peak_slices, i, None) - XY_slices_0 = np.delete(XY_slices[0], i, None) - XY_slices_1 = np.delete(XY_slices[1], i, None) - fit_input.XY_slices = np.array([XY_slices_0, XY_slices_1]) - fit_input.weights = np.delete(weights, i, None) - fit_input.masked_plane_data = np.delete(masked_plane_data, i, axis=1) - jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input) - jk_result["jack_knife_sample_index"] = i + 1 - jk_results.append(jk_result) - return pd.concat(jk_results, ignore_index=True) diff --git a/peakipy/cli/spec.py b/peakipy/cli/spec.py deleted file mode 100644 index 4ffbdf6b..00000000 --- a/peakipy/cli/spec.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python3 -""" - Usage: spec.py - spec.py make - - Plot NMRPipe spectra overlays using nmrglue and matplotlib. This is my attempt to make a general script for - plotting NMR data. 
- - - Below is an example yaml file for input - - # This first block is global parameters which can be overridden by adding the desired argument - # to your list of spectra. One exception is "colors" which if set in global params overrides the - # color option set for individual spectra as the colors will now cycle through the chosen matplotlib - # colormap - cs: 10e5 # contour start - contour_num: 10 # number of contours - contour_factor: 1.2 # contour factor - colors: Set1 # must be matplotlib.cm colormap - - outname: ["overlay.pdf","overlay.png"] # either single value or list of output names - - # Here is where your list of spectra to plot goes - spectra: - - - fname: test.ft2 - label: write legend here - contour_num: 1 - linewidths: 1 - - Options: - -h --help - -v --version - - - Dependencies: - - -- python3 - -- matplotlib, pyyaml, numpy, nmrglue, pandas and docopt - - - peakipy - deconvolute overlapping NMR peaks - Copyright (C) 2019 Jacob Peter Brady - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -""" - -yaml_file = """ -########################################################################################################## -# This first block is global parameters which can be overridden by adding the desired argument # -# to your list of spectra. One exception is "colors" which if set in global params overrides the # -# color option set for individual spectra as the colors will now cycle through the chosen matplotlib # -# colormap # -########################################################################################################## - -cs: 10e5 # contour start -contour_num: 10 # number of contours -contour_factor: 1.2 # contour factor -colors: Set1 # must be matplotlib.cm colormap - -outname: ["overlay.pdf","overlay.png"] # either single value or list of output names -ncol: 1 # tells matplotlib how many columns to give the figure legend - if not set defaults to 2 - -# Here is where your list of spectra to plot goes -spectra: - - - fname: test.ft2 - label: some information - contour_num: 1 - linewidths: 1 -""" - - -def onpick(event): - thisline = event.artist - xdata = thisline.get_xdata() - ydata = thisline.get_ydata() - ind = event.ind - points = tuple(zip(xdata[ind], ydata[ind])) - print("onpick points:", points) diff --git a/peakipy/core.py b/peakipy/core.py deleted file mode 100644 index d0473a61..00000000 --- a/peakipy/core.py +++ /dev/null @@ -1,1950 +0,0 @@ -""" - - peakipy - deconvolute overlapping NMR peaks - Copyright (C) 2019 Jacob Peter Brady - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - - -""" - -import sys -import json -from datetime import datetime -from pathlib import Path -from typing import List -from enum import Enum -from dataclasses import dataclass, field - -import numpy as np -import nmrglue as ng -import pandas as pd -import textwrap -from rich import print -from rich.table import Table -from rich.console import Console - -from numpy import sqrt, log, pi, exp, finfo - -from lmfit import Model -from scipy.special import wofz - -from bokeh.palettes import Category20 -from scipy import ndimage -from skimage.morphology import square, binary_closing, disk, rectangle -from skimage.filters import threshold_otsu - -console = Console() -# constants -log2 = log(2) -π = pi -tiny = finfo(float).eps - - -class StrucEl(str, Enum): - square = "square" - disk = "disk" - rectangle = "rectangle" - mask_method = "mask_method" - - -class PeaklistFormat(str, Enum): - a2 = "a2" - a3 = "a3" - sparky = "sparky" - pipe = "pipe" - peakipy = "peakipy" - - -class OutFmt(str, Enum): - csv = "csv" - pkl = "pkl" - - -class Lineshape(str, Enum): - PV = "PV" - V = "V" - G = "G" - L = "L" - PV_PV = "PV_PV" - G_L = "G_L" - PV_G = "PV_G" - PV_L = "PV_L" - - -def gaussian(x, center=0.0, sigma=1.0): - r"""1-dimensional Gaussian function. - - gaussian(x, center, sigma) = - (1/(s2pi*sigma)) * exp(-(1.0*x-center)**2 / (2*sigma**2)) - - :math:`\\frac{1}{ \sqrt{2\pi} } exp \left( \\frac{-(x-center)^2}{2 \sigma^2} \\right)` - - :param x: x - :param center: center - :param sigma: sigma - :type x: numpy.array - :type center: float - :type sigma: float - - :return: 1-dimensional Gaussian - :rtype: numpy.array - - """ - return (1.0 / max(tiny, (sqrt(2 * π) * sigma))) * exp( - -((1.0 * x - center) ** 2) / max(tiny, (2 * sigma**2)) - ) - - -def lorentzian(x, center=0.0, sigma=1.0): - r"""1-dimensional Lorentzian function. - - lorentzian(x, center, sigma) = - (1/(1 + ((1.0*x-center)/sigma)**2)) / (pi*sigma) - - :math:`\\frac{1}{ 1+ \left( \\frac{x-center}{\sigma}\\right)^2} / (\pi\sigma)` - - :param x: x - :param center: center - :param sigma: sigma - :type x: numpy.array - :type center: float - :type sigma: float - - :return: 1-dimensional Lorenztian - :rtype: numpy.array - - """ - return (1.0 / (1 + ((1.0 * x - center) / max(tiny, sigma)) ** 2)) / max( - tiny, (π * sigma) - ) - - -def voigt(x, center=0.0, sigma=1.0, gamma=None): - r"""Return a 1-dimensional Voigt function. - - voigt(x, center, sigma, gamma) = - amplitude*wofz(z).real / (sigma*sqrt(2.0 * π)) - - :math:`V(x,\sigma,\gamma) = (\\frac{Re[\omega(z)]}{\sigma \sqrt{2\pi}})` - - :math:`z=\\frac{x+i\gamma}{\sigma\sqrt{2}}` - - see Voigt_ wiki - - .. 
_Voigt: https://en.wikipedia.org/wiki/Voigt_profile - - - :param x: x values - :type x: numpy array 1d - :param center: center of lineshape in points - :type center: float - :param sigma: sigma of gaussian - :type sigma: float - :param gamma: gamma of lorentzian - :type gamma: float - - :returns: Voigt lineshape - :rtype: numpy.array - - """ - if gamma is None: - gamma = sigma - - z = (x - center + 1j * gamma) / max(tiny, (sigma * sqrt(2.0))) - return wofz(z).real / max(tiny, (sigma * sqrt(2.0 * π))) - - -def pseudo_voigt(x, center=0.0, sigma=1.0, fraction=0.5): - r"""1-dimensional Pseudo-voigt function - - Superposition of Gaussian and Lorentzian function - - :math:`(1-\phi) G(x,center,\sigma_g) + \phi L(x, center, \sigma)` - - Where :math:`\phi` is the fraction of Lorentzian lineshape and :math:`G` and :math:`L` are Gaussian and - Lorentzian functions, respectively. - - :param x: data - :type x: numpy.array - :param center: center of peak - :type center: float - :param sigma: sigma of lineshape - :type sigma: float - :param fraction: fraction of lorentzian lineshape (between 0 and 1) - :type fraction: float - - :return: pseudo-voigt function - :rtype: numpy.array - - """ - sigma_g = sigma / sqrt(2 * log2) - pv = (1 - fraction) * gaussian(x, center, sigma_g) + fraction * lorentzian( - x, center, sigma - ) - return pv - - -def pvoigt2d( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - fraction=0.5, -): - r"""2D pseudo-voigt model - - :math:`(1-fraction) G(x,center,\sigma_{gx}) + (fraction) L(x, center, \sigma_x) * (1-fraction) G(y,center,\sigma_{gy}) + (fraction) L(y, center, \sigma_y)` - - :param XY: meshgrid of X and Y coordinates [X,Y] each with shape Z - :type XY: numpy.array - - :param amplitude: amplitude of peak - :type amplitude: float - - :param center_x: center of peak in x - :type center_x: float - - :param center_y: center of peak in x - :type center_y: float - - :param sigma_x: sigma of lineshape in x - :type sigma_x: float - - :param sigma_y: sigma of lineshape in y - :type sigma_y: float - - :param fraction: fraction of lorentzian lineshape (between 0 and 1) - :type fraction: float - - :return: flattened array of Z values (use Z.reshape(X.shape) for recovery) - :rtype: numpy.array - - """ - x, y = XY - pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) - pv_y = pseudo_voigt(y, center_y, sigma_y, fraction) - return amplitude * pv_x * pv_y - - -def pv_l( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - fraction=0.5, -): - """2D lineshape model with pseudo-voigt in x and lorentzian in y - - Arguments - ========= - - -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z - -- amplitude: peak amplitude (gaussian and lorentzian) - -- center_x: position of peak in x - -- center_y: position of peak in y - -- sigma_x: linewidth in x - -- sigma_y: linewidth in y - -- fraction: fraction of lorentzian in fit - - Returns - ======= - - -- flattened array of Z values (use Z.reshape(X.shape) for recovery) - - """ - - x, y = XY - pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) - pv_y = pseudo_voigt(y, center_y, sigma_y, 1.0) # lorentzian - return amplitude * pv_x * pv_y - - -def pv_g( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - fraction=0.5, -): - """2D lineshape model with pseudo-voigt in x and gaussian in y - - Arguments - --------- - - -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z - -- amplitude: peak amplitude (gaussian and lorentzian) - -- 
center_x: position of peak in x - -- center_y: position of peak in y - -- sigma_x: linewidth in x - -- sigma_y: linewidth in y - -- fraction: fraction of lorentzian in fit - - Returns - ------- - - -- flattened array of Z values (use Z.reshape(X.shape) for recovery) - - """ - x, y = XY - pv_x = pseudo_voigt(x, center_x, sigma_x, fraction) - pv_y = pseudo_voigt(y, center_y, sigma_y, 0.0) # gaussian - return amplitude * pv_x * pv_y - - -def pv_pv( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - fraction_x=0.5, - fraction_y=0.5, -): - """2D lineshape model with pseudo-voigt in x and pseudo-voigt in y - i.e. fraction_x and fraction_y params - - Arguments - ========= - - -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z - -- amplitude: peak amplitude (gaussian and lorentzian) - -- center_x: position of peak in x - -- center_y: position of peak in y - -- sigma_x: linewidth in x - -- sigma_y: linewidth in y - -- fraction_x: fraction of lorentzian in x - -- fraction_y: fraction of lorentzian in y - - Returns - ======= - - -- flattened array of Z values (use Z.reshape(X.shape) for recovery) - - """ - - x, y = XY - pv_x = pseudo_voigt(x, center_x, sigma_x, fraction_x) - pv_y = pseudo_voigt(y, center_y, sigma_y, fraction_y) - return amplitude * pv_x * pv_y - - -def gaussian_lorentzian( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - fraction=0.5, -): - """2D lineshape model with gaussian in x and lorentzian in y - - Arguments - ========= - - -- XY: meshgrid of X and Y coordinates [X,Y] each with shape Z - -- amplitude: peak amplitude (gaussian and lorentzian) - -- center_x: position of peak in x - -- center_y: position of peak in y - -- sigma_x: linewidth in x - -- sigma_y: linewidth in y - -- fraction: fraction of lorentzian in fit - - Returns - ======= - - -- flattened array of Z values (use Z.reshape(X.shape) for recovery) - - """ - x, y = XY - pv_x = pseudo_voigt(x, center_x, sigma_x, 0.0) # gaussian - pv_y = pseudo_voigt(y, center_y, sigma_y, 1.0) # lorentzian - return amplitude * pv_x * pv_y - - -def voigt2d( - XY, - amplitude=1.0, - center_x=0.5, - center_y=0.5, - sigma_x=1.0, - sigma_y=1.0, - gamma_x=1.0, - gamma_y=1.0, - fraction=0.5, -): - fraction = 0.5 - gamma_x = None - gamma_y = None - x, y = XY - voigt_x = voigt(x, center_x, sigma_x, gamma_x) - voigt_y = voigt(y, center_y, sigma_y, gamma_y) - return amplitude * voigt_x * voigt_y - - -def make_mask(data, c_x, c_y, r_x, r_y): - """Create and elliptical mask - - Generate an elliptical boolean mask with center c_x/c_y in points - with radii r_x and r_y. 
Used to generate fit mask - - :param data: 2D array - :type data: np.array - - :param c_x: x center - :type c_x: float - - :param c_y: y center - :type c_y: float - - :param r_x: radius in x - :type r_x: float - - :param r_y: radius in y - :type r_y: float - - :return: boolean mask of data.shape - :rtype: numpy.array - - """ - a, b = c_y, c_x - n_y, n_x = data.shape - y, x = np.ogrid[-a : n_y - a, -b : n_x - b] - mask = x**2.0 / r_x**2.0 + y**2.0 / r_y**2.0 <= 1.0 - return mask - - -def rmsd(residuals): - return np.sqrt(np.sum(residuals**2.0) / len(residuals)) - - -def fix_params(params, to_fix): - """Set parameters to fix - - - :param params: lmfit parameters - :type params: lmfit.Parameters - - :param to_fix: list of parameter name to fix - :type to_fix: list - - :return: updated parameter object - :rtype: lmfit.Parameters - - """ - for k in params: - for p in to_fix: - if p in k: - params[k].vary = False - - return params - - -def get_params(params, name): - ps = [] - ps_err = [] - names = [] - prefixes = [] - for k in params: - if name in k: - ps.append(params[k].value) - ps_err.append(params[k].stderr) - names.append(k) - prefixes.append(k.split(name)[0]) - return ps, ps_err, names, prefixes - - -@dataclass -class PeakLimits: - """Given a peak position and linewidth in points determine - the limits based on the data - - Arguments - --------- - peak: pd.DataFrame - peak is a row from a pandas dataframe - data: np.array - 2D numpy array - """ - - peak: pd.DataFrame - data: np.array - min_x: int = field(init=False) - max_x: int = field(init=False) - min_y: int = field(init=False) - max_y: int = field(init=False) - - def __post_init__(self): - assert self.peak.Y_AXIS <= self.data.shape[0] - assert self.peak.X_AXIS <= self.data.shape[1] - self.max_y = int(np.ceil(self.peak.Y_AXIS + self.peak.YW)) + 1 - if self.max_y > self.data.shape[0]: - self.max_y = self.data.shape[0] - self.max_x = int(np.ceil(self.peak.X_AXIS + self.peak.XW)) + 1 - if self.max_x > self.data.shape[1]: - self.max_x = self.data.shape[1] - - self.min_y = int(self.peak.Y_AXIS - self.peak.YW) - if self.min_y < 0: - self.min_y = 0 - self.min_x = int(self.peak.X_AXIS - self.peak.XW) - if self.min_x < 0: - self.min_x = 0 - - -def estimate_amplitude(peak, data): - assert len(data.shape) == 2 - limits = PeakLimits(peak, data) - amplitude_est = data[limits.min_y : limits.max_y, limits.min_x : limits.max_x].sum() - return amplitude_est - - -def make_param_dict(peaks, data, lineshape: Lineshape = Lineshape.PV): - """Make dict of parameter names using prefix""" - - param_dict = {} - - for _, peak in peaks.iterrows(): - str_form = lambda x: "%s%s" % (to_prefix(peak.ASS), x) - # using exact value of points (i.e decimal) - param_dict[str_form("center_x")] = peak.X_AXISf - param_dict[str_form("center_y")] = peak.Y_AXISf - # estimate peak volume - amplitude_est = estimate_amplitude(peak, data) - param_dict[str_form("amplitude")] = amplitude_est - # sigma linewidth esimate - param_dict[str_form("sigma_x")] = peak.XW / 2.0 - param_dict[str_form("sigma_y")] = peak.YW / 2.0 - - match lineshape: - case lineshape.V: - #  Voigt G sigma from linewidth esimate - param_dict[str_form("sigma_x")] = peak.XW / ( - 2.0 * sqrt(2.0 * log2) - ) # 3.6013 - param_dict[str_form("sigma_y")] = peak.YW / ( - 2.0 * sqrt(2.0 * log2) - ) # 3.6013 - #  Voigt L gamma from linewidth esimate - param_dict[str_form("gamma_x")] = peak.XW / 2.0 - param_dict[str_form("gamma_y")] = peak.YW / 2.0 - # height - # add height here - - case lineshape.G: - 
param_dict[str_form("fraction")] = 0.0 - case lineshape.L: - param_dict[str_form("fraction")] = 1.0 - case lineshape.PV_PV: - param_dict[str_form("fraction_x")] = 0.5 - param_dict[str_form("fraction_y")] = 0.5 - case _: - param_dict[str_form("fraction")] = 0.5 - - return param_dict - - -def to_prefix(x): - """ - Peak assignments with characters that are not compatible lmfit model naming - are converted to lmfit "safe" names. - - :param x: Peak assignment to be used as prefix for lmfit model - :type x: str - - :returns: lmfit model prefix (_Peak_assignment_) - :rtype: str - - """ - # must be string - if type(x) != str: - x = str(x) - - prefix = "_" + x - to_replace = [ - [".", "_"], - [" ", ""], - ["{", "_"], - ["}", "_"], - ["[", "_"], - ["]", "_"], - ["-", ""], - ["/", "or"], - ["?", "maybe"], - ["\\", ""], - ["(", "_"], - [")", "_"], - ["@", "_at_"], - ] - for p in to_replace: - prefix = prefix.replace(*p) - return prefix + "_" - - -def make_models( - model, - peaks, - data, - lineshape: Lineshape = Lineshape.PV, - xy_bounds=None, -): - """Make composite models for multiple peaks - - :param model: lineshape function - :type model: function - - :param peaks: instance of pandas.df.groupby("CLUSTID") - :type peaks: pandas.df.groupby("CLUSTID") - - :param data: NMR data - :type data: numpy.array - - :param lineshape: lineshape to use for fit (PV/G/L/PV_PV) - :type lineshape: str - - :param xy_bounds: bounds for peak centers (+/-x, +/-y) - :type xy_bounds: tuple - - :return mod: Composite lmfit model containing all peaks - :rtype mod: lmfit.CompositeModel - - :return p_guess: params for composite model with starting values - :rtype p_guess: lmfit.Parameters - - """ - if len(peaks) == 1: - # make model for first peak - mod = Model(model, prefix="%s" % to_prefix(peaks.ASS.iloc[0])) - # add parameters - param_dict = make_param_dict( - peaks, - data, - lineshape=lineshape, - ) - p_guess = mod.make_params(**param_dict) - - elif len(peaks) > 1: - # make model for first peak - first_peak, *remaining_peaks = peaks.iterrows() - mod = Model(model, prefix="%s" % to_prefix(first_peak[1].ASS)) - for _, peak in remaining_peaks: - mod += Model(model, prefix="%s" % to_prefix(peak.ASS)) - - param_dict = make_param_dict( - peaks, - data, - lineshape=lineshape, - ) - p_guess = mod.make_params(**param_dict) - # add Peak params to p_guess - - update_params(p_guess, param_dict, lineshape=lineshape, xy_bounds=xy_bounds) - - return mod, p_guess - - -def update_params( - params, param_dict, lineshape: Lineshape = Lineshape.PV, xy_bounds=None -): - """Update lmfit parameters with values from Peak - - :param params: lmfit parameters - :type params: lmfit.Parameters object - :param param_dict: parameters corresponding to each peak in fit - :type param_dict: dict - :param lineshape: Lineshape (PV, G, L, PV_PV etc.) 
- :type lineshape: Lineshape - :param xy_bounds: bounds on xy peak positions - :type xy_bounds: tuple - - :returns: None - :rtype: None - - ToDo - -- deal with boundaries - -- currently positions in points - - """ - for k, v in param_dict.items(): - params[k].value = v - # print("update", k, v) - if "center" in k: - if xy_bounds == None: - # no bounds set - pass - else: - if "center_x" in k: - # set x bounds - x_bound = xy_bounds[0] - params[k].min = v - x_bound - params[k].max = v + x_bound - elif "center_y" in k: - # set y bounds - y_bound = xy_bounds[1] - params[k].min = v - y_bound - params[k].max = v + y_bound - # pass - # print( - # "setting limit of %s, min = %.3e, max = %.3e" - # % (k, params[k].min, params[k].max) - # ) - elif "sigma" in k: - params[k].min = 0.0 - params[k].max = 1e4 - - elif "gamma" in k: - params[k].min = 0.0 - params[k].max = 1e4 - # print( - # "setting limit of %s, min = %.3e, max = %.3e" - # % (k, params[k].min, params[k].max) - # ) - elif "fraction" in k: - # fix weighting between 0 and 1 - params[k].min = 0.0 - params[k].max = 1.0 - - #  fix fraction of G or L - match lineshape: - case lineshape.G | lineshape.L: - params[k].vary = False - case lineshape.PV | lineshape.PV_PV: - params[k].vary = True - case _: - pass - - # return params - - -def run_log(log_name="run_log.txt"): - """Write log file containing time script was run and with which arguments""" - with open(log_name, "a") as log: - sys_argv = sys.argv - sys_argv[0] = Path(sys_argv[0]).name - run_args = " ".join(sys_argv) - time_stamp = datetime.now() - time_stamp = time_stamp.strftime("%A %d %B %Y at %H:%M") - log.write(f"# Script run on {time_stamp}:\n{run_args}\n") - - -def df_to_rich_table(df, title: str, columns: List[str], styles: str): - """Print dataframe using rich library - - Parameters - ---------- - df : pandas.DataFrame - title : str - title of table - columns : List[str] - list of column names (must be in df) - styles : List[str] - list of styles in same order as columns - """ - table = Table(title=title) - for col, style in zip(columns, styles): - table.add_column(col, style=style) - for _, row in df.iterrows(): - row = row[columns].values - str_row = [] - for i in row: - match i: - case str(): - str_row.append(f"{i}") - case float() if i > 1e5: - str_row.append(f"{i:.1e}") - case float(): - str_row.append(f"{i:.3f}") - case bool(): - str_row.append(f"{i}") - case int(): - str_row.append(f"{i}") - table.add_row(*str_row) - return table - - -def make_mask_from_peak_cluster(group, data): - mask = np.zeros(data.shape, dtype=bool) - for _, peak in group.iterrows(): - mask += make_mask( - data, peak.X_AXISf, peak.Y_AXISf, peak.X_RADIUS, peak.Y_RADIUS - ) - return mask, peak - - -def select_reference_planes_using_indices(data, indices: List[int]): - n_planes = data.shape[0] - if indices == []: - return data - - max_index = max(indices) - min_index = min(indices) - - if max_index >= n_planes: - raise IndexError( - f"Your data has {n_planes}. You selected plane {max_index} (allowed indices between 0 and {n_planes-1})" - ) - elif min_index < (-1 * n_planes): - raise IndexError( - f"Your data has {n_planes}. You selected plane {min_index} (allowed indices between -{n_planes} and {n_planes-1})" - ) - else: - data = data[indices] - return data - - -def select_planes_above_threshold_from_masked_data(data, threshold=None): - """This function returns planes with data above the threshold. - - It currently uses absolute intensity values. - Negative thresholds just result in return of the orignal data. 
- - """ - if threshold == None: - selected_data = data - else: - selected_data = data[np.abs(data).max(axis=1) > threshold] - - if selected_data.shape[0] == 0: - selected_data = data - - return selected_data - - -def get_lineshape_function(lineshape: Lineshape): - match lineshape: - case lineshape.PV | lineshape.G | lineshape.L: - lineshape_function = pvoigt2d - case lineshape.V: - lineshape_function = voigt2d - case lineshape.PV_PV: - lineshape_function = pv_pv - case lineshape.G_L: - lineshape_function = gaussian_lorentzian - case lineshape.PV_G: - lineshape_function = pv_g - case lineshape.PV_L: - lineshape_function = pv_l - case _: - raise Exception("No lineshape was selected!") - return lineshape_function - - -def slice_peaks_from_data_using_mask(data, mask): - peak_slices = np.array([d[mask] for d in data]) - return peak_slices - - -def get_limits_for_axis_in_points(group_axis_points, mask_radius_in_points): - max_point, min_point = ( - int(np.ceil(max(group_axis_points) + mask_radius_in_points + 1)), - int(np.floor(min(group_axis_points) - mask_radius_in_points)), - ) - return max_point, min_point - - -def deal_with_peaks_on_edge_of_spectrum(data_shape, max_x, min_x, max_y, min_y): - if min_y < 0: - min_y = 0 - - if min_x < 0: - min_x = 0 - - if max_y > data_shape[-2]: - max_y = data_shape[-2] - - if max_x > data_shape[-1]: - max_x = data_shape[-1] - return max_x, min_x, max_y, min_y - - -def make_meshgrid(data_shape): - # must be a better way to make the meshgrid - x = np.arange(data_shape[-1]) - y = np.arange(data_shape[-2]) - XY = np.meshgrid(x, y) - return XY - - -class Pseudo3D: - """Read dic, data from NMRGlue and dims from input to create a Pseudo3D dataset - - :param dic: from nmrglue.pipe.read - :type dic: dict - - :param data: data from nmrglue.pipe.read - :type data: numpy.array - - :param dims: dimension order i.e [0,1,2] where 0 = planes, 1 = f1, 2 = f2 - :type dims: list - """ - - def __init__(self, dic, data, dims): - # check dimensions - self._udic = ng.pipe.guess_udic(dic, data) - self._ndim = self._udic["ndim"] - - if self._ndim == 1: - err = f"""[red] - ########################################## - NMR Data should be either 2D or 3D - ########################################## - [/red]""" - # raise TypeError(err) - sys.exit(err) - - # check that spectrum has correct number of dims - elif self._ndim != len(dims): - err = f"""[red] - ################################################################# - Your spectrum has {self._ndim} dimensions with shape {data.shape} - but you have given a dimension order of {dims}... 
- ################################################################# - [/red]""" - # raise ValueError(err) - sys.exit(err) - - elif (self._ndim == 2) and (len(dims) == 2): - self._f1_dim, self._f2_dim = dims - self._planes = 0 - self._uc_f1 = ng.pipe.make_uc(dic, data, dim=self._f1_dim) - self._uc_f2 = ng.pipe.make_uc(dic, data, dim=self._f2_dim) - # make data pseudo3d - self._data = data.reshape((1, data.shape[0], data.shape[1])) - self._dims = [self._planes, self._f1_dim + 1, self._f2_dim + 1] - - else: - self._planes, self._f1_dim, self._f2_dim = dims - self._dims = dims - self._data = data - # make unit conversion dicts - self._uc_f2 = ng.pipe.make_uc(dic, data, dim=self._f2_dim) - self._uc_f1 = ng.pipe.make_uc(dic, data, dim=self._f1_dim) - - #  rearrange data if dims not in standard order - if self._dims != [0, 1, 2]: - # np.argsort returns indices of array for order 0,1,2 to transpose data correctly - # self._dims = np.argsort(self._dims) - self._data = np.transpose(data, self._dims) - - self._dic = dic - - self._f1_label = self._udic[self._f1_dim]["label"] - self._f2_label = self._udic[self._f2_dim]["label"] - - @property - def uc_f1(self): - """Return unit conversion dict for F1""" - return self._uc_f1 - - @property - def uc_f2(self): - """Return unit conversion dict for F2""" - return self._uc_f2 - - @property - def dims(self): - """Return dimension order""" - return self._dims - - @property - def data(self): - """Return array containing data""" - return self._data - - @data.setter - def data(self, data): - self._data = data - - @property - def dic(self): - return self._dic - - @property - def udic(self): - return self._udic - - @property - def ndim(self): - return self._ndim - - @property - def f1_label(self): - # dim label - return self._f1_label - - @property - def f2_label(self): - # dim label - return self._f2_label - - @property - def planes(self): - return self.dims[0] - - @property - def n_planes(self): - return self.data.shape[self.planes] - - @property - def f1(self): - return self.dims[1] - - @property - def f2(self): - return self.dims[2] - - # size of f1 and f2 in points - @property - def f2_size(self): - """Return size of f2 dimension in points""" - return self._udic[self._f2_dim]["size"] - - @property - def f1_size(self): - """Return size of f1 dimension in points""" - return self._udic[self._f1_dim]["size"] - - # points per ppm - @property - def pt_per_ppm_f1(self): - return self.f1_size / ( - self._udic[self._f1_dim]["sw"] / self._udic[self._f1_dim]["obs"] - ) - - @property - def pt_per_ppm_f2(self): - return self.f2_size / ( - self._udic[self._f2_dim]["sw"] / self._udic[self._f2_dim]["obs"] - ) - - # points per hz - @property - def pt_per_hz_f1(self): - return self.f1_size / self._udic[self._f1_dim]["sw"] - - @property - def pt_per_hz_f2(self): - return self.f2_size / self._udic[self._f2_dim]["sw"] - - # hz per point - @property - def hz_per_pt_f1(self): - return 1.0 / self.pt_per_hz_f1 - - @property - def hz_per_pt_f2(self): - return 1.0 / self.pt_per_hz_f2 - - # ppm per point - @property - def ppm_per_pt_f1(self): - return 1.0 / self.pt_per_ppm_f1 - - @property - def ppm_per_pt_f2(self): - return 1.0 / self.pt_per_ppm_f2 - - # get ppm limits for ppm scales - @property - def f2_ppm_scale(self): - return self.uc_f2.ppm_scale() - - @property - def f1_ppm_scale(self): - return self.uc_f1.ppm_scale() - - @property - def f2_ppm_limits(self): - return self.uc_f2.ppm_limits() - - @property - def f1_ppm_limits(self): - return self.uc_f1.ppm_limits() - - @property - def 
f1_ppm_max(self): - return max(self.f1_ppm_limits) - - @property - def f1_ppm_min(self): - return min(self.f1_ppm_limits) - - @property - def f2_ppm_max(self): - return max(self.f2_ppm_limits) - - @property - def f2_ppm_min(self): - return min(self.f2_ppm_limits) - - @property - def f2_ppm_0(self): - return self.f2_ppm_limits[0] - - @property - def f2_ppm_1(self): - return self.f2_ppm_limits[1] - - @property - def f1_ppm_0(self): - return self.f1_ppm_limits[0] - - @property - def f1_ppm_1(self): - return self.f1_ppm_limits[1] - - -class UnknownFormat(Exception): - pass - - -class Peaklist(Pseudo3D): - """Read analysis, sparky or NMRPipe peak list and convert to NMRPipe-ish format also find peak clusters - - Parameters - ---------- - path : path-like or str - path to peaklist - data_path : ndarray - NMRPipe format data - fmt : str - a2|a3|sparky|pipe - dims: list - [planes,y,x] - radii: list - [x,y] Mask radii in ppm - - - Methods - ------- - - clusters : - mask_method : - adaptive_clusters : - - Returns - ------- - df : pandas DataFrame - dataframe containing peaklist - - """ - - def __init__( - self, - path, - data_path, - fmt: PeaklistFormat = PeaklistFormat.a2, - dims=[0, 1, 2], - radii=[0.04, 0.4], - posF1="Position F2", - posF2="Position F1", - verbose=False, - ): - dic, data = ng.pipe.read(data_path) - Pseudo3D.__init__(self, dic, data, dims) - self.fmt = fmt - self.peaklist_path = path - self.data_path = data_path - self.verbose = verbose - self._radii = radii - self._thres = None - if self.verbose: - print( - "Points per hz f1 = %.3f, f2 = %.3f" - % (self.pt_per_hz_f1, self.pt_per_hz_f2) - ) - - self._analysis_to_pipe_dic = { - "#": "INDEX", - "Position F1": "X_PPM", - "Position F2": "Y_PPM", - "Line Width F1 (Hz)": "XW_HZ", - "Line Width F2 (Hz)": "YW_HZ", - "Height": "HEIGHT", - "Volume": "VOL", - } - self._assign_to_pipe_dic = { - "#": "INDEX", - "Pos F1": "X_PPM", - "Pos F2": "Y_PPM", - "LW F1 (Hz)": "XW_HZ", - "LW F2 (Hz)": "YW_HZ", - "Height": "HEIGHT", - "Volume": "VOL", - } - - self._sparky_to_pipe_dic = { - "index": "INDEX", - "w1": "X_PPM", - "w2": "Y_PPM", - "lw1 (hz)": "XW_HZ", - "lw2 (hz)": "YW_HZ", - "Height": "HEIGHT", - "Volume": "VOL", - "Assignment": "ASS", - } - - self._analysis_to_pipe_dic[posF1] = "Y_PPM" - self._analysis_to_pipe_dic[posF2] = "X_PPM" - - self._df = self.read_peaklist() - - def read_peaklist(self): - match self.fmt: - case self.fmt.a2: - self._df = self._read_analysis() - - case self.fmt.a3: - self._df = self._read_assign() - - case self.fmt.sparky: - self._df = self._read_sparky() - - case self.fmt.pipe: - self._df = self._read_pipe() - - case _: - raise UnknownFormat("I don't know this format: {self.fmt}") - - return self._df - - @property - def df(self): - return self._df - - @df.setter - def df(self, df): - self._df = df - return self._df - - @property - def radii(self): - return self._radii - - @property - def f2_radius(self): - """radius for fitting mask in f2""" - return self.radii[0] - - @property - def f1_radius(self): - """radius for fitting mask in f1""" - return self.radii[1] - - @property - def analysis_to_pipe_dic(self): - return self._analysis_to_pipe_dic - - @property - def assign_to_pipe_dic(self): - return self._assign_to_pipe_dic - - @property - def sparky_to_pipe_dic(self): - return self._sparky_to_pipe_dic - - @property - def thres(self): - if self._thres == None: - self._thres = abs(threshold_otsu(self.data[0])) - return self._thres - else: - return self._thres - - def update_df(self): - # int point value - 
self.df["X_AXIS"] = self.df.X_PPM.apply(lambda x: self.uc_f2(x, "ppm")) - self.df["Y_AXIS"] = self.df.Y_PPM.apply(lambda x: self.uc_f1(x, "ppm")) - # decimal point value - self.df["X_AXISf"] = self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) - self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) - # in case of missing values (should estimate though) - self.df["XW_HZ"] = self.df.XW_HZ.replace("None", "20.0") - self.df["YW_HZ"] = self.df.YW_HZ.replace("None", "20.0") - self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") - self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") - # convert linewidths to float - self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) - self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) - # convert Hz lw to points - self.df["XW"] = self.df.XW_HZ.apply(lambda x: x * self.pt_per_hz_f2) - self.df["YW"] = self.df.YW_HZ.apply(lambda x: x * self.pt_per_hz_f1) - # makes an assignment column from Assign F1 and Assign F2 columns - # in analysis2.x and ccpnmr v3 assign peak lists - if self.fmt in [PeaklistFormat.a2, PeaklistFormat.a3]: - self.df["ASS"] = self.df.apply( - # lambda i: "".join([i["Assign F1"], i["Assign F2"]]), axis=1 - lambda i: f"{i['Assign F1']}_{i['Assign F2']}", - axis=1, - ) - - # make default values for X and Y radii for fit masks - self.df["X_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f2_radius - self.df["Y_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f1_radius - self.df["X_RADIUS"] = self.df.X_RADIUS_PPM.apply( - lambda x: x * self.pt_per_ppm_f2 - ) - self.df["Y_RADIUS"] = self.df.Y_RADIUS_PPM.apply( - lambda x: x * self.pt_per_ppm_f1 - ) - # add include column - if "include" in self.df.columns: - pass - else: - self.df["include"] = self.df.apply(lambda x: "yes", axis=1) - - # check assignments for duplicates - self.check_assignments() - # check that peaks are within the bounds of the data - self.check_peak_bounds() - - def add_fix_bound_columns(self): - """add columns containing parameter bounds (param_upper/param_lower) - and whether or not parameter should be fixed (yes/no) - - For parameter bounding: - - Column names are _upper and _lower for upper and lower bounds respectively. - Values are given as floating point. Value of 0.0 indicates that parameter is unbounded - X/Y positions are given in ppm - Linewidths are given in Hz - - For parameter fixing: - - Column names are _fix. 
- Values are given as a string 'yes' or 'no' - - """ - pass - - def _read_analysis(self): - df = pd.read_csv(self.peaklist_path, delimiter="\t") - new_columns = [self.analysis_to_pipe_dic.get(i, i) for i in df.columns] - pipe_columns = dict(zip(df.columns, new_columns)) - df = df.rename(index=str, columns=pipe_columns) - - return df - - def _read_assign(self): - df = pd.read_csv(self.peaklist_path, delimiter="\t") - new_columns = [self.assign_to_pipe_dic.get(i, i) for i in df.columns] - pipe_columns = dict(zip(df.columns, new_columns)) - df = df.rename(index=str, columns=pipe_columns) - - return df - - def _read_sparky(self): - df = pd.read_csv( - self.peaklist_path, - skiprows=1, - sep=r"\s+", - names=["ASS", "Y_PPM", "X_PPM", "VOLUME", "HEIGHT", "YW_HZ", "XW_HZ"], - ) - df["INDEX"] = df.index - - return df - - def _read_pipe(self): - to_skip = 0 - with open(self.peaklist_path) as f: - lines = f.readlines() - for line in lines: - if line.startswith("VARS"): - columns = line.strip().split()[1:] - elif line[:5].strip(" ").isdigit(): - break - else: - to_skip += 1 - df = pd.read_csv( - self.peaklist_path, skiprows=to_skip, names=columns, sep=r"\s+" - ) - return df - - def check_assignments(self): - # self.df["ASS"] = self.df. - self.df["ASS"] = self.df.ASS.astype(object) - self.df.loc[self.df["ASS"].isnull(), "ASS"] = "None_dummy_0" - self.df["ASS"] = self.df.ASS.astype(str) - duplicates_bool = self.df.ASS.duplicated() - duplicates = self.df.ASS[duplicates_bool] - if len(duplicates) > 0: - console.print( - textwrap.dedent( - """ - ############################################################################# - You have duplicated assignments in your list... - Currently each peak needs a unique assignment. Sorry about that buddy... - ############################################################################# - """ - ), - style="yellow", - ) - self.df.loc[duplicates_bool, "ASS"] = [ - f"{i}_dummy_{num+1}" for num, i in enumerate(duplicates) - ] - if self.verbose: - print("Here are the duplicates") - print(duplicates) - print(self.df.ASS) - - print( - textwrap.dedent( - """ - Creating dummy assignments for duplicates - - """ - ) - ) - - def check_peak_bounds(self): - columns_to_print = ["INDEX", "ASS", "X_AXIS", "Y_AXIS", "X_PPM", "Y_PPM"] - # check that peaks are within the bounds of spectrum - within_x = (self.df.X_PPM < self.f2_ppm_max) & (self.df.X_PPM > self.f2_ppm_min) - within_y = (self.df.Y_PPM < self.f1_ppm_max) & (self.df.Y_PPM > self.f1_ppm_min) - self.excluded = self.df[~(within_x & within_y)] - self.df = self.df[within_x & within_y] - if len(self.excluded) > 0: - print( - textwrap.dedent( - f"""[red] - ################################################################################# - - Excluding the following peaks as they are not within the spectrum which has shape - - {self.data.shape} - [/red]""" - ) - ) - table_to_print = df_to_rich_table( - self.excluded, - title="Excluded", - columns=columns_to_print, - styles=["red" for i in columns_to_print], - ) - print(table_to_print) - print( - "[red]#################################################################################[/red]" - ) - - def clusters( - self, - thres=None, - struc_el: StrucEl = StrucEl.disk, - struc_size=(3,), - l_struc=None, - ): - """Find clusters of peaks - - :param thres: threshold for positive signals above which clusters are selected. 
If None then threshold_otsu is used - :type thres: float - - :param struc_el: 'square'|'disk'|'rectangle' - structuring element for binary_closing of thresholded data can be square, disc or rectangle - :type struc_el: str - - :param struc_size: size/dimensions of structuring element - for square and disk first element of tuple is used (for disk value corresponds to radius) - for rectangle, tuple corresponds to (width,height). - :type struc_size: tuple - - - """ - peaks = [[y, x] for y, x in zip(self.df.Y_AXIS, self.df.X_AXIS)] - - if thres == None: - thres = self.thres - self._thres = abs(threshold_otsu(self.data[0])) - else: - self._thres = thres - - # get positive and negative - thresh_data = np.bitwise_or( - self.data[0] < (self._thres * -1.0), self.data[0] > self._thres - ) - - match struc_el: - case struc_el.disk: - radius = struc_size[0] - if self.verbose: - print(f"using disk with {radius}") - closed_data = binary_closing(thresh_data, disk(int(radius))) - - case struc_el.square: - width = struc_size[0] - if self.verbose: - print(f"using square with {width}") - closed_data = binary_closing(thresh_data, square(int(width))) - - case struc_el.rectangle: - width, height = struc_size - if self.verbose: - print(f"using rectangle with {width} and {height}") - closed_data = binary_closing( - thresh_data, rectangle(int(width), int(height)) - ) - - case _: - if self.verbose: - print(f"Not using any closing function") - closed_data = thresh_data - - labeled_array, num_features = ndimage.label(closed_data, l_struc) - - self.df.loc[:, "CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] - - #  renumber "0" clusters - max_clustid = self.df["CLUSTID"].max() - n_of_zeros = len(self.df[self.df["CLUSTID"] == 0]["CLUSTID"]) - self.df.loc[self.df[self.df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( - max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int - ) - - # count how many peaks per cluster - for ind, group in self.df.groupby("CLUSTID"): - self.df.loc[group.index, "MEMCNT"] = len(group) - - self.df.loc[:, "color"] = self.df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", - axis=1, - ) - return ClustersResult(labeled_array, num_features, closed_data, peaks) - - def mask_method(self, overlap=1.0, l_struc=None): - """connect clusters based on overlap of fitting masks - - :param overlap: fraction of mask for which overlaps are calculated - :type overlap: float - - :returns ClusterResult: Instance of ClusterResult - :rtype: ClustersResult - """ - # overlap is positive - overlap = abs(overlap) - - self._thres = threshold_otsu(self.data[0]) - - mask = np.zeros(self.data[0].shape, dtype=bool) - - for ind, peak in self.df.iterrows(): - mask += make_mask( - self.data[0], - peak.X_AXISf, - peak.Y_AXISf, - peak.X_RADIUS * overlap, - peak.Y_RADIUS * overlap, - ) - - peaks = [[y, x] for y, x in zip(self.df.Y_AXIS, self.df.X_AXIS)] - labeled_array, num_features = ndimage.label(mask, l_struc) - - self.df.loc[:, "CLUSTID"] = [labeled_array[i[0], i[1]] for i in peaks] - - #  renumber "0" clusters - max_clustid = self.df["CLUSTID"].max() - n_of_zeros = len(self.df[self.df["CLUSTID"] == 0]["CLUSTID"]) - self.df.loc[self.df[self.df["CLUSTID"] == 0].index, "CLUSTID"] = np.arange( - max_clustid + 1, n_of_zeros + max_clustid + 1, dtype=int - ) - - # count how many peaks per cluster - for ind, group in self.df.groupby("CLUSTID"): - self.df.loc[group.index, "MEMCNT"] = len(group) - - self.df.loc[:, "color"] = self.df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if 
x.MEMCNT > 1 else "black", - axis=1, - ) - - return ClustersResult(labeled_array, num_features, mask, peaks) - - def to_fuda(self, fname="params.fuda"): - with open("peaks.fuda", "w") as peaks_fuda: - for ass, f1_ppm, f2_ppm in zip(self.df.ASS, self.df.Y_PPM, self.df.X_PPM): - peaks_fuda.write(f"{ass}\t{f1_ppm:.3f}\t{f2_ppm:.3f}\n") - groups = self.df.groupby("CLUSTID") - fuda_params = Path(fname) - overlap_peaks = "" - - for ind, group in groups: - if len(group) > 1: - overlap_peaks_str = ";".join(group.ASS) - overlap_peaks += f"OVERLAP_PEAKS=({overlap_peaks_str})\n" - - fuda_file = textwrap.dedent( - f"""\ - -# Read peaklist and spectrum info -PEAKLIST=peaks.fuda -SPECFILE={self.data_path} -PARAMETERFILE=(bruker;vclist) -ZCORR=ncyc -NOISE={self.thres} # you'll need to adjust this -BASELINE=N -VERBOSELEVEL=5 -PRINTDATA=Y -LM=(MAXFEV=250;TOL=1e-5) -#Specify the default values. All values are in ppm: -DEF_LINEWIDTH_F1={self.f1_radius} -DEF_LINEWIDTH_F2={self.f2_radius} -DEF_RADIUS_F1={self.f1_radius} -DEF_RADIUS_F2={self.f2_radius} -SHAPE=GLORE -# OVERLAP PEAKS -{overlap_peaks}""" - ) - with open(fuda_params, "w") as f: - print(f"Writing FuDA file {fuda_file}") - f.write(fuda_file) - if self.verbose: - print(overlap_peaks) - - -class ClustersResult: - """Class to store results of clusters function""" - - def __init__(self, labeled_array, num_features, closed_data, peaks): - self._labeled_array = labeled_array - self._num_features = num_features - self._closed_data = closed_data - self._peaks = peaks - - @property - def labeled_array(self): - return self._labeled_array - - @property - def num_features(self): - return self._num_features - - @property - def closed_data(self): - return self._closed_data - - @property - def peaks(self): - return self._peaks - - -class LoadData(Peaklist): - """Load peaklist data from peakipy .csv file output from either peakipy read or edit - - read_peaklist is redefined to just read a .csv file - - check_data_frame makes sure data frame is in good shape for setting up fits - - """ - - def read_peaklist(self): - if self.peaklist_path.suffix == ".csv": - self.df = pd.read_csv(self.peaklist_path) # , comment="#") - - elif self.peaklist_path.suffix == ".tab": - self.df = pd.read_csv(self.peaklist_path, sep="\t") # comment="#") - - else: - self.df = pd.read_pickle(self.peaklist_path) - - self._thres = threshold_otsu(self.data[0]) - - return self.df - - def check_data_frame(self): - # make diameter columns - if "X_DIAMETER_PPM" in self.df.columns: - pass - else: - self.df["X_DIAMETER_PPM"] = self.df["X_RADIUS_PPM"] * 2.0 - self.df["Y_DIAMETER_PPM"] = self.df["Y_RADIUS_PPM"] * 2.0 - - #  make a column to track edited peaks - if "Edited" in self.df.columns: - pass - else: - self.df["Edited"] = np.zeros(len(self.df), dtype=bool) - - # create include column if it doesn't exist - if "include" in self.df.columns: - pass - else: - self.df["include"] = self.df.apply(lambda _: "yes", axis=1) - - # color clusters - self.df["color"] = self.df.apply( - lambda x: Category20[20][int(x.CLUSTID) % 20] if x.MEMCNT > 1 else "black", - axis=1, - ) - - # get rid of unnamed columns - unnamed_cols = [i for i in self.df.columns if "Unnamed:" in i] - self.df = self.df.drop(columns=unnamed_cols) - - def update_df(self): - """Slightly modified to retain previous configurations""" - # int point value - self.df["X_AXIS"] = self.df.X_PPM.apply(lambda x: self.uc_f2(x, "ppm")) - self.df["Y_AXIS"] = self.df.Y_PPM.apply(lambda x: self.uc_f1(x, "ppm")) - # decimal point value - self.df["X_AXISf"] = 
self.df.X_PPM.apply(lambda x: self.uc_f2.f(x, "ppm")) - self.df["Y_AXISf"] = self.df.Y_PPM.apply(lambda x: self.uc_f1.f(x, "ppm")) - # in case of missing values (should estimate though) - self.df["XW_HZ"] = self.df.XW_HZ.replace(np.NaN, "20.0") - self.df["YW_HZ"] = self.df.YW_HZ.replace(np.NaN, "20.0") - # convert linewidths to float - self.df["XW_HZ"] = self.df.XW_HZ.apply(lambda x: float(x)) - self.df["YW_HZ"] = self.df.YW_HZ.apply(lambda x: float(x)) - # convert Hz lw to points - self.df["XW"] = self.df.XW_HZ.apply(lambda x: x * self.pt_per_hz_f2) - self.df["YW"] = self.df.YW_HZ.apply(lambda x: x * self.pt_per_hz_f1) - # makes an assignment column - if self.fmt == "a2": - self.df["ASS"] = self.df.apply( - lambda i: "".join([i["Assign F1"], i["Assign F2"]]), axis=1 - ) - - # make default values for X and Y radii for fit masks - # self.df["X_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f2_radius - # self.df["Y_RADIUS_PPM"] = np.zeros(len(self.df)) + self.f1_radius - self.df["X_RADIUS"] = self.df.X_RADIUS_PPM.apply( - lambda x: x * self.pt_per_ppm_f2 - ) - self.df["Y_RADIUS"] = self.df.Y_RADIUS_PPM.apply( - lambda x: x * self.pt_per_ppm_f1 - ) - # add include column - if "include" in self.df.columns: - pass - else: - self.df["include"] = self.df.apply(lambda x: "yes", axis=1) - - # check assignments for duplicates - self.check_assignments() - # check that peaks are within the bounds of the data - self.check_peak_bounds() - - -def load_config(config_path): - if config_path.exists(): - with open(config_path) as opened_config: - config_dic = json.load(opened_config) - return config_dic - else: - return {} - - -def write_config(config_path, config_dic): - with open(config_path, "w") as config: - config.write(json.dumps(config_dic, sort_keys=True, indent=4)) - - -def update_config_file(config_path, config_kvs): - config_dic = load_config(config_path) - config_dic.update(config_kvs) - write_config(config_path, config_dic) - return config_dic - - -def update_args_with_values_from_config_file(args, config_path="peakipy.config"): - """read a peakipy config file, extract params and update args dict - - :param args: dict containing params extracted from docopt command line - :type args: dict - :param config_path: path to peakipy config file [default: peakipy.config] - :type config_path: str - - :returns args: updated args dict - :rtype args: dict - :returns config: dict that resulted from reading config file - :rtype config: dict - - """ - # update args with values from peakipy.config file - config_path = Path(config_path) - if config_path.exists(): - try: - config = load_config(config_path) - print( - f"[green]Using config file with dims [yellow]{config.get('dims')}[/yellow][/green]" - ) - args["dims"] = config.get("dims", (0, 1, 2)) - noise = config.get("noise") - if noise: - noise = float(noise) - - colors = config.get("colors", ["#5e3c99", "#e66101"]) - except json.decoder.JSONDecodeError: - print( - "[red]Your peakipy.config file is corrupted - maybe your JSON is not correct...[/red]" - ) - print("[red]Not using[/red]") - noise = False - colors = args.get("colors", "#5e3c99,#e66101").strip().split(",") - else: - print( - "[red]No peakipy.config found - maybe you need to generate one with peakipy read or see docs[/red]" - ) - noise = False - colors = args.get("colors", "#5e3c99,#e66101").strip().split(",") - config = {} - - args["noise"] = noise - args["colors"] = colors - - return args, config - - -def calculate_height_for_voigt_lineshape(df): - df["height"] = df.apply( - lambda x: voigt2d( - 
XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - gamma_x=x.gamma_x, - gamma_y=x.gamma_y, - amplitude=x.amp, - ), - axis=1, - ) - df["height_err"] = df.apply( - lambda x: x.amp_err * (x.height / x.amp) if x.amp_err != None else 0.0, - axis=1, - ) - return df - - -def calculate_fwhm_for_voigt_lineshape(df): - df["fwhm_g_x"] = df.sigma_x.apply( - lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0)) - ) # fwhm of gaussian - df["fwhm_g_y"] = df.sigma_y.apply(lambda x: 2.0 * x * np.sqrt(2.0 * np.log(2.0))) - df["fwhm_l_x"] = df.gamma_x.apply(lambda x: 2.0 * x) # fwhm of lorentzian - df["fwhm_l_y"] = df.gamma_y.apply(lambda x: 2.0 * x) - df["fwhm_x"] = df.apply( - lambda x: 0.5346 * x.fwhm_l_x - + np.sqrt(0.2166 * x.fwhm_l_x**2.0 + x.fwhm_g_x**2.0), - axis=1, - ) - df["fwhm_y"] = df.apply( - lambda x: 0.5346 * x.fwhm_l_y - + np.sqrt(0.2166 * x.fwhm_l_y**2.0 + x.fwhm_g_y**2.0), - axis=1, - ) - return df - - -def calculate_height_for_pseudo_voigt_lineshape(df): - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=x.fraction, - ), - axis=1, - ) - df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) - return df - - -def calculate_fwhm_for_pseudo_voigt_lineshape(df): - df["fwhm_x"] = df.sigma_x.apply(lambda x: x * 2.0) - df["fwhm_y"] = df.sigma_y.apply(lambda x: x * 2.0) - return df - - -def calculate_height_for_gaussian_lineshape(df): - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=0.0, # gaussian - ), - axis=1, - ) - df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) - return df - - -def calculate_height_for_lorentzian_lineshape(df): - df["height"] = df.apply( - lambda x: pvoigt2d( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction=1.0, # lorentzian - ), - axis=1, - ) - df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) - return df - - -def calculate_height_for_pv_pv_lineshape(df): - df["height"] = df.apply( - lambda x: pv_pv( - XY=[0, 0], - center_x=0.0, - center_y=0.0, - sigma_x=x.sigma_x, - sigma_y=x.sigma_y, - amplitude=x.amp, - fraction_x=x.fraction_x, - fraction_y=x.fraction_y, - ), - axis=1, - ) - df["height_err"] = df.apply(lambda x: x.amp_err * (x.height / x.amp), axis=1) - return df - - -def calculate_peak_centers_in_ppm(df, peakipy_data): - #  convert values to ppm - df["center_x_ppm"] = df.center_x.apply(lambda x: peakipy_data.uc_f2.ppm(x)) - df["center_y_ppm"] = df.center_y.apply(lambda x: peakipy_data.uc_f1.ppm(x)) - df["init_center_x_ppm"] = df.init_center_x.apply( - lambda x: peakipy_data.uc_f2.ppm(x) - ) - df["init_center_y_ppm"] = df.init_center_y.apply( - lambda x: peakipy_data.uc_f1.ppm(x) - ) - return df - - -def calculate_peak_linewidths_in_hz(df, peakipy_data): - df["sigma_x_ppm"] = df.sigma_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) - df["sigma_y_ppm"] = df.sigma_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) - df["fwhm_x_ppm"] = df.fwhm_x.apply(lambda x: x * peakipy_data.ppm_per_pt_f2) - df["fwhm_y_ppm"] = df.fwhm_y.apply(lambda x: x * peakipy_data.ppm_per_pt_f1) - df["fwhm_x_hz"] = df.fwhm_x.apply(lambda x: x * peakipy_data.hz_per_pt_f2) - df["fwhm_y_hz"] = df.fwhm_y.apply(lambda x: x * peakipy_data.hz_per_pt_f1) - return df diff --git a/peakipy/fitting.py 
b/peakipy/fitting.py
index 915ebfae..e99f6aa4 100644
--- a/peakipy/fitting.py
+++ b/peakipy/fitting.py
@@ -1,13 +1,25 @@
+from pathlib import Path
 from dataclasses import dataclass, field
-from typing import List
+from typing import List, Tuple, Optional
 
 import numpy as np
 from numpy import sqrt
 import pandas as pd
-from lmfit import Model
+from rich.console import Console
+from lmfit import Model, Parameters, Parameter
+from lmfit.model import ModelResult
 from pydantic import BaseModel
 
-from peakipy.lineshapes import Lineshape, pvoigt2d, pv_pv, pv_g, pv_l, voigt2d
+from peakipy.lineshapes import (
+    Lineshape,
+    pvoigt2d,
+    pv_pv,
+    pv_g,
+    pv_l,
+    voigt2d,
+    gaussian_lorentzian,
+    get_lineshape_function,
+)
 from peakipy.constants import log2
 
 
@@ -643,3 +655,523 @@ def simulate_lineshapes_from_fitted_peak_parameters(
         sim_data += sim_data_i
         sim_data_singles.append(sim_data_i)
     return sim_data, sim_data_singles
+
+
+@dataclass
+class FitPeaksArgs:
+    noise: float
+    uc_dics: dict
+    lineshape: Lineshape
+    dims: List[int] = field(default_factory=lambda: [0, 1, 2])
+    colors: Tuple[str, str] = ("#5e3c99", "#e66101")
+    max_cluster_size: Optional[int] = None
+    to_fix: List[str] = field(default_factory=lambda: ["fraction", "sigma", "center"])
+    xy_bounds: Optional[Tuple[float, float]] = None
+    vclist: Optional[Path] = None
+    plane: Optional[List[int]] = None
+    exclude_plane: Optional[List[int]] = None
+    reference_plane_indices: List[int] = field(default_factory=list)
+    initial_fit_threshold: Optional[float] = None
+    jack_knife_sample_errors: bool = False
+    mp: bool = True
+    verbose: bool = False
+    vclist_data: Optional[np.array] = None
+
+
+@dataclass
+class Config:
+    fit_method: str = "leastsq"
+
+
+@dataclass
+class FitPeaksInput:
+    """input data for the fit_peaks function"""
+
+    args: FitPeaksArgs
+    data: np.array
+    config: Config
+    plane_numbers: list
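+
+# Illustrative sketch only (hypothetical variable names and values, not part
+# of the peakipy API): the fit command assembles one FitPeaksArgs/FitPeaksInput
+# pair per run, roughly as
+#
+#     args = FitPeaksArgs(
+#         noise=8.5e3,                          # e.g. read from peakipy.config
+#         uc_dics={"f1": uc_f1, "f2": uc_f2},   # nmrglue unit converters
+#         lineshape=Lineshape.PV,
+#         initial_fit_threshold=1.0e5,
+#     )
+#     fit_input = FitPeaksInput(
+#         args=args,
+#         data=pseudo3D.data,                   # shape (planes, f1, f2)
+#         config=Config(fit_method="leastsq"),
+#         plane_numbers=list(range(pseudo3D.n_planes)),
+#     )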
+
+
+@dataclass
+class FitPeakClusterInput:
+    args: FitPeaksArgs
+    data: np.array
+    config: Config
+    plane_numbers: list
+    clustid: int
+    group: pd.DataFrame
+    last_peak: pd.DataFrame
+    mask: np.array
+    mod: Model
+    p_guess: Parameters
+    XY: np.array
+    peak_slices: np.array
+    XY_slices: np.array
+    min_x: float
+    max_x: float
+    min_y: float
+    max_y: float
+    uc_dics: dict
+    first_plane_data: np.array
+    weights: np.array
+    fit_method: str = "leastsq"
+    verbose: bool = False
+    masked_plane_data: np.array = field(init=False)
+
+    def __post_init__(self):
+        self.masked_plane_data = np.array([d[self.mask] for d in self.data])
+
+
+@dataclass
+class FitResult:
+    out: ModelResult
+    mask: np.array
+    fit_str: str
+    log: str
+    group: pd.core.groupby.generic.DataFrameGroupBy
+    uc_dics: dict
+    min_x: float
+    min_y: float
+    max_x: float
+    max_y: float
+    X: np.array
+    Y: np.array
+    Z: np.array
+    Z_sim: np.array
+    peak_slices: np.array
+    XY_slices: np.array
+    weights: np.array
+    mod: Model
+
+    def check_shifts(self):
+        """Calculate difference between initial peak positions
+        and check whether they moved too much from original
+        position
+
+        """
+        pass
+
+
+@dataclass
+class FitPeaksResult:
+    df: pd.DataFrame
+    log: str
+
+
+class FitPeaksResultDfRow(BaseModel):
+    fit_prefix: str
+    assignment: str
+    amp: float
+    amp_err: float
+    center_x: float
+    init_center_x: float
+    center_y: float
+    init_center_y: float
+    sigma_x: float
+    sigma_y: float
+    clustid: int
+    memcnt: int
+    plane: int
+    x_radius: float
+    y_radius: float
+    x_radius_ppm: float
+    y_radius_ppm: float
+    lineshape: str
+    aic: float
+    chisqr: float
+    redchi: float
+    residual_sum: float
+    height: float
+    height_err: float
+    fwhm_x: float
+    fwhm_y: float
+    center_x_ppm: float
+    center_y_ppm: float
+    init_center_x_ppm: float
+    init_center_y_ppm: float
+    sigma_x_ppm: float
+    sigma_y_ppm: float
+    fwhm_x_ppm: float
+    fwhm_y_ppm: float
+    fwhm_x_hz: float
+    fwhm_y_hz: float
+    jack_knife_sample_index: Optional[int]
+
+
+class FitPeaksResultRowGLPV(FitPeaksResultDfRow):
+    fraction: float
+
+
+class FitPeaksResultRowPVPV(FitPeaksResultDfRow):
+    fraction_x: float  # for PV_PV model
+    fraction_y: float  # for PV_PV model
+
+
+class FitPeaksResultRowVoigt(FitPeaksResultDfRow):
+    gamma_x_ppm: float  # for voigt
+    gamma_y_ppm: float  # for voigt
+
+
+def get_fit_peaks_result_validation_model(lineshape):
+    match lineshape:
+        case lineshape.V:
+            validation_model = FitPeaksResultRowVoigt
+        case lineshape.PV_PV:
+            validation_model = FitPeaksResultRowPVPV
+        case _:
+            validation_model = FitPeaksResultRowGLPV
+    return validation_model
+
+
+def filter_peak_clusters_by_max_cluster_size(grouped_peak_clusters, max_cluster_size):
+    filtered_peak_clusters = grouped_peak_clusters.filter(
+        lambda x: len(x) <= max_cluster_size
+    )
+    return filtered_peak_clusters
+
+
+def set_parameters_to_fix_during_fit(first_plane_fit_params, to_fix):
+    # fix sigma, center and fraction parameters
+    # could add an option to select params to fix
+    match to_fix:
+        case None | () | []:
+            float_str = "Floating all parameters"
+            parameter_set = first_plane_fit_params
+        case ["None"] | ["none"]:
+            float_str = "Floating all parameters"
+            parameter_set = first_plane_fit_params
+        case _:
+            float_str = f"Fixing parameters: {to_fix}"
+            parameter_set = fix_params(first_plane_fit_params, to_fix)
+    return parameter_set, float_str
+
+
+def get_default_lineshape_param_names(lineshape: Lineshape):
+    match lineshape:
+        case Lineshape.PV | Lineshape.G | Lineshape.L:
+            param_names = Model(pvoigt2d).param_names
+        case Lineshape.V:
+            param_names = Model(voigt2d).param_names
+        case Lineshape.PV_PV:
+            param_names = Model(pv_pv).param_names
+        case _:
+            # PV_G, PV_L and G_L share pvoigt2d's parameter names
+            param_names = Model(pvoigt2d).param_names
+    return param_names
+
+
+def split_parameter_sets_by_peak(
+    default_param_names: List, params: List[Tuple[str, Parameter]]
+):
+    """params is a list of tuples where the first element of each tuple is a
+    prefixed parameter name and the second element is the corresponding
+    Parameter object. This is created by calling .items() on a Parameters
+    object
+    """
+    number_of_fitted_parameters = len(params)
+    number_of_default_params = len(default_param_names)
+    number_of_fitted_peaks = int(number_of_fitted_parameters / number_of_default_params)
+    split_param_items = [
+        params[i : (i + number_of_default_params)]
+        for i in range(0, number_of_fitted_parameters, number_of_default_params)
+    ]
+    assert len(split_param_items) == number_of_fitted_peaks
+    return split_param_items
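+
+# Worked example (hypothetical peak prefixes): a two-peak pseudo-Voigt cluster
+# is fitted with 2 x 6 parameters, so the 12-item list of (name, Parameter)
+# tuples is split back into one 6-item set per peak:
+#
+#     default_param_names = Model(pvoigt2d).param_names
+#     # ['amplitude', 'center_x', 'center_y', 'sigma_x', 'sigma_y', 'fraction']
+#     params = list(fit_result.out.params.items())
+#     # [('_peak1_amplitude', <Parameter>), ..., ('_peak2_fraction', <Parameter>)]
+#     split_parameter_sets_by_peak(default_param_names, params)
+#     # -> [params[0:6], params[6:12]]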
+
+
+def create_parameter_dict(prefix, parameters: List[Tuple[str, Parameter]]):
+    parameter_dict = dict(prefix=prefix)
+    parameter_dict.update({k.replace(prefix, ""): v.value for k, v in parameters})
+    parameter_dict.update(
+        {f"{k.replace(prefix, '')}_stderr": v.stderr for k, v in parameters}
+    )
+    return parameter_dict
+
+
+def get_prefix_from_parameter_names(
+    default_param_names: List, parameters: List[Tuple[str, Parameter]]
+):
+    prefixes = [
+        param_key_val[0].replace(default_param_name, "")
+        for param_key_val, default_param_name in zip(parameters, default_param_names)
+    ]
+    assert len(set(prefixes)) == 1
+    return prefixes[0]
+
+
+def unpack_fitted_parameters_for_lineshape(
+    lineshape: Lineshape, params: List[Tuple[str, Parameter]], plane_number: int
+):
+    default_param_names = get_default_lineshape_param_names(lineshape)
+    split_parameter_names = split_parameter_sets_by_peak(default_param_names, params)
+    prefixes = [
+        get_prefix_from_parameter_names(default_param_names, i)
+        for i in split_parameter_names
+    ]
+    unpacked_params = []
+    for parameter_names, prefix in zip(split_parameter_names, prefixes):
+        parameter_dict = create_parameter_dict(prefix, parameter_names)
+        parameter_dict.update({"plane": plane_number})
+        unpacked_params.append(parameter_dict)
+    return unpacked_params
+
+
+def perform_initial_lineshape_fit_on_cluster_of_peaks(
+    fit_peak_cluster_input: FitPeakClusterInput,
+) -> FitResult:
+    mod = fit_peak_cluster_input.mod
+    peak_slices = fit_peak_cluster_input.peak_slices
+    XY_slices = fit_peak_cluster_input.XY_slices
+    p_guess = fit_peak_cluster_input.p_guess
+    weights = fit_peak_cluster_input.weights
+    fit_method = fit_peak_cluster_input.fit_method
+    mask = fit_peak_cluster_input.mask
+    XY = fit_peak_cluster_input.XY
+    X, Y = XY
+    first_plane_data = fit_peak_cluster_input.first_plane_data
+    peak = fit_peak_cluster_input.last_peak
+    group = fit_peak_cluster_input.group
+    min_x = fit_peak_cluster_input.min_x
+    min_y = fit_peak_cluster_input.min_y
+    max_x = fit_peak_cluster_input.max_x
+    max_y = fit_peak_cluster_input.max_y
+    verbose = fit_peak_cluster_input.verbose
+    uc_dics = fit_peak_cluster_input.uc_dics
+
+    out = mod.fit(
+        peak_slices, XY=XY_slices, params=p_guess, weights=weights, method=fit_method
+    )
+
+    if verbose:
+        Console().print(out.fit_report(), style="bold")
+
+    z_sim = mod.eval(XY=XY, params=out.params)
+    z_sim[~mask] = np.nan
+    z_plot = first_plane_data.copy()
+    z_plot[~mask] = np.nan
+    fit_str = ""
+    log = ""
+
+    return FitResult(
+        out=out,
+        mask=mask,
+        fit_str=fit_str,
+        log=log,
+        group=group,
+        uc_dics=uc_dics,
+        min_x=min_x,
+        min_y=min_y,
+        max_x=max_x,
+        max_y=max_y,
+        X=X,
+        Y=Y,
+        Z=z_plot,
+        Z_sim=z_sim,
+        peak_slices=peak_slices,
+        XY_slices=XY_slices,
+        weights=weights,
+        mod=mod,
+    )
+
+
+def refit_peak_cluster_with_constraints(
+    fit_input: FitPeakClusterInput, fit_result: FitResult
+):
+    fit_results = []
+    for num, d in enumerate(fit_input.masked_plane_data):
+        plane_number = fit_input.plane_numbers[num]
+        fit_result.out.fit(
+            data=d,
+            params=fit_result.out.params,
+            weights=fit_result.weights,
+        )
+        fit_results.extend(
+            unpack_fitted_parameters_for_lineshape(
+                fit_input.args.lineshape,
+                list(fit_result.out.params.items()),
+                plane_number,
+            )
+        )
+    return fit_results
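+
+# The two functions above implement the two-stage fitting strategy: lineshape
+# parameters are first determined from the (summed) initial-fit data, selected
+# parameters are then fixed with set_parameters_to_fix_during_fit, and each
+# plane is refitted so that only the remaining free parameters (typically the
+# amplitudes) vary. A sketch, with a hypothetical cluster_input:
+#
+#     fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks(cluster_input)
+#     fit_result.out.params, note = set_parameters_to_fix_during_fit(
+#         fit_result.out.params, ["fraction", "sigma", "center"]
+#     )
+#     rows = refit_peak_cluster_with_constraints(cluster_input, fit_result)
+#     cluster_df = pd.DataFrame(rows)  # one row per peak per plane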
+ weights=fit_result.weights, + ) + fit_results.extend( + unpack_fitted_parameters_for_lineshape( + fit_input.args.lineshape, + list(fit_result.out.params.items()), + plane_number, + ) + ) + return fit_results + + +def merge_unpacked_parameters_with_metadata(cluster_fit_df, group_of_peaks_df): + group_of_peaks_df["prefix"] = group_of_peaks_df.ASS.apply(to_prefix) + merged_cluster_fit_df = cluster_fit_df.merge(group_of_peaks_df, on="prefix") + return merged_cluster_fit_df + + +def update_cluster_df_with_fit_statistics(cluster_df, fit_result: ModelResult): + cluster_df["chisqr"] = fit_result.chisqr + cluster_df["redchi"] = fit_result.redchi + cluster_df["residual_sum"] = np.sum(fit_result.residual) + cluster_df["aic"] = fit_result.aic + cluster_df["bic"] = fit_result.bic + cluster_df["nfev"] = fit_result.nfev + cluster_df["ndata"] = fit_result.ndata + return cluster_df + + +def rename_columns_for_compatibility(df): + mapping = { + "amplitude": "amp", + "amplitude_stderr": "amp_err", + "X_AXIS": "init_center_x", + "Y_AXIS": "init_center_y", + "ASS": "assignment", + "MEMCNT": "memcnt", + "X_RADIUS": "x_radius", + "Y_RADIUS": "y_radius", + } + df = df.rename(columns=mapping) + return df + + +def add_vclist_to_df(fit_input: FitPeaksInput, df: pd.DataFrame): + vclist_data = fit_input.args.vclist_data + df["vclist"] = df.plane.apply(lambda x: vclist_data[x]) + return df + + +def prepare_group_of_peaks_for_fitting(clustid, group, fit_peaks_input: FitPeaksInput): + lineshape_function = get_lineshape_function(fit_peaks_input.args.lineshape) + + first_plane_data = fit_peaks_input.data[0] + mask, peak = make_mask_from_peak_cluster(group, first_plane_data) + + x_radius = group.X_RADIUS.max() + y_radius = group.Y_RADIUS.max() + + max_x, min_x = get_limits_for_axis_in_points( + group_axis_points=group.X_AXISf, mask_radius_in_points=x_radius + ) + max_y, min_y = get_limits_for_axis_in_points( + group_axis_points=group.Y_AXISf, mask_radius_in_points=y_radius + ) + max_x, min_x, max_y, min_y = deal_with_peaks_on_edge_of_spectrum( + fit_peaks_input.data.shape, max_x, min_x, max_y, min_y + ) + selected_data = select_reference_planes_using_indices( + fit_peaks_input.data, fit_peaks_input.args.reference_plane_indices + ).sum(axis=0) + mod, p_guess = make_models( + lineshape_function, + group, + selected_data, + lineshape=fit_peaks_input.args.lineshape, + xy_bounds=fit_peaks_input.args.xy_bounds, + ) + peak_slices = slice_peaks_from_data_using_mask(fit_peaks_input.data, mask) + peak_slices = select_reference_planes_using_indices( + peak_slices, fit_peaks_input.args.reference_plane_indices + ) + peak_slices = select_planes_above_threshold_from_masked_data( + peak_slices, fit_peaks_input.args.initial_fit_threshold + ) + peak_slices = peak_slices.sum(axis=0) + + XY = make_meshgrid(fit_peaks_input.data.shape) + X, Y = XY + + XY_slices = np.array([X.copy()[mask], Y.copy()[mask]]) + weights = 1.0 / np.array([fit_peaks_input.args.noise] * len(np.ravel(peak_slices))) + return FitPeakClusterInput( + args=fit_peaks_input.args, + data=fit_peaks_input.data, + config=fit_peaks_input.config, + plane_numbers=fit_peaks_input.plane_numbers, + clustid=clustid, + group=group, + last_peak=peak, + mask=mask, + mod=mod, + p_guess=p_guess, + XY=XY, + peak_slices=peak_slices, + XY_slices=XY_slices, + weights=weights, + fit_method=Config.fit_method, + first_plane_data=first_plane_data, + uc_dics=fit_peaks_input.args.uc_dics, + min_x=min_x, + min_y=min_y, + max_x=max_x, + max_y=max_y, + verbose=fit_peaks_input.args.verbose, + ) + + +def 
fit_cluster_of_peaks(data_for_fitting: FitPeakClusterInput) -> pd.DataFrame:
+    fit_result = perform_initial_lineshape_fit_on_cluster_of_peaks(data_for_fitting)
+    fit_result.out.params, float_str = set_parameters_to_fix_during_fit(
+        fit_result.out.params, data_for_fitting.args.to_fix
+    )
+    fit_results = refit_peak_cluster_with_constraints(data_for_fitting, fit_result)
+    cluster_df = pd.DataFrame(fit_results)
+    cluster_df = update_cluster_df_with_fit_statistics(cluster_df, fit_result.out)
+    cluster_df["clustid"] = data_for_fitting.clustid
+    cluster_df = merge_unpacked_parameters_with_metadata(
+        cluster_df, data_for_fitting.group
+    )
+    return cluster_df
+
+
+def fit_peak_clusters(peaks: pd.DataFrame, fit_input: FitPeaksInput) -> FitPeaksResult:
+    """Fit set of peak clusters to lineshape model
+
+    :param peaks: peaklist generated by peakipy read or edit
+    :type peaks: pd.DataFrame
+
+    :param fit_input: Data structure containing input parameters (args, config and NMR data)
+    :type fit_input: FitPeaksInput
+
+    :returns: Data structure containing pd.DataFrame with the fitted results and a log
+    :rtype: FitPeaksResult
+    """
+    peak_clusters = peaks.groupby("CLUSTID")
+    filtered_peaks = filter_peak_clusters_by_max_cluster_size(
+        peak_clusters, fit_input.args.max_cluster_size
+    )
+    peak_clusters = filtered_peaks.groupby("CLUSTID")
+    out_str = ""
+    cluster_dfs = []
+    for clustid, peak_cluster in peak_clusters:
+        data_for_fitting = prepare_group_of_peaks_for_fitting(
+            clustid,
+            peak_cluster,
+            fit_input,
+        )
+        if fit_input.args.jack_knife_sample_errors:
+            cluster_df = jack_knife_sample_errors(data_for_fitting)
+        else:
+            cluster_df = fit_cluster_of_peaks(data_for_fitting)
+        cluster_dfs.append(cluster_df)
+    df = pd.concat(cluster_dfs, ignore_index=True)
+
+    df["lineshape"] = fit_input.args.lineshape.value
+
+    if fit_input.args.vclist:
+        df = add_vclist_to_df(fit_input, df)
+    df = rename_columns_for_compatibility(df)
+    return FitPeaksResult(df=df, log=out_str)
+
+
+def jack_knife_sample_errors(fit_input: FitPeakClusterInput) -> pd.DataFrame:
+    peak_slices = fit_input.peak_slices.copy()
+    XY_slices = fit_input.XY_slices.copy()
+    weights = fit_input.weights.copy()
+    masked_plane_data = fit_input.masked_plane_data.copy()
+    jk_results = []
+    # first fit without jackknife
+    jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input)
+    jk_result["jack_knife_sample_index"] = 0
+    jk_results.append(jk_result)
+    for i in np.arange(0, len(peak_slices), 10, dtype=int):
+        fit_input.peak_slices = np.delete(peak_slices, i, None)
+        XY_slices_0 = np.delete(XY_slices[0], i, None)
+        XY_slices_1 = np.delete(XY_slices[1], i, None)
+        fit_input.XY_slices = np.array([XY_slices_0, XY_slices_1])
+        fit_input.weights = np.delete(weights, i, None)
+        fit_input.masked_plane_data = np.delete(masked_plane_data, i, axis=1)
+        jk_result = fit_cluster_of_peaks(data_for_fitting=fit_input)
+        jk_result["jack_knife_sample_index"] = i + 1
+        jk_results.append(jk_result)
+    return pd.concat(jk_results, ignore_index=True)
diff --git a/peakipy/plotting.py b/peakipy/plotting.py
index 479f4ace..738c3712 100644
--- a/peakipy/plotting.py
+++ b/peakipy/plotting.py
@@ -122,6 +122,7 @@ def create_matplotlib_figure(
     label=False,
     ccpn_flag=False,
     show=True,
+    test=False,
 ):
     fig = plt.figure(figsize=(10, 6))
     ax = fig.add_subplot(projection="3d")
@@ -223,6 +224,8 @@ def next_plot(event):
         axnext = plt.axes([0.71, 0.05, 0.1, 0.075])
         bnnext = Button(axnext, "Next")
         bnnext.on_clicked(next_plot)
+        if test:
+            return
     if ccpn_flag:
         plt.show(windowTitle="",
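jack_knife_sample_errors above refits the cluster with every tenth masked data point deleted and collects the fitted parameters from each refit, so the spread across refits gives a rough error estimate. The same leave-points-out idea on a toy 1D least-squares problem, a crude sketch assuming only numpy:

import numpy as np

rng = np.random.default_rng(1)
x = np.linspace(0, 1, 50)
y = 2.0 * x + rng.normal(scale=0.1, size=x.size)

slopes = []
for i in range(0, x.size, 10):  # delete every 10th point, as above
    keep = np.ones(x.size, dtype=bool)
    keep[i] = False
    slopes.append(np.polyfit(x[keep], y[keep], 1)[0])

# the scatter of the jackknife estimates approximates the fit uncertainty
print(np.mean(slopes), np.std(slopes))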
size=(1000, 500)) else: diff --git a/test/test_core.py b/test/test_core.py deleted file mode 100644 index ed9dec4c..00000000 --- a/test/test_core.py +++ /dev/null @@ -1,570 +0,0 @@ -import unittest -from unittest.mock import patch -from collections import namedtuple -from pathlib import Path -import json - -import pytest -import numpy as np -from numpy.testing import assert_array_equal -import pandas as pd -import nmrglue as ng -from lmfit import Model - - -from peakipy.core import ( - make_mask, - fix_params, - gaussian_lorentzian, - pv_g, - pv_l, - voigt2d, - pvoigt2d, - pv_pv, - get_params, - make_param_dict, - to_prefix, - make_models, - get_lineshape_function, - Pseudo3D, - Peaklist, - Lineshape, - PeaklistFormat, - PeakLimits, - select_reference_planes_using_indices, - select_planes_above_threshold_from_masked_data, - slice_peaks_from_data_using_mask, - estimate_amplitude, - load_config, - write_config, - update_config_file, -) - - -@pytest.fixture -def test_directory(): - return Path(__file__).parent - - -def test_select_reference_planes_using_indices(): - data = np.zeros((6, 100, 200)) - indices = [] - np.testing.assert_array_equal( - select_reference_planes_using_indices(data, indices), data - ) - indices = [1] - assert select_reference_planes_using_indices(data, indices).shape == (1, 100, 200) - indices = [1, -1] - assert select_reference_planes_using_indices(data, indices).shape == (2, 100, 200) - - -def test_slice_peaks_from_data_using_mask(): - data = np.array( - [ - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], - [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], - [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], - [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], - [1, 2, 3, 4, 5, 5, 4, 3, 2, 1], - [0, 1, 2, 3, 4, 4, 3, 2, 1, 0], - [0, 0, 1, 2, 3, 3, 2, 1, 0, 0], - [0, 0, 0, 1, 2, 2, 1, 0, 0, 0], - [0, 0, 0, 0, 1, 1, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - ] - ) - for i in range(5) - ] - ) - mask = data[0] > 0 - assert data.shape == (5, 11, 10) - assert mask.shape == (11, 10) - peak_slices = slice_peaks_from_data_using_mask(data, mask) - # array is flattened by application of mask - assert peak_slices.shape == (5, 50) - - -def test_select_planes_above_threshold_from_masked_data(): - peak_slices = np.array( - [ - [1, 1, 1, 1, 1, 1], - [2, 2, 2, 2, 2, 2], - [-1, -1, -1, -1, -1, -1], - [-2, -2, -2, -2, -2, -2], - ] - ) - assert peak_slices.shape == (4, 6) - threshold = -1 - assert select_planes_above_threshold_from_masked_data( - peak_slices, threshold - ).shape == ( - 4, - 6, - ) - threshold = 2 - assert_array_equal( - select_planes_above_threshold_from_masked_data(peak_slices, threshold), - peak_slices, - ) - threshold = 1 - assert select_planes_above_threshold_from_masked_data( - peak_slices, threshold - ).shape == (2, 6) - - threshold = None - assert_array_equal( - select_planes_above_threshold_from_masked_data(peak_slices, threshold), - peak_slices, - ) - threshold = 10 - assert_array_equal( - select_planes_above_threshold_from_masked_data(peak_slices, threshold), - peak_slices, - ) - - -def test_make_param_dict(): - selected_planes = [1, 2] - data = np.ones((4, 10, 5)) - expected_shape = (2, 10, 5) - actual_shape = data[np.array(selected_planes)].shape - assert expected_shape == actual_shape - - -def test_make_param_dict_sum(): - data = np.ones((4, 10, 5)) - expected_sum = 200 - actual_sum = data.sum() - assert expected_sum == actual_sum - - -def test_make_param_dict_selected(): - selected_planes = [1, 2] - data = np.ones((4, 10, 5)) - data = data[np.array(selected_planes)] - 
expected_sum = 100 - actual_sum = data.sum() - assert expected_sum == actual_sum - - -def test_estimate_amplitude(): - peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) - p = peak(5, 2, 3, 2) - data = np.ones((20, 10)) - expected_result = 25 - actual_result = estimate_amplitude(p, data) - assert expected_result == actual_result - - -def test_estimate_amplitude_invalid_indices(): - peak = namedtuple("peak", ["X_AXIS", "XW", "Y_AXIS", "YW"]) - p = peak(1, 2, 3, 2) - data = np.ones((20, 10)) - expected_result = 20 - actual_result = estimate_amplitude(p, data) - assert expected_result == actual_result - - -class TestCoreFunctions(unittest.TestCase): - test_directory = Path(__file__).parent - test_directory = "./" - - def test_make_mask(self): - data = np.ones((10, 10)) - c_x = 5 - c_y = 5 - r_x = 3 - r_y = 2 - - expected_result = np.array( - [ - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0], - [0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0], - [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - ] - ) - - result = np.array(make_mask(data, c_x, c_y, r_x, r_y), dtype=int) - test = result - expected_result - # print(test) - # print(test.sum()) - # print(result) - self.assertEqual(test.sum(), 0) - - def test_make_mask_2(self): - data = np.ones((10, 10)) - c_x = 5 - c_y = 8 - r_x = 3 - r_y = 2 - - expected_result = np.array( - [ - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0], - [0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0], - [0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0], - ] - ) - - result = np.array(make_mask(data, c_x, c_y, r_x, r_y), dtype=int) - test = result - expected_result - # print(test) - # print(test.sum()) - # print(result) - self.assertEqual(test.sum(), 0) - - def test_fix_params(self): - mod = Model(pvoigt2d) - pars = mod.make_params() - to_fix = ["center", "sigma", "fraction"] - fix_params(pars, to_fix) - - self.assertEqual(pars["center_x"].vary, False) - self.assertEqual(pars["center_y"].vary, False) - self.assertEqual(pars["sigma_x"].vary, False) - self.assertEqual(pars["sigma_y"].vary, False) - self.assertEqual(pars["fraction"].vary, False) - - def test_get_params(self): - mod = Model(pvoigt2d, prefix="p1_") - pars = mod.make_params(p1_center_x=20.0, p1_center_y=30.0) - pars["p1_center_x"].stderr = 1.0 - pars["p1_center_y"].stderr = 2.0 - ps, ps_err, names, prefixes = get_params(pars, "center") - #  get index of values - cen_x = names.index("p1_center_x") - cen_y = names.index("p1_center_y") - - self.assertEqual(ps[cen_x], 20.0) - self.assertEqual(ps[cen_y], 30.0) - self.assertEqual(ps_err[cen_x], 1.0) - self.assertEqual(ps_err[cen_y], 2.0) - self.assertEqual(prefixes[cen_y], "p1_") - - def test_make_param_dict(self): - peaks = pd.DataFrame( - { - "ASS": ["one", "two", "three"], - "X_AXISf": [5.0, 10.0, 
15.0], - "X_AXIS": [5, 10, 15], - "Y_AXISf": [15.0, 10.0, 5.0], - "Y_AXIS": [15, 10, 5], - "XW": [2.5, 2.5, 2.5], - "YW": [2.5, 2.5, 2.5], - } - ) - data = np.ones((20, 20)) - - for ls, frac in zip([Lineshape.PV, Lineshape.G, Lineshape.L], [0.5, 0.0, 1.0]): - params = make_param_dict(peaks, data, ls) - self.assertEqual(params["_one_fraction"], frac) - self.assertEqual(params["_two_fraction"], frac) - self.assertEqual(params["_three_fraction"], frac) - - self.assertEqual(params["_one_center_x"], 5.0) - self.assertEqual(params["_two_center_x"], 10.0) - self.assertEqual(params["_two_sigma_x"], 1.25) - self.assertEqual(params["_two_sigma_y"], 1.25) - - voigt_params = make_param_dict(peaks, data, Lineshape.V) - self.assertEqual( - voigt_params["_one_sigma_x"], 2.5 / (2.0 * np.sqrt(2.0 * np.log(2))) - ) - self.assertEqual(voigt_params["_one_gamma_x"], 2.5 / 2.0) - - def test_to_prefix(self): - names = [ - (1, "_1_"), - (1.0, "_1_0_"), - (" one", "_one_"), - (" one/two", "_oneortwo_"), - (" one?two", "_onemaybetwo_"), - (r" [{one?two\}][", "___onemaybetwo____"), - ] - for test, expect in names: - prefix = to_prefix(test) - # print(prefix) - self.assertEqual(prefix, expect) - - def test_make_models(self): - peaks = pd.DataFrame( - { - "ASS": ["one", "two", "three"], - "X_AXISf": [5.0, 10.0, 15.0], - "X_AXIS": [5, 10, 15], - "Y_AXISf": [15.0, 10.0, 5.0], - "Y_AXIS": [15, 10, 5], - "XW": [2.5, 2.5, 2.5], - "YW": [2.5, 2.5, 2.5], - "CLUSTID": [1, 1, 1], - } - ) - - group = peaks.groupby("CLUSTID") - - data = np.ones((20, 20)) - - lineshapes = [Lineshape.PV, Lineshape.L, Lineshape.G, Lineshape.PV_PV] - - for lineshape in lineshapes: - match lineshape: - case lineshape.PV: - mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) - self.assertEqual(p_guess["_one_fraction"].vary, True) - self.assertEqual(p_guess["_one_fraction"].value, 0.5) - - case lineshape.G: - mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) - self.assertEqual(p_guess["_one_fraction"].vary, False) - self.assertEqual(p_guess["_one_fraction"].value, 0.0) - - case lineshape.L: - mod, p_guess = make_models(pvoigt2d, peaks, data, lineshape) - self.assertEqual(p_guess["_one_fraction"].vary, False) - self.assertEqual(p_guess["_one_fraction"].value, 1.0) - - case lineshape.PV_PV: - mod, p_guess = make_models(pv_pv, peaks, data, lineshape) - self.assertEqual(p_guess["_one_fraction_x"].vary, True) - self.assertEqual(p_guess["_one_fraction_x"].value, 0.5) - self.assertEqual(p_guess["_one_fraction_y"].vary, True) - self.assertEqual(p_guess["_one_fraction_y"].value, 0.5) - - def test_Pseudo3D(self): - datasets = [ - (f"{self.test_directory}/test_protein_L/test1.ft2", [0, 1, 2]), - (f"{self.test_directory}/test_protein_L/test_tp.ft2", [2, 1, 0]), - (f"{self.test_directory}/test_protein_L/test_tp2.ft2", [1, 2, 0]), - ] - - # expected shape - data_shape = (4, 256, 546) - test_nu = 1 - for dataset, dims in datasets: - with self.subTest(i=test_nu): - dic, data = ng.pipe.read(dataset) - pseudo3D = Pseudo3D(dic, data, dims) - self.assertEqual(dims, pseudo3D.dims) - self.assertEqual(pseudo3D.data.shape, data_shape) - self.assertEqual(pseudo3D.f1_label, "15N") - self.assertEqual(pseudo3D.f2_label, "HN") - self.assertEqual(pseudo3D.dims, dims) - self.assertEqual(pseudo3D.f1_size, 256) - self.assertEqual(pseudo3D.f2_size, 546) - test_nu += 1 - - -# test for read, edit, fit, check and spec scripts -# need to actually write proper tests -class TestBokehScript(unittest.TestCase): - @patch("peakipy.cli.edit.BokehScript") - def 
test_BokehScript(self, MockBokehScript): - args = {"": "hello", "": "data"} - bokeh_plots = MockBokehScript(args) - self.assertIsNotNone(bokeh_plots) - - -class TestCheckScript(unittest.TestCase): - @patch("peakipy.cli.main.check") - def test_main(self, MockCheck): - args = {"": "hello", "": "data"} - check = MockCheck(args) - self.assertIsNotNone(check) - - -class TestFitScript(unittest.TestCase): - @patch("peakipy.cli.main.fit") - def test_main(self, MockFit): - args = {"": "hello", "": "data"} - fit = MockFit(args) - self.assertIsNotNone(fit) - - -class TestReadScript(unittest.TestCase): - test_directory = "./" - - @patch("peakipy.cli.main.read") - def test_main(self, MockRead): - args = {"": "hello", "": "data"} - read = MockRead(args) - self.assertIsNotNone(read) - - def test_read_pipe_peaklist(self): - args = { - "path": f"{self.test_directory}/test_pipe.tab", - "data_path": f"{self.test_directory}/test_pipe.ft2", - "dims": [0, 1, 2], - "fmt": PeaklistFormat.pipe, - } - peaklist = Peaklist(**args) - self.assertIsNotNone(peaklist) - self.assertIs(len(peaklist.df), 3) - # self.assertIs(peaklist.df.X_AXISf.iloc[0], 323.019) - self.assertIs(peaklist.fmt.value, "pipe") - # self.assertEqual(peaklist.df.ASS.iloc[0], "None") - # self.assertEqual(peaklist.df.ASS.iloc[1], "None_dummy_1") - - -class TestSpecScript(unittest.TestCase): - @patch("peakipy.cli.main.spec") - def test_main(self, MockSpec): - args = {"": "hello", "": "data"} - spec = MockSpec(args) - self.assertIsNotNone(spec) - - -def test_load_config_existing(): - config_path = Path("test_config.json") - # Create a dummy existing config file - with open(config_path, "w") as f: - json.dump({"key1": "value1"}, f) - - loaded_config = load_config(config_path) - - assert loaded_config == {"key1": "value1"} - - # Clean up - config_path.unlink() - - -def test_load_config_nonexistent(): - config_path = Path("test_config.json") - - loaded_config = load_config(config_path) - - assert loaded_config == {} - - -def test_write_config(): - config_path = Path("test_config.json") - config_kvs = {"key1": "value1", "key2": "value2"} - - write_config(config_path, config_kvs) - - # Check if the config file is created correctly - assert config_path.exists() - - # Check if the config file content is correct - with open(config_path) as f: - created_config = json.load(f) - assert created_config == {"key1": "value1", "key2": "value2"} - - # Clean up - config_path.unlink() - - -def test_update_config_file_existing(): - config_path = Path("test_config.json") - # Create a dummy existing config file - with open(config_path, "w") as f: - json.dump({"key1": "value1"}, f) - - config_kvs = {"key2": "value2", "key3": "value3"} - updated_config = update_config_file(config_path, config_kvs) - - assert updated_config == {"key1": "value1", "key2": "value2", "key3": "value3"} - - # Clean up - config_path.unlink() - - -def test_update_config_file_nonexistent(): - config_path = Path("test_config.json") - config_kvs = {"key1": "value1", "key2": "value2"} - updated_config = update_config_file(config_path, config_kvs) - - assert updated_config == {"key1": "value1", "key2": "value2"} - - # Clean up - config_path.unlink() - - -@pytest.fixture -def sample_data(): - return np.zeros((10, 10)) - - -@pytest.fixture -def sample_peak(): - peak_data = {"X_AXIS": [5], "Y_AXIS": [5], "XW": [2], "YW": [2]} - return pd.DataFrame(peak_data).iloc[0] - - -def test_peak_limits_max_min(sample_peak, sample_data): - limits = PeakLimits(sample_peak, sample_data) - - assert limits.max_x == 8 - assert 
limits.max_y == 8 - assert limits.min_x == 3 - assert limits.min_y == 3 - - -def test_peak_limits_boundary(sample_data): - peak_data = {"X_AXIS": [8], "Y_AXIS": [8], "XW": [2], "YW": [2]} - peak = pd.DataFrame(peak_data).iloc[0] - limits = PeakLimits(peak, sample_data) - - assert limits.max_x == 10 - assert limits.max_y == 10 - assert limits.min_x == 6 - assert limits.min_y == 6 - - -def test_peak_limits_at_boundary(sample_data): - peak_data = {"X_AXIS": [0], "Y_AXIS": [0], "XW": [2], "YW": [2]} - peak = pd.DataFrame(peak_data).iloc[0] - limits = PeakLimits(peak, sample_data) - - assert limits.max_x == 3 - assert limits.max_y == 3 - assert limits.min_x == 0 - assert limits.min_y == 0 - - -def test_peak_limits_outside_boundary(sample_data): - peak_data = {"X_AXIS": [15], "Y_AXIS": [15], "XW": [2], "YW": [2]} - peak = pd.DataFrame(peak_data).iloc[0] - with pytest.raises(AssertionError): - limits = PeakLimits(peak, sample_data) - - -def test_peak_limits_1d_data(): - data = np.zeros(10) - peak_data = {"X_AXIS": [5], "Y_AXIS": [0], "XW": [2], "YW": [0]} - peak = pd.DataFrame(peak_data).iloc[0] - with pytest.raises(IndexError): - limits = PeakLimits(peak, data) - - -def test_get_lineshape_function(): - assert get_lineshape_function(Lineshape.PV) == pvoigt2d - assert get_lineshape_function(Lineshape.L) == pvoigt2d - assert get_lineshape_function(Lineshape.G) == pvoigt2d - assert get_lineshape_function(Lineshape.G_L) == gaussian_lorentzian - assert get_lineshape_function(Lineshape.PV_G) == pv_g - assert get_lineshape_function(Lineshape.PV_L) == pv_l - assert get_lineshape_function(Lineshape.PV_PV) == pv_pv - assert get_lineshape_function(Lineshape.V) == voigt2d - with pytest.raises(Exception): - get_lineshape_function("bla") - - -if __name__ == "__main__": - unittest.main(verbosity=2) diff --git a/test/test_fit.py b/test/test_fit.py deleted file mode 100644 index c41d0ebf..00000000 --- a/test/test_fit.py +++ /dev/null @@ -1,460 +0,0 @@ -from pytest import fixture - -import pandas as pd -import numpy as np -from lmfit import Parameters, Model -from lmfit.model import ModelResult - -from peakipy.cli.fit import ( - get_fit_peaks_result_validation_model, - FitPeaksResultRowPVPV, - FitPeaksResultRowVoigt, - FitPeaksResultRowGLPV, - filter_peak_clusters_by_max_cluster_size, - set_parameters_to_fix_during_fit, - unpack_fitted_parameters_for_lineshape, - get_default_lineshape_param_names, - split_parameter_sets_by_peak, - get_prefix_from_parameter_names, - create_parameter_dict, - perform_initial_lineshape_fit_on_cluster_of_peaks, - merge_unpacked_parameters_with_metadata, - add_vclist_to_df, - update_cluster_df_with_fit_statistics, - rename_columns_for_compatibility, - FitPeaksArgs, - FitPeaksInput, -) -from peakipy.lineshapes import Lineshape, pvoigt2d - - -def test_get_fit_peaks_result_validation_model_PVPV(): - validation_model = get_fit_peaks_result_validation_model(Lineshape.PV_PV) - assert validation_model == FitPeaksResultRowPVPV - - -def test_get_fit_peaks_result_validation_model_G(): - validation_model = get_fit_peaks_result_validation_model(Lineshape.G) - assert validation_model == FitPeaksResultRowGLPV - - -def test_get_fit_peaks_result_validation_model_L(): - validation_model = get_fit_peaks_result_validation_model(Lineshape.L) - assert validation_model == FitPeaksResultRowGLPV - - -def test_get_fit_peaks_result_validation_model_PV(): - validation_model = get_fit_peaks_result_validation_model(Lineshape.PV) - assert validation_model == FitPeaksResultRowGLPV - - -def 
test_get_fit_peaks_result_validation_model_V(): - validation_model = get_fit_peaks_result_validation_model(Lineshape.V) - assert validation_model == FitPeaksResultRowVoigt - - -def test_filter_groups_by_max_cluster_size(): - groups = pd.DataFrame( - dict( - col1=[1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 4, 5, 6, 7], - col2=[1, 2, 3, 4, 5, 6, 7, 8, 9, 1, 2, 3, 4, 5, 6, 7], - ) - ).groupby("col1") - max_cluster_size = 3 - filtered_groups = filter_peak_clusters_by_max_cluster_size(groups, max_cluster_size) - filtered_group_names = filtered_groups.col1.unique() - expected_group_names = np.array([3, 4, 5, 6, 7]) - np.testing.assert_array_equal(filtered_group_names, expected_group_names) - - -def test_set_parameters_to_fix_during_fit(): - parameter_set = Parameters() - parameter_set.add("test1", vary=True) - modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameter_set, ["test1"] - ) - assert modified_parameter_set["test1"].vary == False - - -@fixture -def parameters_set_with_two_variables(): - parameter_set = Parameters() - parameter_set.add("prefix1_test1", vary=True) - parameter_set.add("prefix1_test2", vary=True) - return parameter_set - - -def test_set_parameters_to_fix_during_fit_2(parameters_set_with_two_variables): - modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameters_set_with_two_variables, ["prefix1_test1", "prefix1_test2"] - ) - assert ( - modified_parameter_set["prefix1_test2"].vary - == modified_parameter_set["prefix1_test1"].vary - == False - ) - - -def test_set_parameters_to_fix_during_fit_3(): - parameter_set = Parameters() - parameter_set.add("test1", vary=True) - parameter_set.add("test2", vary=True) - modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameter_set, ["test2"] - ) - assert ( - modified_parameter_set["test1"].vary - != modified_parameter_set["test2"].vary - == False - ) - - -def test_set_parameters_to_fix_during_fit_None(): - parameter_set = Parameters() - parameter_set.add("test1", vary=True) - parameter_set.add("test2", vary=True) - modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameter_set, None - ) - assert ( - modified_parameter_set["test1"].vary - == modified_parameter_set["test2"].vary - == True - ) - - -def test_set_parameters_to_fix_during_fit_None_str(): - parameter_set = Parameters() - parameter_set.add("test1", vary=True) - parameter_set.add("test2", vary=True) - modified_parameter_set, float_str = set_parameters_to_fix_during_fit( - parameter_set, ["None"] - ) - assert ( - modified_parameter_set["test1"].vary - == modified_parameter_set["test2"].vary - == True - ) - - -def test_update_cluster_df_with_fit_statistics(): - result = ModelResult(Model(pvoigt2d), None, None) - result.aic = None - result.bic = None - data = [ - dict( - chisqr=None, - redchi=None, - residual_sum=None, - aic=None, - bic=None, - nfev=0, - ndata=0, - ) - ] - expected_cluster_df = pd.DataFrame(data) - actual_cluster_df = update_cluster_df_with_fit_statistics( - expected_cluster_df, result - ) - pd.testing.assert_frame_equal(actual_cluster_df, expected_cluster_df) - - -def test_rename_columns_for_compatibility(): - df = pd.DataFrame( - [ - dict( - amplitude=1, - amplitude_stderr=1, - X_AXIS=1, - Y_AXIS=1, - ASS="None", - MEMCNT=1, - X_RADIUS=1, - Y_RADIUS=1, - ) - ] - ) - expected_columns = [ - "amp", - "amp_err", - "init_center_x", - "init_center_y", - "assignment", - "memcnt", - "x_radius", - "y_radius", - ] - actual_columns = rename_columns_for_compatibility(df).columns - assert 
all([i == j for i, j in zip(actual_columns, expected_columns)]) - - -def test_get_default_param_names_pseudo_voigt(): - assert get_default_lineshape_param_names(Lineshape.PV) == [ - "amplitude", - "center_x", - "center_y", - "sigma_x", - "sigma_y", - "fraction", - ] - - -def test_get_default_param_names_gaussian(): - assert get_default_lineshape_param_names(Lineshape.G) == [ - "amplitude", - "center_x", - "center_y", - "sigma_x", - "sigma_y", - "fraction", - ] - - -def test_get_default_param_names_lorentzian(): - assert get_default_lineshape_param_names(Lineshape.L) == [ - "amplitude", - "center_x", - "center_y", - "sigma_x", - "sigma_y", - "fraction", - ] - - -def test_get_default_param_names_pv_pv(): - assert get_default_lineshape_param_names(Lineshape.PV_PV) == [ - "amplitude", - "center_x", - "center_y", - "sigma_x", - "sigma_y", - "fraction_x", - "fraction_y", - ] - - -def test_get_default_param_names_voigt(): - assert get_default_lineshape_param_names(Lineshape.V) == [ - "amplitude", - "center_x", - "center_y", - "sigma_x", - "sigma_y", - "gamma_x", - "gamma_y", - "fraction", - ] - - -def test_split_parameter_sets_by_peak(default_pseudo_voigt_parameter_names): - # the second element of each tuple actually contains an - # lmfit.Parameter object - params = [ - ("p1_amplitude", "amplitude"), - ("p1_center_x", "center_x"), - ("p1_center_y", "center_y"), - ("p1_sigma_x", "sigma_x"), - ("p1_sigma_y", "sigma_y"), - ("p1_fraction", "fraction"), - ("p2_amplitude", "amplitude"), - ("p2_center_x", "center_x"), - ("p2_center_y", "center_y"), - ("p2_sigma_x", "sigma_x"), - ("p2_sigma_y", "sigma_y"), - ("p2_fraction", "fraction"), - ("p3_amplitude", "amplitude"), - ("p3_center_x", "center_x"), - ("p3_center_y", "center_y"), - ("p3_sigma_x", "sigma_x"), - ("p3_sigma_y", "sigma_y"), - ("p3_fraction", "fraction"), - ] - expected_result = [ - [ - ("p1_amplitude", "amplitude"), - ("p1_center_x", "center_x"), - ("p1_center_y", "center_y"), - ("p1_sigma_x", "sigma_x"), - ("p1_sigma_y", "sigma_y"), - ("p1_fraction", "fraction"), - ], - [ - ("p2_amplitude", "amplitude"), - ("p2_center_x", "center_x"), - ("p2_center_y", "center_y"), - ("p2_sigma_x", "sigma_x"), - ("p2_sigma_y", "sigma_y"), - ("p2_fraction", "fraction"), - ], - [ - ("p3_amplitude", "amplitude"), - ("p3_center_x", "center_x"), - ("p3_center_y", "center_y"), - ("p3_sigma_x", "sigma_x"), - ("p3_sigma_y", "sigma_y"), - ("p3_fraction", "fraction"), - ], - ] - expected_result_parameter_names = [[j[0] for j in i] for i in expected_result] - split_parameter_names = [ - [j[0] for j in i] - for i in split_parameter_sets_by_peak( - default_pseudo_voigt_parameter_names, params - ) - ] - assert split_parameter_names == expected_result_parameter_names - - -@fixture -def default_pseudo_voigt_parameter_names(): - return Model(pvoigt2d).param_names - - -def test_get_prefix_from_parameter_names(default_pseudo_voigt_parameter_names): - parameter_items_with_prefixes = [ - ("p1_amplitude", "amplitude"), - ("p1_center_x", "center_x"), - ("p1_center_y", "center_y"), - ("p1_sigma_x", "sigma_x"), - ("p1_sigma_y", "sigma_y"), - ("p1_fraction", "fraction"), - ] - expected_result = "p1_" - actual_result = get_prefix_from_parameter_names( - default_pseudo_voigt_parameter_names, parameter_items_with_prefixes - ) - assert expected_result == actual_result - - -@fixture -def pseudo_voigt_model_result(): - m1 = Model(pvoigt2d, prefix="p1_") - m2 = Model(pvoigt2d, prefix="p2_") - model = m1 + m2 - params = model.make_params() - model_result = ModelResult(model, params) - 
return model_result - - -def test_create_parameter_dict(pseudo_voigt_model_result): - prefix = "p1_" - params = list(pseudo_voigt_model_result.params.items())[:6] - expected_result = dict( - prefix="p1_", - amplitude=1.0, - amplitude_stderr=None, - center_x=0.5, - center_x_stderr=None, - center_y=0.5, - center_y_stderr=None, - sigma_x=1.0, - sigma_x_stderr=None, - sigma_y=1.0, - sigma_y_stderr=None, - fraction=0.5, - fraction_stderr=None, - ) - actual_result = create_parameter_dict(prefix, params) - assert expected_result == actual_result - - -def test_unpack_fitted_parameters_for_lineshape_PV(pseudo_voigt_model_result): - expected_params = [ - dict( - prefix="p1_", - plane=0, - amplitude=1.0, - amplitude_stderr=None, - center_x=0.5, - center_x_stderr=None, - center_y=0.5, - center_y_stderr=None, - sigma_x=1.0, - sigma_x_stderr=None, - sigma_y=1.0, - sigma_y_stderr=None, - fraction=0.5, - fraction_stderr=None, - ), - dict( - prefix="p2_", - plane=0, - amplitude=1.0, - amplitude_stderr=None, - center_x=0.5, - center_x_stderr=None, - center_y=0.5, - center_y_stderr=None, - sigma_x=1.0, - sigma_x_stderr=None, - sigma_y=1.0, - sigma_y_stderr=None, - fraction=0.5, - fraction_stderr=None, - ), - ] - unpacked_params = unpack_fitted_parameters_for_lineshape( - Lineshape.PV, list(pseudo_voigt_model_result.params.items()), plane_number=0 - ) - assert expected_params == unpacked_params - - -def test_merge_unpacked_parameters_with_metadata(): - cluster_fit_df = pd.DataFrame( - dict( - plane=[0, 1, 2, 3, 0, 1, 2, 3], - prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], - ) - ) - peak_df = pd.DataFrame(dict(ASS=["p1", "p2"], data=["p1_data", "p2_data"])) - expected_result = pd.DataFrame( - dict( - plane=[0, 1, 2, 3, 0, 1, 2, 3], - prefix=["_p1_", "_p1_", "_p1_", "_p1_", "_p2_", "_p2_", "_p2_", "_p2_"], - ASS=["p1", "p1", "p1", "p1", "p2", "p2", "p2", "p2"], - data=[ - "p1_data", - "p1_data", - "p1_data", - "p1_data", - "p2_data", - "p2_data", - "p2_data", - "p2_data", - ], - ) - ) - actual_result = merge_unpacked_parameters_with_metadata(cluster_fit_df, peak_df) - assert expected_result.equals(actual_result) - - -def test_add_vclist_to_df(): - args = FitPeaksArgs( - noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) - ) - fit_peaks_input = FitPeaksInput( - args=args, data=None, config=None, plane_numbers=None - ) - df = pd.DataFrame(dict(plane=[0, 1, 2])) - expected_df = pd.DataFrame(dict(plane=[0, 1, 2], vclist=[1, 2, 3])) - actual_df = add_vclist_to_df(fit_peaks_input, df) - assert actual_df.equals(expected_df) - - -def test_add_vclist_to_df_plane_order(): - args = FitPeaksArgs( - noise=0, uc_dics={}, lineshape=Lineshape.PV, vclist_data=np.array([1, 2, 3]) - ) - fit_peaks_input = FitPeaksInput( - args=args, data=None, config=None, plane_numbers=None - ) - df = pd.DataFrame(dict(plane=[2, 1, 0])) - expected_df = pd.DataFrame(dict(plane=[2, 1, 0], vclist=[3, 2, 1])) - actual_df = add_vclist_to_df(fit_peaks_input, df) - assert actual_df.equals(expected_df) - - -# def test_perform_initial_lineshape_fit_on_cluster_of_peaks(pseudo_voigt_model_result): -# expected_result = pseudo_voigt_model_result -# actual_result = perform_initial_lineshape_fit_on_cluster_of_peaks() -# assert expected_result == actual_result From d901e91d8b86ddd0c1058f620a1c8752597f5868 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Mon, 27 May 2024 22:37:27 -0400 Subject: [PATCH 35/37] added simulation and tests --- Makefile | 1 + peakipy/cli/edit.py | 2 +- peakipy/cli/main.py | 23 ++++--- 
peakipy/fitting.py | 8 ++- peakipy/utils.py | 10 +++ test/test_cli.py | 45 +++++++++++++ test/test_fitting.py | 112 +++++++++++++++++++++++++++++++ test/test_simulation.py | 143 ++++++++++++++++++++++++++++++++++++++++ 8 files changed, 332 insertions(+), 12 deletions(-) create mode 100644 test/test_simulation.py diff --git a/Makefile b/Makefile index 432dee0d..908cac1e 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ coverage: test/test_utils.py \ test/test_main.py \ test/test_cli.py \ + test/test_simulation.py \ test/test_plotting.py coverage-html: diff --git a/peakipy/cli/edit.py b/peakipy/cli/edit.py index 3f4d31ef..2e2d6cb0 100644 --- a/peakipy/cli/edit.py +++ b/peakipy/cli/edit.py @@ -98,7 +98,7 @@ def peakipy_data(self): def make_temp_files(self): # Temp files - self.TEMP_PATH = Path("tmp") + self.TEMP_PATH = self.path.parent / Path("tmp") self.TEMP_PATH.mkdir(parents=True, exist_ok=True) self.TEMP_OUT_CSV = self.TEMP_PATH / Path("tmp_out.csv") diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index 6b3b0aed..f22998c3 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -34,6 +34,8 @@ get_vclist, ) from peakipy.utils import ( + mkdir_tmp_dir, + create_log_path, run_log, df_to_rich_table, write_config, @@ -81,9 +83,6 @@ ) app = typer.Typer() -tmp_path = Path("tmp") -tmp_path.mkdir(exist_ok=True) -log_path = Path("log.txt") peaklist_path_help = "Path to peaklist" @@ -185,6 +184,8 @@ def read( Clusters of peaks are selected """ + mkdir_tmp_dir(peaklist_path.parent) + log_path = create_log_path(peaklist_path.parent) clust_args = { "struc_el": struc_el, @@ -251,7 +252,10 @@ def read( thres = peaks.thres if cluster: - peaks.clusters(thres=thres, **clust_args, l_struc=None) + if struc_el == StrucEl.mask_method: + peaks.mask_method(overlap=struc_size[0]) + else: + peaks.clusters(thres=thres, **clust_args, l_struc=None) else: pass @@ -288,7 +292,7 @@ def read( config_dic = dict(config_kvs) write_config(config_path, config_dic) - run_log() + run_log(log_path) print( f"""[green] @@ -382,6 +386,8 @@ def fit( verb : bool Print what's going on """ + tmp_path = mkdir_tmp_dir(peaklist_path.parent) + log_path = create_log_path(peaklist_path.parent) # number of CPUs n_cpu = cpu_count() @@ -412,7 +418,7 @@ def fit( args = get_vclist(vclist, args) # plot results or not - log_file = open(tmp_path / log_path, "w") + log_file = open(log_path, "w") uc_dics = {"f1": peakipy_data.uc_f1, "f2": peakipy_data.uc_f2} args["uc_dics"] = uc_dics @@ -488,7 +494,7 @@ def fit( [/green] """ ) - run_log() + run_log(log_path) fits_help = "CSV file containing peakipy fits" @@ -547,6 +553,7 @@ def check( verb : bool verbose mode """ + log_path = create_log_path(fits.parent) columns_to_print = [ "assignment", "clustid", @@ -673,7 +680,7 @@ def check( plot_data, pdf, individual, label, ccpn_flag, show, test ) - run_log() + run_log(log_path) if __name__ == "__main__": diff --git a/peakipy/fitting.py b/peakipy/fitting.py index e99f6aa4..1bd23263 100644 --- a/peakipy/fitting.py +++ b/peakipy/fitting.py @@ -5,7 +5,7 @@ import numpy as np from numpy import sqrt import pandas as pd -from rich import console +from rich import print from lmfit import Model, Parameters, Parameter from lmfit.model import ModelResult from pydantic import BaseModel @@ -941,7 +941,7 @@ def perform_initial_lineshape_fit_on_cluster_of_peaks( ) if verbose: - console.print(out.fit_report(), style="bold") + print(out.fit_report()) z_sim = mod.eval(XY=XY, params=out.params) z_sim[~mask] = np.nan @@ -995,7 +995,9 @@ def 
refit_peak_cluster_with_constraints( def merge_unpacked_parameters_with_metadata(cluster_fit_df, group_of_peaks_df): group_of_peaks_df["prefix"] = group_of_peaks_df.ASS.apply(to_prefix) - merged_cluster_fit_df = cluster_fit_df.merge(group_of_peaks_df, on="prefix") + merged_cluster_fit_df = cluster_fit_df.merge( + group_of_peaks_df, on="prefix", suffixes=["", "_init"] + ) return merged_cluster_fit_df diff --git a/peakipy/utils.py b/peakipy/utils.py index 310ee656..5f6d68b3 100644 --- a/peakipy/utils.py +++ b/peakipy/utils.py @@ -29,6 +29,16 @@ ] +def mkdir_tmp_dir(base_path: Path = Path("./")): + tmp_dir = base_path / "tmp" + tmp_dir.mkdir(exist_ok=True) + return tmp_dir + + +def create_log_path(base_path: Path = Path("./")): + return base_path / "run_log.txt" + + def run_log(log_name="run_log.txt"): """Write log file containing time script was run and with which arguments""" with open(log_name, "a") as log: diff --git a/test/test_cli.py b/test/test_cli.py index 3c14cd4b..d89454bd 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -7,6 +7,7 @@ import peakipy.cli.check_panel import peakipy.cli.edit_panel from peakipy.cli.main import PeaklistFormat, Lineshape +from peakipy.io import StrucEl @pytest.fixture @@ -42,6 +43,50 @@ def test_read_main_with_default_sparky(protein_L): peakipy.cli.main.read(**args) +def test_read_main_with_strucel_square(protein_L): + args = dict( + peaklist_path=protein_L / Path("peaks.sparky"), + data_path=protein_L / Path("test1.ft2"), + peaklist_format=PeaklistFormat.sparky, + struc_el=StrucEl.square, + ) + peakipy.cli.main.read(**args) + + +def test_read_main_with_strucel_rectangle(protein_L): + args = dict( + peaklist_path=protein_L / Path("peaks.sparky"), + data_path=protein_L / Path("test1.ft2"), + peaklist_format=PeaklistFormat.sparky, + struc_el=StrucEl.rectangle, + struc_size=(3, 3), + ) + peakipy.cli.main.read(**args) + + +def test_read_main_with_mask_method(protein_L): + args = dict( + peaklist_path=protein_L / Path("peaks.sparky"), + data_path=protein_L / Path("test1.ft2"), + peaklist_format=PeaklistFormat.sparky, + struc_el=StrucEl.mask_method, + struc_size=(1, 1), + ) + peakipy.cli.main.read(**args) + + +def test_read_main_with_mask_method_fuda(protein_L): + args = dict( + peaklist_path=protein_L / Path("peaks.sparky"), + data_path=protein_L / Path("test1.ft2"), + peaklist_format=PeaklistFormat.sparky, + struc_el=StrucEl.mask_method, + struc_size=(1, 1), + fuda=True, + ) + peakipy.cli.main.read(**args) + + def test_fit_main_with_default(protein_L): args = dict( peaklist_path=protein_L / Path("test.csv"), diff --git a/test/test_fitting.py b/test/test_fitting.py index 7291885d..05855a66 100644 --- a/test/test_fitting.py +++ b/test/test_fitting.py @@ -1,4 +1,5 @@ import unittest +from unittest.mock import MagicMock from pathlib import Path from collections import namedtuple @@ -51,6 +52,7 @@ rename_columns_for_compatibility, FitPeaksArgs, FitPeaksInput, + FitResult, ) from peakipy.lineshapes import Lineshape, pvoigt2d, pv_pv @@ -1142,3 +1144,113 @@ def test_add_vclist_to_df_plane_order(): # expected_result = pseudo_voigt_model_result # actual_result = perform_initial_lineshape_fit_on_cluster_of_peaks() # assert expected_result == actual_result +# Mock FitPeakClusterInput class for testing purposes +class MockFitPeakClusterInput: + def __init__( + self, + mod, + peak_slices, + XY_slices, + p_guess, + weights, + fit_method, + mask, + XY, + first_plane_data, + last_peak, + group, + min_x, + min_y, + max_x, + max_y, + verbose, + uc_dics, + ): + self.mod = 
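The fixture above swaps the lmfit model for a MagicMock so that the FitResult plumbing can be exercised without running a real fit. A self-contained sketch of that mocking pattern, standard library only:

from unittest.mock import MagicMock

model = MagicMock()
model.fit = MagicMock(return_value=MagicMock(params="params"))

result = model.fit("data", method="leastsq")
assert result.params == "params"
model.fit.assert_called_once_with("data", method="leastsq")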
mod
+        self.peak_slices = peak_slices
+        self.XY_slices = XY_slices
+        self.p_guess = p_guess
+        self.weights = weights
+        self.fit_method = fit_method
+        self.mask = mask
+        self.XY = XY
+        self.first_plane_data = first_plane_data
+        self.last_peak = last_peak
+        self.group = group
+        self.min_x = min_x
+        self.min_y = min_y
+        self.max_x = max_x
+        self.max_y = max_y
+        self.verbose = verbose
+        self.uc_dics = uc_dics
+
+
+@pytest.fixture
+def fit_peak_cluster_input():
+    mod = MagicMock()
+    mod.fit = MagicMock(
+        return_value=MagicMock(
+            params="params", fit_report=MagicMock(return_value="fit_report")
+        )
+    )
+    mod.eval = MagicMock(return_value=np.array([2.0, 1.0, 2.0]))
+
+    return MockFitPeakClusterInput(
+        mod=mod,
+        peak_slices="peak_slices",
+        XY_slices="XY_slices",
+        p_guess="p_guess",
+        weights="weights",
+        fit_method="fit_method",
+        mask=np.array([True, False, True]),
+        XY=(np.array([0, 1, 2]), np.array([0, 1, 2])),
+        first_plane_data=np.array([2.0, 1.0, 2.0]),
+        last_peak="last_peak",
+        group="group",
+        min_x="min_x",
+        min_y="min_y",
+        max_x="max_x",
+        max_y="max_y",
+        verbose=True,
+        uc_dics="uc_dics",
+    )
+
+
+def test_perform_initial_lineshape_fit_on_cluster_of_peaks(fit_peak_cluster_input):
+
+    result = perform_initial_lineshape_fit_on_cluster_of_peaks(fit_peak_cluster_input)
+
+    # Check if result is an instance of FitResult
+    assert isinstance(result, FitResult)
+
+    # Verify returned values
+    assert result.out.params == "params"
+    np.testing.assert_array_equal(result.mask, np.array([True, False, True]))
+    assert result.fit_str == ""
+    assert result.log == ""
+    assert result.group == "group"
+    assert result.uc_dics == "uc_dics"
+    assert result.min_x == "min_x"
+    assert result.min_y == "min_y"
+    assert result.max_x == "max_x"
+    assert result.max_y == "max_y"
+    np.testing.assert_array_equal(result.X, np.array([0, 1, 2]))
+    np.testing.assert_array_equal(result.Y, np.array([0, 1, 2]))
+    np.testing.assert_array_equal(result.Z, np.array([2.0, np.nan, 2.0]))
+    np.testing.assert_array_equal(result.Z_sim, np.array([2.0, np.nan, 2.0]))
+    assert result.peak_slices == "peak_slices"
+    assert result.XY_slices == "XY_slices"
+    assert result.weights == "weights"
+    assert result.mod == fit_peak_cluster_input.mod
+
+    # Check if mod.fit and mod.eval were called with correct arguments
+    fit_peak_cluster_input.mod.fit.assert_called_once_with(
+        "peak_slices",
+        XY="XY_slices",
+        params="p_guess",
+        weights="weights",
+        method="fit_method",
+    )
+    # fit_peak_cluster_input.mod.eval.assert_called_once_with(
+    #     XY=(np.array([0,1,2]), np.array([0,1,2])), params='params'
+    # )
diff --git a/test/test_simulation.py b/test/test_simulation.py
new file mode 100644
index 00000000..26c11a5b
--- /dev/null
+++ b/test/test_simulation.py
@@ -0,0 +1,143 @@
+""" Simulate some peaks, add noise and fit using peakipy """
+
+import numpy as np
+import pandas as pd
+from lmfit import Model
+from skimage.filters import threshold_otsu
+from nmrglue.fileio.fileiobase import unit_conversion
+from mpl_toolkits.mplot3d import axes3d
+import matplotlib.pyplot as plt
+
+from peakipy.lineshapes import pvoigt2d, Lineshape
+from peakipy.fitting import (
+    simulate_lineshapes_from_fitted_peak_parameters,
+    make_models,
+    make_meshgrid,
+    FitPeaksArgs,
+    FitPeaksInput,
+    fit_peak_clusters,
+)
+
+
+uc_x = unit_conversion(400, False, 2400, 600, 4800)
+uc_y = unit_conversion(200, False, 1200, 60, 7200)
+uc_dics = {"f1": uc_y, "f2": uc_x}
+
+p1 = dict(
+    amp=10,
+    center_x=200.0,
+    center_y=100.0,
+    sigma_x=10.0,
+    sigma_y=18.0,
+    fraction=0.5,
lineshape="PV", + ASS="one", +) + +p2 = dict( + amp=12, + center_x=220.0, + center_y=130.0, + sigma_x=20.0, + sigma_y=15.0, + fraction=0.5, + lineshape="PV", + ASS="two", +) + +peak_parameters = pd.DataFrame([p1, p2]) +peak_parameters["X_AXIS"] = peak_parameters.center_x +peak_parameters["Y_AXIS"] = peak_parameters.center_y +peak_parameters["X_AXISf"] = peak_parameters.center_x +peak_parameters["Y_AXISf"] = peak_parameters.center_y +peak_parameters["X_RADIUS"] = 60 +peak_parameters["Y_RADIUS"] = 30 +peak_parameters["X_PPM"] = peak_parameters.center_x.apply(uc_x.ppm) +peak_parameters["Y_PPM"] = peak_parameters.center_x.apply(uc_x.ppm) +peak_parameters["XW"] = peak_parameters.sigma_x +peak_parameters["YW"] = peak_parameters.sigma_y +peak_parameters["CLUSTID"] = 1 +peak_parameters["MEMCNT"] = peak_parameters.shape[0] +peak_parameters["plane"] = 0 + +x = 400 +y = 200 +data_shape_Y_X = (y, x) +data_shape_X_Y = (x, y) +XY = make_meshgrid(data_shape_Y_X) +X, Y = XY +Z_sim = np.random.normal(loc=0.0, scale=0.0001, size=data_shape_Y_X) +Z_sim_singles = [] +Z_sim, Z_sim_singles = simulate_lineshapes_from_fitted_peak_parameters( + peak_parameters=peak_parameters, + XY=XY, + sim_data=Z_sim, + sim_data_singles=Z_sim_singles, +) + +fit_peaks_args = FitPeaksArgs( + noise=threshold_otsu(Z_sim), + uc_dics=uc_dics, + lineshape=Lineshape.PV, + max_cluster_size=10, + reference_plane_indices=[], + xy_bounds=None, + initial_fit_threshold=None, + vclist=None, +) + +fit_peaks_input = FitPeaksInput( + fit_peaks_args, Z_sim.reshape(1, y, x), dict(dims=[0, 1]), plane_numbers=[0] +) + +fit_peaks_result = fit_peak_clusters(peak_parameters, fit_peaks_input) + + +def test_fit_from_simulated_data(): + pd.testing.assert_series_equal( + fit_peaks_result.df.center_x, + fit_peaks_result.df.center_x_init, + check_exact=False, + check_names=False, + rtol=1e-3, + ) + pd.testing.assert_series_equal( + fit_peaks_result.df.center_y, + fit_peaks_result.df.center_y_init, + check_exact=False, + check_names=False, + rtol=1e-3, + ) + pd.testing.assert_series_equal( + fit_peaks_result.df.sigma_x, + fit_peaks_result.df.sigma_x_init, + check_exact=False, + check_names=False, + rtol=1e-2, + ) + pd.testing.assert_series_equal( + fit_peaks_result.df.sigma_y, + fit_peaks_result.df.sigma_y_init, + check_exact=False, + check_names=False, + rtol=1e-2, + ) + + +def test_fit_from_simulated_data_jack_knife(): + fit_peaks_input = FitPeaksInput( + fit_peaks_args, Z_sim.reshape(1, y, x), dict(dims=[0, 1]), plane_numbers=[0] + ) + fit_peaks_input.args.jack_knife_sample_errors = True + fit_peaks_result = fit_peak_clusters(peak_parameters, fit_peaks_input) + + +# def plot3D(X,Y,Z,Z_singles): +# fig = plt.figure() +# ax = fig.add_subplot(projection='3d') +# ax.plot_wireframe(X,Y,Z) +# for Z_single in Z_singles: +# ax.plot_surface(X, Y, Z_single,alpha=0.5) +# plt.xlabel("X") +# plt.ylabel("Y") +# plt.show() From 76e19e839a8b8e2415c633e7468eca829b0cdffd Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Thu, 30 May 2024 09:01:52 -0400 Subject: [PATCH 36/37] refactored --- peakipy/cli/check_panel.py | 137 ---------------- peakipy/cli/edit_panel.py | 148 ----------------- peakipy/cli/main.py | 315 ++++++++++++++++++++++++++++++++++--- peakipy/io.py | 5 +- test/test_cli.py | 37 ++--- 5 files changed, 309 insertions(+), 333 deletions(-) delete mode 100644 peakipy/cli/check_panel.py delete mode 100644 peakipy/cli/edit_panel.py diff --git a/peakipy/cli/check_panel.py b/peakipy/cli/check_panel.py deleted file mode 100644 index 5dc06cd9..00000000 --- 
a/peakipy/cli/check_panel.py +++ /dev/null @@ -1,137 +0,0 @@ -from pathlib import Path -from dataclasses import dataclass, field -from functools import lru_cache -import panel as pn -import pandas as pd -import typer - -from peakipy.cli.main import check, validate_fit_dataframe - -pn.extension() - - -@dataclass -class Data: - fits_path: Path = Path("./fits.csv") - data_path: Path = Path("./test.ft2") - config_path: Path = Path("./peakipy.config") - _df: pd.DataFrame = field(init=False) - - def load_dataframe(self): - self._df = validate_fit_dataframe(pd.read_csv(self.fits_path)) - - @property - def df(self): - return self._df - - -@lru_cache(maxsize=1) -def data_singleton(): - return Data() - - -def get_cluster(cluster): - data = data_singleton() - cluster_groups = data.df.groupby("clustid") - cluster_group = cluster_groups.get_group(cluster) - df_pane = pn.widgets.Tabulator( - cluster_group[ - [ - "assignment", - "clustid", - "memcnt", - "plane", - "amp", - "height", - "center_x_ppm", - "center_y_ppm", - "fwhm_x_hz", - "fwhm_y_hz", - "lineshape", - ] - ], - selectable=False, - disabled=True, - ) - return df_pane - - -@dataclass -class PlotContainer: - main_figure: pn.pane.Plotly - residual_figure: pn.pane.Plotly - - -def create_plotly_pane(cluster, plane): - data = data_singleton() - fig, residual_fig = check( - fits=data.fits_path, - data_path=data.data_path, - clusters=[cluster], - plane=[plane], - # config_path=data.config_path, - plotly=True, - ) - - fig["layout"].update(height=800, width=800) - fig = fig.to_dict() - residual_fig = residual_fig.to_dict() - return pn.Column(pn.pane.Plotly(fig), pn.pane.Plotly(residual_fig)) - - -app = typer.Typer() - - -@app.command() -def create_check_panel( - fits_path: Path, - data_path: Path, - config_path: Path = Path("./peakipy.config"), - edit_panel: bool = False, - test: bool = False, -): - data = data_singleton() - data.fits_path = fits_path - data.data_path = data_path - data.config_path = config_path - data.load_dataframe() - - clusters = [(row.clustid, row.memcnt) for _, row in data.df.iterrows()] - - select_cluster = pn.widgets.Select( - name="Cluster (number of peaks)", options={f"{c} ({m})": c for c, m in clusters} - ) - select_plane = pn.widgets.Select( - name="Plane", options={f"{plane}": plane for plane in data.df.plane.unique()} - ) - result_table_pane = pn.bind(get_cluster, select_cluster) - interactive_plotly_pane = pn.bind( - create_plotly_pane, cluster=select_cluster, plane=select_plane - ) - info_pane = pn.pane.Markdown( - "Select a cluster and plane to look at from the dropdown menus" - ) - check_pane = pn.Card( - # info_pane, - # pn.Row(select_cluster, select_plane), - pn.Row( - pn.Column( - pn.Row( - pn.Card(interactive_plotly_pane, title="Fitted cluster"), - pn.Column(info_pane, select_cluster, select_plane), - ), - pn.Card(result_table_pane, title="Fitted parameters for cluster"), - ) - ), - title="Peakipy check", - ) - if edit_panel: - return check_pane - elif test: - return - else: - check_pane.show() - - -if __name__ == "__main__": - app() diff --git a/peakipy/cli/edit_panel.py b/peakipy/cli/edit_panel.py deleted file mode 100644 index f44b5faa..00000000 --- a/peakipy/cli/edit_panel.py +++ /dev/null @@ -1,148 +0,0 @@ -from pathlib import Path -from dataclasses import dataclass, field -from functools import lru_cache - -import panel as pn -from typer import Typer - -from peakipy.cli.edit import BokehScript -from peakipy.cli.check_panel import create_check_panel - -app = Typer() - -pn.extension("plotly") -pn.config.theme = 
"dark" - - -@dataclass -class Data: - peaklist_path: Path = Path("./test.csv") - data_path: Path = Path("./test.ft2") - _bs: BokehScript = field(init=False) - - def load_data(self): - self._bs = BokehScript(self.peaklist_path, self.data_path) - - @property - def bs(self): - return self._bs - - -@lru_cache(maxsize=1) -def data_singleton(): - return Data() - - -def update_peakipy_data_on_edit_of_table(event): - data = data_singleton() - column = event.column - row = event.row - value = event.value - data.bs.peakipy_data.df.loc[row, column] = value - data.bs.update_memcnt() - - -def panel_app(test=False): - data = data_singleton() - bs = data.bs - bokeh_pane = pn.pane.Bokeh(bs.p) - spectrum_view_settings = pn.WidgetBox( - "# Contour settings", bs.pos_neg_contour_radiobutton, bs.contour_start - ) - save_peaklist_box = pn.WidgetBox( - "# Save your peaklist", - bs.savefilename, - bs.button, - pn.layout.Divider(), - bs.exit_button, - ) - recluster_settings = pn.WidgetBox( - "# Re-cluster your peaks", - bs.clust_div, - bs.struct_el, - bs.struct_el_size, - pn.layout.Divider(), - bs.recluster_warning, - bs.recluster, - sizing_mode="stretch_width", - ) - button = pn.widgets.Button(name="Fit selected cluster(s)", button_type="primary") - fit_controls = pn.WidgetBox( - "# Fit controls", - button, - pn.layout.Divider(), - bs.select_plane, - bs.checkbox_group, - pn.layout.Divider(), - bs.select_reference_planes_help, - bs.select_reference_planes, - pn.layout.Divider(), - bs.set_initial_fit_threshold_help, - bs.set_initial_fit_threshold, - pn.layout.Divider(), - bs.select_fixed_parameters_help, - bs.select_fixed_parameters, - pn.layout.Divider(), - bs.select_lineshape_radiobuttons_help, - bs.select_lineshape_radiobuttons, - ) - - mask_adjustment_controls = pn.WidgetBox( - "# Fitting mask adjustment", bs.slider_X_RADIUS, bs.slider_Y_RADIUS - ) - - # bs.source.on_change() - def fit_peaks_button_click(event): - check_app.loading = True - bs.fit_selected(None) - check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True) - check_app.objects = check_panel.objects - check_app.loading = False - - button.on_click(fit_peaks_button_click) - - def update_source_selected_indices(event): - # print(bs.tablulator_widget.selection) - # hack to make current selection however, only allows one selection - # at a time - bs.tablulator_widget._update_selection([event.value]) - bs.source.selected.indices = bs.tablulator_widget.selection - # print(bs.tablulator_widget.selection) - - bs.tablulator_widget.on_click(update_source_selected_indices) - bs.tablulator_widget.on_edit(update_peakipy_data_on_edit_of_table) - - template = pn.template.BootstrapTemplate( - title="Peakipy", - sidebar=[mask_adjustment_controls, fit_controls], - ) - spectrum = pn.Card( - pn.Column( - pn.Row( - bokeh_pane, - pn.Column(spectrum_view_settings, save_peaklist_box), - recluster_settings, - ), - bs.tablulator_widget, - ), - title="Peakipy fit", - ) - check_app = pn.Card(title="Peakipy check") - template.main.append(pn.Column(check_app, spectrum)) - if test: - return - else: - template.show() - - -@app.command() -def main(peaklist_path: Path, data_path: Path, test: bool = False): - data = data_singleton() - data.peaklist_path = peaklist_path - data.data_path = data_path - data.load_data() - panel_app(test=test) - - -if __name__ == "__main__": - app() diff --git a/peakipy/cli/main.py b/peakipy/cli/main.py index f22998c3..fce08f4f 100644 --- a/peakipy/cli/main.py +++ b/peakipy/cli/main.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 -import 
os import json import shutil from pathlib import Path -from enum import Enum +from functools import lru_cache +from dataclasses import dataclass, field from typing import Optional, Tuple, List, Annotated from multiprocessing import Pool, cpu_count @@ -17,10 +17,10 @@ from skimage.filters import threshold_otsu from mpl_toolkits.mplot3d import axes3d -from matplotlib import cm from matplotlib.backends.backend_pdf import PdfPages import plotly.io as pio +import panel as pn pio.templates.default = "plotly_dark" @@ -81,6 +81,57 @@ create_residual_figure, create_matplotlib_figure, ) +from peakipy.cli.edit import BokehScript + +pn.extension("plotly") +pn.config.theme = "dark" + + +@dataclass +class PlotContainer: + main_figure: pn.pane.Plotly + residual_figure: pn.pane.Plotly + + +@lru_cache(maxsize=1) +def data_singleton_edit(): + return EditData() + + +@lru_cache(maxsize=1) +def data_singleton_check(): + return CheckData() + + +@dataclass +class EditData: + peaklist_path: Path = Path("./test.csv") + data_path: Path = Path("./test.ft2") + _bs: BokehScript = field(init=False) + + def load_data(self): + self._bs = BokehScript(self.peaklist_path, self.data_path) + + @property + def bs(self): + return self._bs + + +@dataclass +class CheckData: + fits_path: Path = Path("./fits.csv") + data_path: Path = Path("./test.ft2") + config_path: Path = Path("./peakipy.config") + _df: pd.DataFrame = field(init=False) + + def load_dataframe(self): + self._df = validate_fit_dataframe(pd.read_csv(self.fits_path)) + print("Here") + + @property + def df(self): + return self._df + app = typer.Typer() @@ -497,24 +548,46 @@ def fit( run_log(log_path) +@app.command() +def edit(peaklist_path: Path, data_path: Path, test: bool = False): + data = data_singleton_edit() + data.peaklist_path = peaklist_path + data.data_path = data_path + data.load_data() + panel_app(test=test) + + fits_help = "CSV file containing peakipy fits" +panel_help = "Open fits in browser with an interactive panel app" +individual_help = "Show individual peak fits as surfaces" +label_help = "Add peak assignment labels" +first_help = "Show only first plane" +plane_help = "Select planes to plot" +clusters_help = "Select clusters to plot" +colors_help = "Customize colors for data and fit lines respectively" +show_help = "Open interactive matplotlib window" +outname_help = "Name of output multipage pdf" @app.command(help="Interactive plots for checking fits") def check( - fits: Annotated[Path, typer.Argument(help=fits_help)], + fits_path: Annotated[Path, typer.Argument(help=fits_help)], data_path: Annotated[Path, typer.Argument(help=data_path_help)], - clusters: Optional[List[int]] = None, - plane: Optional[List[int]] = None, - outname: Path = Path("plots.pdf"), - first: bool = False, - show: bool = False, - label: bool = False, - individual: bool = False, - ccpn: bool = False, + panel: Annotated[bool, typer.Option(help=panel_help)] = False, + clusters: Annotated[Optional[List[int]], typer.Option(help=clusters_help)] = None, + plane: Annotated[Optional[List[int]], typer.Option(help=plane_help)] = None, + first: Annotated[bool, typer.Option(help=first_help)] = False, + show: Annotated[bool, typer.Option(help=show_help)] = False, + label: Annotated[bool, typer.Option(help=label_help)] = False, + individual: Annotated[bool, typer.Option(help=individual_help)] = False, + outname: Annotated[Path, typer.Option(help=outname_help)] = Path("plots.pdf"), + colors: Annotated[Tuple[str, str], typer.Option(help=colors_help)] = ( + "#5e3c99", + "#e66101", + ), rcount: 
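The refactor keeps application state in dataclasses handed out by lru_cache(maxsize=1) factories: because the cache always returns the first constructed instance, every caller reads and mutates one shared object. A minimal sketch of that singleton idiom, standard library only:

from dataclasses import dataclass
from functools import lru_cache

@dataclass
class State:
    path: str = "./fits.csv"

@lru_cache(maxsize=1)
def state_singleton() -> State:
    return State()  # constructed once, cached thereafter

state_singleton().path = "other.csv"
assert state_singleton().path == "other.csv"  # same instance everywhere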
     ccount: int = 50,
-    colors: Tuple[str, str] = ("#5e3c99", "#e66101"),
+    ccpn: bool = False,
     plotly: bool = False,
     test: bool = False,
 ):
@@ -553,7 +626,7 @@ def check(
     verb : bool
         verbose mode
     """
-    log_path = create_log_path(fits.parent)
+    log_path = create_log_path(fits_path.parent)
     columns_to_print = [
         "assignment",
         "clustid",
@@ -567,13 +640,19 @@ def check(
         "fwhm_y_hz",
         "lineshape",
     ]
-    fits = validate_fit_dataframe(pd.read_csv(fits))
+    fits = validate_fit_dataframe(pd.read_csv(fits_path))
     args = {}
     # get dims from config file
     config_path = data_path.parent / "peakipy.config"
    args, config = update_args_with_values_from_config_file(args, config_path)
     dims = config.get("dims", (1, 2, 3))
 
+    if panel:
+        create_check_panel(
+            fits_path=fits_path, data_path=data_path, config_path=config_path, test=test
+        )
+        return
+
     ccpn_flag = ccpn
     if ccpn_flag:
         from ccpn.ui.gui.widgets.PlotterWidget import PlotterWidget
@@ -674,13 +753,207 @@ def check(
         if first:
             break
 
-    with PdfPages(outname) as pdf:
-        for plot_data in all_plot_data:
-            create_matplotlib_figure(
-                plot_data, pdf, individual, label, ccpn_flag, show, test
-            )
+    with PdfPages(data_path.parent / outname) as pdf:
+        for plot_data in all_plot_data:
+            create_matplotlib_figure(
+                plot_data, pdf, individual, label, ccpn_flag, show, test
+            )
+
+    run_log(log_path)
+
+
+def create_plotly_pane(cluster, plane):
+    data = data_singleton_check()
+    fig, residual_fig = check(
+        fits_path=data.fits_path,
+        data_path=data.data_path,
+        clusters=[cluster],
+        plane=[plane],
+        # config_path=data.config_path,
+        plotly=True,
+    )
+    fig["layout"].update(height=800, width=800)
+    fig = fig.to_dict()
+    residual_fig = residual_fig.to_dict()
+    return pn.Column(pn.pane.Plotly(fig), pn.pane.Plotly(residual_fig))
+
+
+def get_cluster(cluster):
+    data = data_singleton_check()
+    cluster_groups = data.df.groupby("clustid")
+    cluster_group = cluster_groups.get_group(cluster)
+    df_pane = pn.widgets.Tabulator(
+        cluster_group[
+            [
+                "assignment",
+                "clustid",
+                "memcnt",
+                "plane",
+                "amp",
+                "height",
+                "center_x_ppm",
+                "center_y_ppm",
+                "fwhm_x_hz",
+                "fwhm_y_hz",
+                "lineshape",
+            ]
+        ],
+        selectable=False,
+        disabled=True,
+    )
+    return df_pane
+
+
+def update_peakipy_data_on_edit_of_table(event):
+    data = data_singleton_edit()
+    column = event.column
+    row = event.row
+    value = event.value
+    data.bs.peakipy_data.df.loc[row, column] = value
+    data.bs.update_memcnt()
+
+
+def panel_app(test=False):
+    data = data_singleton_edit()
+    bs = data.bs
+    bokeh_pane = pn.pane.Bokeh(bs.p)
+    spectrum_view_settings = pn.WidgetBox(
+        "# Contour settings", bs.pos_neg_contour_radiobutton, bs.contour_start
+    )
+    save_peaklist_box = pn.WidgetBox(
+        "# Save your peaklist",
+        bs.savefilename,
+        bs.button,
+        pn.layout.Divider(),
+        bs.exit_button,
+    )
+    recluster_settings = pn.WidgetBox(
+        "# Re-cluster your peaks",
+        bs.clust_div,
+        bs.struct_el,
+        bs.struct_el_size,
+        pn.layout.Divider(),
+        bs.recluster_warning,
+        bs.recluster,
+        sizing_mode="stretch_width",
+    )
+    button = pn.widgets.Button(name="Fit selected cluster(s)", button_type="primary")
+    fit_controls = pn.WidgetBox(
+        "# Fit controls",
+        button,
+        pn.layout.Divider(),
+        bs.select_plane,
+        bs.checkbox_group,
+        pn.layout.Divider(),
+        bs.select_reference_planes_help,
+        bs.select_reference_planes,
+        pn.layout.Divider(),
+        bs.set_initial_fit_threshold_help,
+        bs.set_initial_fit_threshold,
+        pn.layout.Divider(),
+        bs.select_fixed_parameters_help,
+        bs.select_fixed_parameters,
+        pn.layout.Divider(),
+        bs.select_lineshape_radiobuttons_help,
+        bs.select_lineshape_radiobuttons,
+    )
+
+    mask_adjustment_controls = pn.WidgetBox(
+        "# Fitting mask adjustment", bs.slider_X_RADIUS, bs.slider_Y_RADIUS
+    )
+
+    def fit_peaks_button_click(event):
+        check_app.loading = True
+        bs.fit_selected(None)
+        check_panel = create_check_panel(bs.TEMP_OUT_CSV, bs.data_path, edit_panel=True)
+        check_app.objects = check_panel.objects
+        check_app.loading = False
+
+    button.on_click(fit_peaks_button_click)
+
+    def update_source_selected_indices(event):
+        # hack to mirror the Tabulator selection in the Bokeh source;
+        # this currently allows only one selection at a time
+        bs.tablulator_widget._update_selection([event.value])
+        bs.source.selected.indices = bs.tablulator_widget.selection
+
+    bs.tablulator_widget.on_click(update_source_selected_indices)
+    bs.tablulator_widget.on_edit(update_peakipy_data_on_edit_of_table)
+
+    template = pn.template.BootstrapTemplate(
+        title="Peakipy",
+        sidebar=[mask_adjustment_controls, fit_controls],
+    )
+    spectrum = pn.Card(
+        pn.Column(
+            pn.Row(
+                bokeh_pane,
+                pn.Column(spectrum_view_settings, save_peaklist_box),
+                recluster_settings,
+            ),
+            bs.tablulator_widget,
+        ),
+        title="Peakipy fit",
+    )
+    check_app = pn.Card(title="Peakipy check")
+    template.main.append(pn.Column(check_app, spectrum))
+    if test:
+        return
+    else:
+        template.show()
+
+
+def create_check_panel(
+    fits_path: Path,
+    data_path: Path,
+    config_path: Path = Path("./peakipy.config"),
+    edit_panel: bool = False,
+    test: bool = False,
+):
+    data = data_singleton_check()
+    data.fits_path = fits_path
+    data.data_path = data_path
+    data.config_path = config_path
+    data.load_dataframe()
+
+    clusters = [(row.clustid, row.memcnt) for _, row in data.df.iterrows()]
 
-    run_log(log_path)
+    select_cluster = pn.widgets.Select(
+        name="Cluster (number of peaks)", options={f"{c} ({m})": c for c, m in clusters}
+    )
+    select_plane = pn.widgets.Select(
+        name="Plane", options={f"{plane}": plane for plane in data.df.plane.unique()}
+    )
+    result_table_pane = pn.bind(get_cluster, select_cluster)
+    interactive_plotly_pane = pn.bind(
+        create_plotly_pane, cluster=select_cluster, plane=select_plane
+    )
+    info_pane = pn.pane.Markdown(
+        "Select a cluster and plane to look at from the dropdown menus"
+    )
+    check_pane = pn.Card(
+        pn.Row(
+            pn.Column(
+                pn.Row(
+                    pn.Card(interactive_plotly_pane, title="Fitted cluster"),
+                    pn.Column(info_pane, select_cluster, select_plane),
+                ),
+                pn.Card(result_table_pane, title="Fitted parameters for cluster"),
+            )
+        ),
+        title="Peakipy check",
+    )
+    if edit_panel:
+        return check_pane
+    elif test:
+        return
+    else:
+        check_pane.show()
 
 
 if __name__ == "__main__":
diff --git a/peakipy/io.py b/peakipy/io.py
index 88ed5be8..8df438f6 100644
--- a/peakipy/io.py
+++ b/peakipy/io.py
@@ -733,8 +733,9 @@ def mask_method(self, overlap=1.0, l_struc=None):
 
         return ClustersResult(labeled_array, num_features, mask, peaks)
 
-    def to_fuda(self, fname="params.fuda"):
-        with open("peaks.fuda", "w") as peaks_fuda:
+    def to_fuda(self):
+        fname = self.peaklist_path.parent / "params.fuda"
+        with open(self.peaklist_path.parent / "peaks.fuda", "w") as peaks_fuda:
             for ass, f1_ppm, f2_ppm in zip(self.df.ASS, self.df.Y_PPM, self.df.X_PPM):
                 peaks_fuda.write(f"{ass}\t{f1_ppm:.3f}\t{f2_ppm:.3f}\n")
         groups = self.df.groupby("CLUSTID")
diff --git a/test/test_cli.py b/test/test_cli.py
index d89454bd..4704bbb8 100644
--- a/test/test_cli.py +++ b/test/test_cli.py @@ -4,8 +4,6 @@ import pytest import peakipy.cli.main -import peakipy.cli.check_panel -import peakipy.cli.edit_panel from peakipy.cli.main import PeaklistFormat, Lineshape from peakipy.io import StrucEl @@ -179,7 +177,7 @@ def test_fit_main_with_pv_pv(protein_L): def test_check_main_with_default(protein_L): args = dict( - fits=protein_L / Path("fits_PV.csv"), + fits_path=protein_L / Path("fits_PV.csv"), data_path=protein_L / Path("test1.ft2"), clusters=[1], first=True, @@ -193,7 +191,7 @@ def test_check_main_with_default(protein_L): def test_check_main_with_gaussian(protein_L): args = dict( - fits=protein_L / Path("fits_G.csv"), + fits_path=protein_L / Path("fits_G.csv"), data_path=protein_L / Path("test1.ft2"), clusters=[1], first=True, @@ -207,7 +205,7 @@ def test_check_main_with_gaussian(protein_L): def test_check_main_with_lorentzian(protein_L): args = dict( - fits=protein_L / Path("fits_L.csv"), + fits_path=protein_L / Path("fits_L.csv"), data_path=protein_L / Path("test1.ft2"), clusters=[1], first=True, @@ -221,7 +219,7 @@ def test_check_main_with_lorentzian(protein_L): def test_check_main_with_voigt(protein_L): args = dict( - fits=protein_L / Path("fits_V.csv"), + fits_path=protein_L / Path("fits_V.csv"), data_path=protein_L / Path("test1.ft2"), clusters=[1], first=True, @@ -235,7 +233,7 @@ def test_check_main_with_voigt(protein_L): def test_check_main_with_pv_pv(protein_L): args = dict( - fits=protein_L / Path("fits_PV_PV.csv"), + fits_path=protein_L / Path("fits_PV_PV.csv"), data_path=protein_L / Path("test1.ft2"), clusters=[1], first=True, @@ -251,41 +249,30 @@ def test_check_panel_PVPV(protein_L): args = dict( fits_path=protein_L / Path("fits_PV_PV.csv"), data_path=protein_L / Path("test1.ft2"), - config_path=protein_L / Path("peakipy.config"), test=True, + panel=True, ) - peakipy.cli.check_panel.create_check_panel(**args) + peakipy.cli.main.check(**args) def test_check_panel_PV(protein_L): args = dict( fits_path=protein_L / Path("fits_PV.csv"), data_path=protein_L / Path("test1.ft2"), - config_path=protein_L / Path("peakipy.config"), test=True, + panel=True, ) - peakipy.cli.check_panel.create_check_panel(**args) + peakipy.cli.main.check(**args) def test_check_panel_V(protein_L): args = dict( fits_path=protein_L / Path("fits_V.csv"), data_path=protein_L / Path("test1.ft2"), - config_path=protein_L / Path("peakipy.config"), test=True, + panel=True, ) - peakipy.cli.check_panel.create_check_panel(**args) - - -def test_check_panel_edit(protein_L): - args = dict( - fits_path=protein_L / Path("fits_V.csv"), - data_path=protein_L / Path("test1.ft2"), - config_path=protein_L / Path("peakipy.config"), - edit_panel=True, - test=True, - ) - peakipy.cli.check_panel.create_check_panel(**args) + peakipy.cli.main.check(**args) def test_edit_panel(protein_L): @@ -294,4 +281,4 @@ def test_edit_panel(protein_L): data_path=protein_L / Path("test1.ft2"), test=True, ) - peakipy.cli.edit_panel.main(**args) + peakipy.cli.main.edit(**args) From 68a07030f3c82918a3ee2e226743d685df29ef41 Mon Sep 17 00:00:00 2001 From: Jacob Brady Date: Thu, 30 May 2024 09:12:57 -0400 Subject: [PATCH 37/37] adding coverage badge --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b35fb261..243e0cb8 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # Peakipy - NMR peak integration/deconvolution using python ![Test](https://github.com/j-brady/peakipy/actions/workflows/ci.yml/badge.svg) 
+[![codecov](https://codecov.io/gh/j-brady/peakipy/graph/badge.svg?token=1F90QI5WMS)](https://codecov.io/gh/j-brady/peakipy) + [peakipy documentation](https://j-brady.github.io/peakipy) @@ -26,7 +28,7 @@ should have been added to your path. ## Inputs -1. Peak list (NMRPipe, Analysis v2.4, Sparky) +1. Peak list (NMRPipe, Analysis v2.4, Sparky) 2. NMRPipe frequency domain dataset (2D or Pseudo 3D) There are four main commands:
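
The refactor above folds the former `edit_panel`/`check_panel` modules into `peakipy.cli.main`, so the panel apps are reachable both as typer commands (`edit`, and `check` with `--panel`) and from Python, which is how the updated tests drive them. A minimal sketch of the programmatic route, assuming a peak list `test.csv`, a fits table `fits_PV.csv`, and a spectrum `test1.ft2` like those used in the test suite (the file names are illustrative, not fixed by the API):

```python
from pathlib import Path

import peakipy.cli.main

# Open the interactive check panel instead of writing a PDF report;
# panel=True routes check() to create_check_panel().
peakipy.cli.main.check(
    fits_path=Path("fits_PV.csv"),
    data_path=Path("test1.ft2"),
    panel=True,
)

# Launch the peak-editing app; this builds a BokehScript from the peak
# list and spectrum and serves the Panel template in the browser.
peakipy.cli.main.edit(
    peaklist_path=Path("test.csv"),
    data_path=Path("test1.ft2"),
)
```

Passing `test=True`, as the test suite does, builds the panels without calling `.show()`.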
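
Shared state between these commands and their Panel callbacks is held by `data_singleton_edit()`/`data_singleton_check()`: a zero-argument function wrapped in `@lru_cache(maxsize=1)` constructs its object once and thereafter always returns the same instance, which is why `edit` and `check` mutate the cached object's paths rather than building a new one. A minimal sketch of the idiom, with `AppState` and `app_state` as hypothetical names used only for illustration:

```python
from dataclasses import dataclass
from functools import lru_cache
from pathlib import Path


@dataclass
class AppState:
    # hypothetical stand-in for EditData/CheckData
    data_path: Path = Path("./test.ft2")


@lru_cache(maxsize=1)
def app_state() -> AppState:
    # With no arguments there is a single cache entry: the first call
    # constructs AppState and every later call returns that instance.
    return AppState()


state = app_state()
state.data_path = Path("./other.ft2")  # mutate the shared instance
assert app_state() is state  # same object everywhere
```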