From 57ec6c22409724b01deb950f8eb546def2eae82b Mon Sep 17 00:00:00 2001 From: sambokar Date: Fri, 6 Sep 2024 16:28:02 -0400 Subject: [PATCH] full-scope changes. debounce added to layout files and selection systems. datagridcommons debugged for deep bugs found during add/editing row process. Additional stabilization steps need to be taken, but website is partially functional and testing environment has been prepared --- frontend/app/(hub)/layout.tsx | 165 ++-- .../[dataType]/[[...slugs]]/route.ts | 4 +- frontend/components/client/rollovermodal.tsx | 6 +- .../applications/attributesdatagrid.tsx | 8 +- .../components/datagrids/datagridcommons.tsx | 204 +++- .../components/datagrids/reentrydatamodal.tsx | 16 +- .../components/processors/processcensus.tsx | 57 +- .../components/processors/processormacros.tsx | 5 + .../processors/processpersonnel.tsx | 64 +- .../components/processors/processspecies.tsx | 25 +- .../processors/validationshardcoded.tsx | 925 ------------------ frontend/components/sidebar.tsx | 12 +- frontend/components/themeregistry/theme.ts | 8 + .../displayparseddatagrid.tsx | 19 +- frontend/config/utils.ts | 66 +- frontend/package-lock.json | 5 +- frontend/playwright.config.ts | 20 +- frontend/sqlscripting/resetautoincrements.sql | 165 ++++ frontend/sqlscripting/updatedviews.sql | 2 +- 19 files changed, 631 insertions(+), 1145 deletions(-) delete mode 100644 frontend/components/processors/validationshardcoded.tsx create mode 100644 frontend/sqlscripting/resetautoincrements.sql diff --git a/frontend/app/(hub)/layout.tsx b/frontend/app/(hub)/layout.tsx index a8d8fb15..060593e8 100644 --- a/frontend/app/(hub)/layout.tsx +++ b/frontend/app/(hub)/layout.tsx @@ -4,18 +4,18 @@ import { title } from '@/config/primitives'; import { useSession } from 'next-auth/react'; import { redirect, usePathname } from 'next/navigation'; import dynamic from 'next/dynamic'; -import { Box, IconButton, Stack, Tooltip, Typography } from '@mui/joy'; +import { Box, IconButton, Stack, Typography } from '@mui/joy'; import Divider from '@mui/joy/Divider'; import { useLoading } from '@/app/contexts/loadingprovider'; import { getAllSchemas } from '@/components/processors/processorhelperfunctions'; import { useOrgCensusContext, usePlotContext, useSiteContext } from '@/app/contexts/userselectionprovider'; import { useOrgCensusListDispatch, usePlotListDispatch, useQuadratListDispatch, useSiteListDispatch } from '@/app/contexts/listselectionprovider'; import { getEndpointHeaderName, siteConfig } from '@/config/macros/siteconfigs'; -import { AcaciaVersionTypography } from '@/styles/versions/acaciaversion'; import GithubFeedbackModal from '@/components/client/githubfeedbackmodal'; import HelpOutlineOutlinedIcon from '@mui/icons-material/HelpOutlineOutlined'; import { useLockAnimation } from '../contexts/lockanimationcontext'; import { createAndUpdateCensusList } from '@/config/sqlrdsdefinitions/timekeeping'; +import { AcaciaVersionTypography } from '@/styles/versions/acaciaversion'; const Sidebar = dynamic(() => import('@/components/sidebar'), { ssr: false }); const Header = dynamic(() => import('@/components/header'), { ssr: false }); @@ -67,7 +67,25 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { const coreDataLoaded = siteListLoaded && plotListLoaded && censusListLoaded && quadratListLoaded; const { isPulsing } = useLockAnimation(); + const lastExecutedRef = useRef(null); + // Refs for debouncing + const plotLastExecutedRef = useRef(null); + const censusLastExecutedRef = 
useRef(null); + const quadratLastExecutedRef = useRef(null); + + // Debounce delay + const debounceDelay = 100; + const fetchSiteList = useCallback(async () => { + const now = Date.now(); + if (lastExecutedRef.current && now - lastExecutedRef.current < debounceDelay + 200) { + console.log('Debounced fetchSiteList: Too soon since last call.'); + return; + } + + // Update last executed timestamp + lastExecutedRef.current = now; + try { setLoading(true, 'Loading Sites...'); if (session && !siteListLoaded) { @@ -86,74 +104,92 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { } }, [session, siteListLoaded, siteListDispatch, setLoading]); - const loadData = useCallback(async () => { - try { - setLoading(true, 'Loading data...'); - - const promises = []; + const loadPlotData = useCallback(async () => { + const now = Date.now(); + if (plotLastExecutedRef.current && now - plotLastExecutedRef.current < debounceDelay) { + console.log('Debounced loadPlotData: Too soon since last call.'); + return; + } + plotLastExecutedRef.current = now; - // Load plot data + try { + setLoading(true, 'Loading plot data...'); if (currentSite && !plotListLoaded) { - const loadPlots = fetch(`/api/fetchall/plots?schema=${currentSite?.schemaName || ''}`) - .then(response => response.json()) - .then(plotsData => { - if (!plotsData) throw new Error('Failed to load plots data'); - if (plotListDispatch) return plotListDispatch({ plotList: plotsData }); - }); - promises.push(loadPlots); + const response = await fetch(`/api/fetchall/plots?schema=${currentSite?.schemaName || ''}`); + const plotsData = await response.json(); + if (!plotsData) throw new Error('Failed to load plots data'); + if (plotListDispatch) await plotListDispatch({ plotList: plotsData }); setPlotListLoaded(true); } + } catch (error) { + console.error('Error loading plot data:', error); + } finally { + setLoading(false); + } + }, [currentSite, plotListLoaded, plotListDispatch, setLoading]); + + // Function to load census data with debounce + const loadCensusData = useCallback(async () => { + const now = Date.now(); + if (censusLastExecutedRef.current && now - censusLastExecutedRef.current < debounceDelay) { + console.log('Debounced loadCensusData: Too soon since last call.'); + return; + } + censusLastExecutedRef.current = now; - // Load census data + try { + setLoading(true, 'Loading census data...'); if (currentSite && currentPlot && !censusListLoaded) { - const loadCensus = fetch(`/api/fetchall/census/${currentPlot.plotID}?schema=${currentSite.schemaName}`) - .then(response => response.json()) - .then(async censusRDSLoad => { - if (!censusRDSLoad) throw new Error('Failed to load census data'); - const censusList = await createAndUpdateCensusList(censusRDSLoad); - if (censusListDispatch) return censusListDispatch({ censusList }); - }); - promises.push(loadCensus); + const response = await fetch(`/api/fetchall/census/${currentPlot.plotID}?schema=${currentSite.schemaName}`); + const censusRDSLoad = await response.json(); + if (!censusRDSLoad) throw new Error('Failed to load census data'); + const censusList = await createAndUpdateCensusList(censusRDSLoad); + if (censusListDispatch) await censusListDispatch({ censusList }); setCensusListLoaded(true); } + } catch (error) { + console.error('Error loading census data:', error); + } finally { + setLoading(false); + } + }, [currentSite, currentPlot, censusListLoaded, censusListDispatch, setLoading]); - // Load quadrat data + // Function to load quadrat data with debounce + const 
loadQuadratData = useCallback(async () => { + const now = Date.now(); + if (quadratLastExecutedRef.current && now - quadratLastExecutedRef.current < debounceDelay) { + console.log('Debounced loadQuadratData: Too soon since last call.'); + return; + } + quadratLastExecutedRef.current = now; + + try { + setLoading(true, 'Loading quadrat data...'); if (currentSite && currentPlot && currentCensus && !quadratListLoaded) { - const loadQuadrats = fetch(`/api/fetchall/quadrats/${currentPlot.plotID}/${currentCensus.plotCensusNumber}?schema=${currentSite.schemaName}`) - .then(response => response.json()) - .then(quadratsData => { - if (!quadratsData) throw new Error('Failed to load quadrats data'); - if (quadratListDispatch) return quadratListDispatch({ quadratList: quadratsData }); - }); - promises.push(loadQuadrats); + const response = await fetch(`/api/fetchall/quadrats/${currentPlot.plotID}/${currentCensus.plotCensusNumber}?schema=${currentSite.schemaName}`); + const quadratsData = await response.json(); + if (!quadratsData) throw new Error('Failed to load quadrats data'); + if (quadratListDispatch) await quadratListDispatch({ quadratList: quadratsData }); setQuadratListLoaded(true); } - - // Wait for all promises to resolve - await Promise.all(promises); } catch (error) { - console.error('Error loading data:', error); + console.error('Error loading quadrat data:', error); } finally { setLoading(false); } - }, [ - currentSite, - currentPlot, - currentCensus, - plotListLoaded, - censusListLoaded, - quadratListLoaded, - plotListDispatch, - censusListDispatch, - quadratListDispatch, - setLoading - ]); + }, [currentSite, currentPlot, currentCensus, quadratListLoaded, quadratListDispatch, setLoading]); useEffect(() => { - if (currentSite || currentPlot || currentCensus) { - loadData().catch(console.error); + if (currentSite && siteListLoaded) { + loadPlotData().catch(console.error); + } + if (currentSite && siteListLoaded && currentPlot && plotListLoaded) { + loadCensusData().catch(console.error); + } + if (currentSite && siteListLoaded && currentPlot && plotListLoaded && currentCensus && censusListLoaded) { + loadQuadratData().catch(console.error); } - }, [currentSite, currentPlot, currentCensus, loadData]); + }, [currentSite, currentPlot, currentCensus, loadPlotData, loadCensusData, loadQuadratData]); useEffect(() => { if (manualReset) { @@ -163,9 +199,8 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { setCensusListLoaded(false); setQuadratListLoaded(false); setManualReset(false); - loadData().catch(console.error); } - }, [manualReset, loadData]); + }, [manualReset]); useEffect(() => { if (session && !siteListLoaded) { @@ -211,13 +246,16 @@ export default function HubLayout({ children }: { children: React.ReactNode }) { await Promise.all(promises); // After clearing, load the new data - loadData().catch(console.error); + loadPlotData() + .then(() => loadCensusData()) + .then(() => loadQuadratData()) + .catch(console.error); }; if (hasSiteChanged || hasPlotChanged || hasCensusChanged) { clearLists().catch(console.error); } - }, [currentSite, currentPlot, currentCensus, plotListDispatch, censusListDispatch, quadratListDispatch, loadData]); + }, [currentSite, currentPlot, currentCensus, plotListDispatch, censusListDispatch, quadratListDispatch, loadPlotData, loadCensusData, loadQuadratData]); useEffect(() => { // if contexts are reset due to website refresh, system needs to redirect user back to dashboard @@ -325,15 +363,16 @@ export default function HubLayout({ 
children }: { children: React.ReactNode }) { justifyContent: 'center' }} > - {siteConfig.name} + {/*{siteConfig.name}*/} + {siteConfig.name} - - - - {siteConfig.version} - - - + {/**/} + {/* */} + {/* */} + {/* {siteConfig.version}*/} + {/* */} + {/* */} + {/**/} setIsFeedbackModalOpen(true)} diff --git a/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts b/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts index 18562b10..3649c71b 100644 --- a/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts +++ b/frontend/app/api/fixeddata/[dataType]/[[...slugs]]/route.ts @@ -349,7 +349,7 @@ export async function DELETE(request: NextRequest, { params }: { params: { dataT const { [viewConfig.primaryKey]: primaryKeyValue } = deleteRowData; if (!primaryKeyValue) throw new Error(`Primary key value missing for ${viewConfig.primaryKey} in view ${params.dataType}`); - const deleteQuery = `DELETE FROM ${schema}.${viewConfig.table} WHERE ${viewConfig.primaryKey} = ${primaryKeyValue}`; + const deleteQuery = format(`DELETE FROM ?? WHERE ?? = ?`, [`${schema}.${params.dataType}`, viewConfig.primaryKey, primaryKeyValue]); await runQuery(conn, deleteQuery); await conn.commit(); return NextResponse.json({ message: 'Delete successful' }, { status: HTTPResponses.OK }); @@ -357,7 +357,7 @@ export async function DELETE(request: NextRequest, { params }: { params: { dataT // Handle deletion for tables const deleteRowData = MapperFactory.getMapper(params.dataType).demapData([newRow])[0]; const { [demappedGridID]: gridIDKey } = deleteRowData; - const deleteQuery = `DELETE FROM ${schema}.${params.dataType} WHERE ${demappedGridID} = ${gridIDKey}`; + const deleteQuery = format(`DELETE FROM ?? WHERE ?? = ?`, [`${schema}.${params.dataType}`, demappedGridID, gridIDKey]); await runQuery(conn, deleteQuery); await conn.commit(); return NextResponse.json({ message: 'Delete successful' }, { status: HTTPResponses.OK }); diff --git a/frontend/components/client/rollovermodal.tsx b/frontend/components/client/rollovermodal.tsx index 7ec6cbb5..ee486444 100644 --- a/frontend/components/client/rollovermodal.tsx +++ b/frontend/components/client/rollovermodal.tsx @@ -123,7 +123,7 @@ export default function RolloverModal(props: RolloverModalProps) { useEffect(() => { if (open) { - validatePreviousCensusData(); + validatePreviousCensusData().catch(console.error); } }, [open]); @@ -139,7 +139,7 @@ export default function RolloverModal(props: RolloverModalProps) { if (foundCensus) { const plotCensusNumber = foundCensus.plotCensusNumber; - fetchPreviousQuadratsData(plotCensusNumber); + fetchPreviousQuadratsData(plotCensusNumber).catch(console.error); } } }, [selectedQuadratsCensus, censusListContext]); @@ -149,7 +149,7 @@ export default function RolloverModal(props: RolloverModalProps) { const foundCensus = censusListContext?.find(census => census?.dateRanges.some(dateRange => dateRange.censusID === selectedPersonnelCensus.censusID)); if (foundCensus) { const plotCensusNumber = foundCensus.plotCensusNumber; - fetchPreviousPersonnelData(plotCensusNumber); + fetchPreviousPersonnelData(plotCensusNumber).catch(console.error); } } }, [selectedPersonnelCensus, censusListContext]); diff --git a/frontend/components/datagrids/applications/attributesdatagrid.tsx b/frontend/components/datagrids/applications/attributesdatagrid.tsx index 0ac382d1..7787b013 100644 --- a/frontend/components/datagrids/applications/attributesdatagrid.tsx +++ b/frontend/components/datagrids/applications/attributesdatagrid.tsx @@ -11,9 +11,13 @@ import 
UploadParentModal from '@/components/uploadsystemhelpers/uploadparentmoda import { AttributeGridColumns } from '@/components/client/datagridcolumns'; import { FormType } from '@/config/macros/formdetails'; -import { initialAttributesRDSRow } from '@/config/sqlrdsdefinitions/core'; - export default function AttributesDataGrid() { + const initialAttributesRDSRow = { + id: 0, + code: '', + description: '', + status: '' + }; const [rows, setRows] = useState([initialAttributesRDSRow] as GridRowsProp); const [rowCount, setRowCount] = useState(0); const [rowModesModel, setRowModesModel] = useState({}); diff --git a/frontend/components/datagrids/datagridcommons.tsx b/frontend/components/datagrids/datagridcommons.tsx index 38de9663..be48b800 100644 --- a/frontend/components/datagrids/datagridcommons.tsx +++ b/frontend/components/datagrids/datagridcommons.tsx @@ -158,12 +158,16 @@ export default function DataGridCommons(props: Readonly) { }, [refresh, setRefresh]); useEffect(() => { - const initialRowModesModel = rows.reduce((acc, row) => { - acc[row.id] = { mode: GridRowModes.View }; + const updatedRowModesModel = rows.reduce((acc, row) => { + if (!rowModesModel[row.id]) { + acc[row.id] = { mode: GridRowModes.View }; + } else { + acc[row.id] = rowModesModel[row.id]; + } return acc; }, {} as GridRowModesModel); - setRowModesModel(initialRowModesModel); - }, [rows]); + setRowModesModel(updatedRowModesModel); + }, [rows]); // Only runs when rows change const fetchFullData = async () => { setLoading(true, 'Fetching full dataset...'); @@ -193,28 +197,35 @@ export default function DataGridCommons(props: Readonly) { const openConfirmationDialog = (actionType: 'save' | 'delete', actionId: GridRowId) => { setPendingAction({ actionType, actionId }); + const row = rows.find(row => String(row.id) === String(actionId)); if (row) { if (actionType === 'delete') { setIsDeleteDialogOpen(true); } else { + // Open the reentry modal after setting promiseArguments setIsDialogOpen(true); - setRowModesModel(oldModel => ({ - ...oldModel, - [actionId]: { mode: GridRowModes.View } - })); } } }; - const handleConfirmAction = async (selectedRow?: GridRowModel) => { + const handleConfirmAction = async (confirmedRow?: GridRowModel) => { setIsDialogOpen(false); setIsDeleteDialogOpen(false); - if (pendingAction.actionType === 'save' && pendingAction.actionId !== null) { - await performSaveAction(pendingAction.actionId, selectedRow); - } else if (pendingAction.actionType === 'delete' && pendingAction.actionId !== null) { - await performDeleteAction(pendingAction.actionId); + + if (promiseArguments) { + try { + const resolvedRow = confirmedRow || promiseArguments.newRow; + + // Proceed with saving the row after confirmation + await performSaveAction(promiseArguments.newRow.id, resolvedRow); + + setSnackbar({ children: 'Row successfully updated!', severity: 'success' }); + } catch (error: any) { + setSnackbar({ children: `Error: ${error.message}`, severity: 'error' }); + } } + setPendingAction({ actionType: '', actionId: null }); setPromiseArguments(null); // Clear promise arguments after handling }; @@ -229,30 +240,41 @@ export default function DataGridCommons(props: Readonly) { setPromiseArguments(null); // Clear promise arguments after handling }; - const performSaveAction = async (id: GridRowId, selectedRow?: GridRowModel) => { + const performSaveAction = async (id: GridRowId, confirmedRow: GridRowModel) => { if (locked || !promiseArguments) return; + setLoading(true, 'Saving changes...'); + try { + // Set the row to view mode 
after confirmation + setRowModesModel(prevModel => ({ + ...prevModel, + [id]: { mode: GridRowModes.View } + })); + const updatedRow = await updateRow( gridType, currentSite?.schemaName, - selectedRow ?? promiseArguments.newRow, - promiseArguments.oldRow, + confirmedRow, // Use the confirmed row + promiseArguments.oldRow, // Pass the old row for comparison setSnackbar, setIsNewRowAdded, setShouldAddRowAfterFetch, fetchPaginatedData, paginationModel ); + promiseArguments.resolve(updatedRow); } catch (error) { promiseArguments.reject(error); } + const row = rows.find(row => String(row.id) === String(id)); if (row?.isNew) { setIsNewRowAdded(false); setShouldAddRowAfterFetch(false); } + if (handleSelectQuadrat) handleSelectQuadrat(null); triggerRefresh(); setLoading(false); @@ -301,9 +323,33 @@ export default function DataGridCommons(props: Readonly) { } }; - const handleSaveClick = (id: GridRowId) => () => { + const handleSaveClick = (id: GridRowId) => async () => { if (locked) return; - openConfirmationDialog('save', id); + + // Stop edit mode and apply changes locally without committing to the server yet + apiRef.current.stopRowEditMode({ id, ignoreModifications: true }); + + // Get the original row data (before edits) + const oldRow = rows.find(row => String(row.id) === String(id)); + + // Use getRowWithUpdatedValues to fetch all updated field values (the field is ignored in row editing mode) + const updatedRow = apiRef.current.getRowWithUpdatedValues(id, 'anyField'); // 'anyField' is a dummy value, ignored in row editing + + console.log('Old Row:', oldRow); + console.log('Updated Row:', updatedRow); + + if (oldRow && updatedRow) { + // Set promise arguments before opening the modal + setPromiseArguments({ + resolve: (value: GridRowModel) => {}, // Define resolve + reject: (reason?: any) => {}, // Define reject + oldRow, // Pass the old (original) row + newRow: updatedRow // Pass the updated (edited) row + }); + + // Open the confirmation dialog for reentry data + openConfirmationDialog('save', id); + } }; const handleDeleteClick = (id: GridRowId) => () => { @@ -338,6 +384,8 @@ export default function DataGridCommons(props: Readonly) { setPaginationModel({ ...paginationModel, page: existingLastPage }); addNewRowToGrid(); } + + console.log('rowModesModel: ', rowModesModel); }; const handleRefresh = async () => { @@ -390,7 +438,7 @@ export default function DataGridCommons(props: Readonly) { try { const response = await fetch(fetchProcessQuery, { - method: oldRow.isNew ? 'POST' : 'PATCH', + method: oldRow.isNew ? 
'POST' : 'PATCH', // Ensure POST for new row, PATCH for existing headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ oldRow: oldRow, newRow: newRow }) }); @@ -415,7 +463,7 @@ export default function DataGridCommons(props: Readonly) { setShouldAddRowAfterFetch(false); await fetchPaginatedData(paginationModel.page); } - // call refreshmeasurementssummary or viewfulltable if needed: await fetch(`/api/refresh/${gridType}`); + return newRow; } catch (error: any) { setSnackbar({ children: `Error: ${error.message}`, severity: 'error' }); @@ -424,17 +472,77 @@ export default function DataGridCommons(props: Readonly) { }; const processRowUpdate = useCallback( - (newRow: GridRowModel, oldRow: GridRowModel) => - new Promise((resolve, reject) => { - setLoading(true, 'Processing changes...'); - if (newRow.id === '') { - setLoading(false); - return reject(new Error('Primary key id cannot be empty!')); - } + async (newRow: GridRowModel, oldRow: GridRowModel) => { + setLoading(true, 'Processing changes...'); + + // Check if it's a new row and interrupt the API call + if (newRow.isNew || !newRow.id) { + // Set promiseArguments to handle the modal confirmation + setPromiseArguments({ + resolve: async (confirmedRow: GridRowModel) => { + try { + // Proceed with updating the row after confirmation + const updatedRow = await updateRow( + gridType, + currentSite?.schemaName, + confirmedRow, + oldRow, + setSnackbar, + setIsNewRowAdded, + setShouldAddRowAfterFetch, + fetchPaginatedData, + paginationModel + ); + setLoading(false); + return updatedRow; + } catch (error: any) { + setLoading(false); + setSnackbar({ + children: `Error: ${error.message}`, + severity: 'error' + }); + return Promise.reject(error); + } + }, + reject: reason => { + setLoading(false); + return Promise.reject(reason); + }, + oldRow, + newRow + }); + + // Open confirmation dialog to let the user reenter data or confirm + openConfirmationDialog('save', newRow.id); - setPromiseArguments({ resolve, reject, newRow, oldRow }); + // Interrupt processRowUpdate by throwing a rejection to stop any API call until the modal is confirmed + return Promise.reject(new Error('Row update interrupted for new row, awaiting confirmation')); + } + + // For existing rows, proceed with the normal update flow + try { + const updatedRow = await updateRow( + gridType, + currentSite?.schemaName, + newRow, + oldRow, + setSnackbar, + setIsNewRowAdded, + setShouldAddRowAfterFetch, + fetchPaginatedData, + paginationModel + ); + setLoading(false); + return updatedRow; + } catch (error: any) { setLoading(false); - }), + setSnackbar({ + children: `Error: ${error.message}`, + severity: 'error' + }); + return Promise.reject(error); + } + }, [gridType, currentSite?.schemaName, setSnackbar, setIsNewRowAdded, setShouldAddRowAfterFetch, fetchPaginatedData, paginationModel] ); @@ -515,8 +623,8 @@ export default function DataGridCommons(props: Readonly) { ); - const getGridActionsColumn = useCallback((): GridColDef => { - return { + const getGridActionsColumn = useCallback( + (): GridColDef => ({ field: 'actions', type: 'actions', headerName: 'Actions', @@ -532,8 +640,9 @@ export default function DataGridCommons(props: Readonly) { } return [getEnhancedCellAction('Edit', , handleEditClick(id)), getEnhancedCellAction('Delete', , handleDeleteClick(id))]; } - }; - }, [rowModesModel, locked]); + }), + [rowModesModel, locked] + ); const columns = useMemo(() => { return [...gridColumns, getGridActionsColumn()]; @@ -555,19 +664,21 @@ export default function 
DataGridCommons(props: Readonly) { const handleCellKeyDown: GridEventListener<'cellKeyDown'> = (params, event) => { if (event.key === 'Enter' && !locked) { - console.log('params: ', params); - setRowModesModel(prevModel => ({ - ...prevModel, - [params.id]: { mode: GridRowModes.Edit } - })); + event.defaultMuiPrevented = true; + // console.log('params: ', params); + // setRowModesModel(prevModel => ({ + // ...prevModel, + // [params.id]: { mode: GridRowModes.Edit } + // })); } if (event.key === 'Escape') { - console.log('params: ', params); - setRowModesModel(prevModel => ({ - ...prevModel, - [params.id]: { mode: GridRowModes.View, ignoreModifications: true } - })); - handleCancelClick(params.id, event); + event.defaultMuiPrevented = true; + // console.log('params: ', params); + // setRowModesModel(prevModel => ({ + // ...prevModel, + // [params.id]: { mode: GridRowModes.View, ignoreModifications: true } + // })); + // handleCancelClick(params.id, event); } }; @@ -644,12 +755,13 @@ export default function DataGridCommons(props: Readonly) { )} {isDialogOpen && promiseArguments && ( )} {isDeleteDialogOpen && ( diff --git a/frontend/components/datagrids/reentrydatamodal.tsx b/frontend/components/datagrids/reentrydatamodal.tsx index 2748d732..ef24d961 100644 --- a/frontend/components/datagrids/reentrydatamodal.tsx +++ b/frontend/components/datagrids/reentrydatamodal.tsx @@ -2,7 +2,7 @@ import React, { useEffect, useState } from 'react'; import { DataGrid, GridColDef, GridRenderCellParams, GridRowModel } from '@mui/x-data-grid'; import moment from 'moment'; -import { unitSelectionOptions } from '@/config/macros'; +import { ColumnStates, unitSelectionOptions } from '@/config/macros'; import { Box, Button, @@ -30,9 +30,10 @@ interface ReEnterDataModalProps { handleSave: (selectedRow: GridRowModel) => void; columns: GridColDef[]; selectionOptions?: { value: string | number; label: string }[]; + hiddenColumns?: ColumnStates; } -const ReEnterDataModal: React.FC = ({ row, reEnterData, handleClose, handleSave, columns, selectionOptions }) => { +const ReEnterDataModal: React.FC = ({ row, reEnterData, handleClose, handleSave, columns, selectionOptions, hiddenColumns }) => { const [localData, setLocalData] = useState({ ...reEnterData }); const [selectedRow, setSelectedRow] = useState(null); const [isConfirmStep, setIsConfirmStep] = useState(false); @@ -101,6 +102,12 @@ const ReEnterDataModal: React.FC = ({ row, reEnterData, h return value; }; + // Filter the columns based on hiddenColumns + const filteredColumns = columns.filter(column => { + const isHidden = hiddenColumns ? hiddenColumns[column.field] === false : false; + return !isHidden; + }); + return ( @@ -108,7 +115,7 @@ const ReEnterDataModal: React.FC = ({ row, reEnterData, h {!isConfirmStep ? ( - {columns.map(column => { + {filteredColumns.map(column => { const { field, type, editable } = column; const value = localData[field]; if (!editable) { @@ -123,7 +130,6 @@ const ReEnterDataModal: React.FC = ({ row, reEnterData, h } else { valueOptions = selectionOptions; } - // const valueOptions = selectionOptions ? selectionOptions : field !== 'status' ? 
unitSelectionOptions : AttributeStatusOptions; return ( @@ -211,7 +217,7 @@ const ReEnterDataModal: React.FC = ({ row, reEnterData, h ) }, - ...columns.map(col => ({ + ...filteredColumns.map(col => ({ ...col, flex: col.flex || 1, minWidth: col.minWidth || 150, diff --git a/frontend/components/processors/processcensus.tsx b/frontend/components/processors/processcensus.tsx index cc9ca59b..52e7f0c2 100644 --- a/frontend/components/processors/processcensus.tsx +++ b/frontend/components/processors/processcensus.tsx @@ -7,49 +7,45 @@ import { CMAttributesResult, CoreMeasurementsResult } from '@/config/sqlrdsdefin export async function processCensus(props: Readonly): Promise { const { connection, rowData, schema, plotID, censusID } = props; - if (!plotID || !censusID) throw new Error('Process Census: Missing plotID, censusID, quadratID or full name'); + if (!plotID || !censusID) { + console.error('Missing required parameters: plotID or censusID'); + throw new Error('Process Census: Missing plotID or censusID'); + } const { tag, stemtag, spcode, quadrat, lx, ly, coordinateunit, dbh, dbhunit, hom, homunit, date, codes } = rowData; try { await connection.beginTransaction(); + // Fetch species const speciesID = await fetchPrimaryKey(schema, 'species', { SpeciesCode: spcode }, connection, 'SpeciesID'); + + // Fetch quadrat const quadratID = await fetchPrimaryKey( schema, 'quadrats', - { - QuadratName: quadrat, - PlotID: plotID, - CensusID: censusID - }, + { QuadratName: quadrat, PlotID: plotID, CensusID: censusID }, connection, 'QuadratID' ); - // const subquadratID = subquadrat - // ? await fetchPrimaryKey(schema, 'subquadrats', { SubquadratName: subquadrat }, connection) - // : null; if (tag) { + // Handle Tree Upsert const treeID = await handleUpsert(connection, schema, 'trees', { TreeTag: tag, SpeciesID: speciesID }, 'TreeID'); if (stemtag && lx && ly) { + console.log('Processing stem with StemTag:', stemtag); + // Handle Stem Upsert const stemID = await handleUpsert( connection, schema, 'stems', - { - StemTag: stemtag, - TreeID: treeID, - QuadratID: quadratID, - LocalX: lx, - LocalY: ly, - CoordinateUnits: coordinateunit - }, + { StemTag: stemtag, TreeID: treeID, QuadratID: quadratID, LocalX: lx, LocalY: ly, CoordinateUnits: coordinateunit }, 'StemID' ); if (dbh && hom && date) { + // Handle Core Measurement Upsert const coreMeasurementID = await handleUpsert( connection, schema, @@ -67,33 +63,32 @@ export async function processCensus(props: Readonly): Pr 'CoreMeasurementID' ); + // Handle CM Attributes Upsert if (codes) { const parsedCodes = codes .split(';') .map(code => code.trim()) .filter(Boolean); - for (const code of parsedCodes) { - const attributeRows = await runQuery(connection, `SELECT COUNT(*) as count FROM ${schema}.attributes WHERE Code = ?`, [code]); - if (!attributeRows || attributeRows.length === 0 || !attributeRows[0].count) { - throw createError(`Attribute code ${code} not found or query failed.`, { code }); + if (parsedCodes.length === 0) { + console.error('No valid attribute codes found:', codes); + } else { + for (const code of parsedCodes) { + const attributeRows = await runQuery(connection, `SELECT COUNT(*) as count FROM ${schema}.attributes WHERE Code = ?`, [code]); + if (!attributeRows || attributeRows.length === 0 || !attributeRows[0].count) { + throw createError(`Attribute code ${code} not found or query failed.`, { code }); + } + await handleUpsert(connection, schema, 'cmattributes', { CoreMeasurementID: coreMeasurementID, Code: code }, 'CMAID'); } - await 
handleUpsert( - connection, - schema, - 'cmattributes', - { - CoreMeasurementID: coreMeasurementID, - Code: code - }, - 'CMAID' - ); } } + + // Update Census Start/End Dates const combinedQuery = ` UPDATE ${schema}.census c JOIN ( SELECT CensusID, MIN(MeasurementDate) AS FirstMeasurementDate, MAX(MeasurementDate) AS LastMeasurementDate FROM ${schema}.coremeasurements + WHERE CensusID = ${censusID} GROUP BY CensusID ) m ON c.CensusID = m.CensusID SET c.StartDate = m.FirstMeasurementDate, c.EndDate = m.LastMeasurementDate diff --git a/frontend/components/processors/processormacros.tsx b/frontend/components/processors/processormacros.tsx index b51d2622..90815695 100644 --- a/frontend/components/processors/processormacros.tsx +++ b/frontend/components/processors/processormacros.tsx @@ -155,6 +155,11 @@ export async function getConn() { export async function runQuery(connection: PoolConnection, query: string, params?: any[]): Promise { try { + // If params exist, replace any undefined values with null + if (params) { + params = params.map(param => (param === undefined ? null : param)); + } + // Check if the query is for calling a stored procedure if (query.trim().startsWith('CALL')) { // Use `connection.query` for stored procedures diff --git a/frontend/components/processors/processpersonnel.tsx b/frontend/components/processors/processpersonnel.tsx index 9639f658..9d97dac1 100644 --- a/frontend/components/processors/processpersonnel.tsx +++ b/frontend/components/processors/processpersonnel.tsx @@ -1,6 +1,5 @@ -import { SpecialProcessingProps } from '@/components/processors/processormacros'; -import { createError, handleUpsert } from '@/config/utils'; - +import { createError, createInsertOrUpdateQuery, createSelectQuery } from '@/config/utils'; +import { runQuery, SpecialProcessingProps } from '@/components/processors/processormacros'; import { PersonnelResult, RoleResult } from '@/config/sqlrdsdefinitions/personnel'; export async function processPersonnel(props: Readonly) { @@ -8,37 +7,70 @@ export async function processPersonnel(props: Readonly) if (!censusID) throw createError('CensusID missing', { censusID }); if (!rowData.role) throw createError('Row data does not contain a role property', { rowData }); + const { firstname, lastname, role, roledescription } = rowData; + try { await connection.beginTransaction(); // Normalize the role name - const normalizedRole = rowData.role + const normalizedRole = role .toLowerCase() .replace(/([a-z])([A-Z])/g, '$1 $2') .trim(); console.log('normalizedRole: ', normalizedRole); // Handle Role insertion/updation - const roleID = await handleUpsert( - connection, - schema, - 'roles', - { + const roleQuery = createSelectQuery(schema, 'roles', { RoleName: normalizedRole }); + console.log('role query: ', roleQuery); + const existingRoles = await runQuery(connection, roleQuery, [normalizedRole]); + console.log('existing roles: ', existingRoles); + + let roleID; + if (existingRoles.length > 0) { + console.log('role exists'); + // If the role exists, update the description + roleID = existingRoles[0].RoleID; + console.log('existing role id: ', roleID); + const updateRoleQuery = `UPDATE \`${schema}\`.\`roles\` SET RoleDescription = ? 
WHERE RoleID = ?`; + console.log('update role query: ', updateRoleQuery); + await runQuery(connection, updateRoleQuery, [roledescription, roleID]); + console.log('Role updated with description:', roledescription); + } else { + // If the role does not exist, insert a new role + const insertRoleQuery = createInsertOrUpdateQuery(schema, 'roles', { RoleName: normalizedRole, - RoleDescription: rowData.roledescription ?? null - }, - 'RoleID' - ); + RoleDescription: roledescription + }); + const insertResult = await runQuery(connection, insertRoleQuery, [normalizedRole, roledescription]); + roleID = insertResult.insertId; + console.log('New role inserted with RoleID:', roleID); + } // Handle Personnel insertion/updation const personnelData = { CensusID: censusID, - FirstName: rowData.firstname, - LastName: rowData.lastname, + FirstName: firstname, + LastName: lastname, RoleID: roleID }; - const personnelID = await handleUpsert(connection, schema, 'personnel', personnelData, 'PersonnelID'); + const personnelQuery = createSelectQuery(schema, 'personnel', personnelData); + const existingPersonnel = await runQuery(connection, personnelQuery, Object.values(personnelData)); + + let personnelID; + if (existingPersonnel.length > 0) { + // If personnel exists, update the row + personnelID = existingPersonnel[0].PersonnelID; + const updatePersonnelQuery = createInsertOrUpdateQuery(schema, 'personnel', personnelData); + await runQuery(connection, updatePersonnelQuery, Object.values(personnelData)); + console.log('Personnel updated:', personnelID); + } else { + // Insert new personnel record + const insertPersonnelQuery = createInsertOrUpdateQuery(schema, 'personnel', personnelData); + const insertResult = await runQuery(connection, insertPersonnelQuery, Object.values(personnelData)); + personnelID = insertResult.insertId; + console.log('New personnel inserted with PersonnelID:', personnelID); + } await connection.commit(); console.log('Upsert successful. Personnel ID:', personnelID); diff --git a/frontend/components/processors/processspecies.tsx b/frontend/components/processors/processspecies.tsx index fd7b52b6..a83824a3 100644 --- a/frontend/components/processors/processspecies.tsx +++ b/frontend/components/processors/processspecies.tsx @@ -6,7 +6,7 @@ function cleanInputData(data: any) { const cleanedData: any = {}; for (const key in data) { if (data.hasOwnProperty(key)) { - cleanedData[key] = data[key] !== undefined ? data[key] : null; + cleanedData[key] = data[key] !== undefined && data[key] !== '' ? 
data[key] : null; } } return cleanedData; @@ -22,13 +22,23 @@ export async function processSpecies(props: Readonly): P // Handle Family insertion/updation let familyID: number | undefined; if (rowData.family) { - familyID = await handleUpsert(connection, schema, 'family', { Family: rowData.family }, 'FamilyID'); + try { + familyID = await handleUpsert(connection, schema, 'family', { Family: rowData.family }, 'FamilyID'); + } catch (error: any) { + console.error('Family upsert failed:', error.message); + throw createError('Family upsert failed', { error }); + } } // Handle Genus insertion/updation let genusID: number | undefined; if (rowData.genus) { - genusID = await handleUpsert(connection, schema, 'genus', { Genus: rowData.genus, FamilyID: familyID }, 'GenusID'); + try { + genusID = await handleUpsert(connection, schema, 'genus', { Genus: rowData.genus, FamilyID: familyID }, 'GenusID'); + } catch (error: any) { + console.error('Genus upsert failed:', error.message); + throw createError('Genus upsert failed', { error }); + } } // Handle Species insertion/updation @@ -45,7 +55,14 @@ export async function processSpecies(props: Readonly): P }; const cleanedSpeciesData = cleanInputData(speciesData); - speciesID = await handleUpsert(connection, schema, 'species', cleanedSpeciesData, 'SpeciesID'); + console.log('Cleaned species data: ', cleanedSpeciesData); + + try { + speciesID = await handleUpsert(connection, schema, 'species', cleanedSpeciesData, 'SpeciesID'); + } catch (error: any) { + console.error('Species upsert failed:', error.message); + throw createError('Species upsert failed', { error }); + } } await connection.commit(); diff --git a/frontend/components/processors/validationshardcoded.tsx b/frontend/components/processors/validationshardcoded.tsx deleted file mode 100644 index 4ef86c47..00000000 --- a/frontend/components/processors/validationshardcoded.tsx +++ /dev/null @@ -1,925 +0,0 @@ -import { getConn, runQuery } from './processormacros'; - -// Centralized validation function -export async function runValidation( - validationProcedureName: string, - cursorQuery: string, - cursorParams: any[], - validationCriteria: string, - errorMessage: string, - expectedValueRange: string, - measuredValue: (row: any) => string, - additionalDetails: string -) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = ?; - `; - const validationResult = await runQuery(conn, validationProcedureQuery, [validationProcedureName]); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? 
- ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, MeasuredValue, ExpectedValueRange, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', ?, ?, ?, ?, ?); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - insertErrorParams.push(row.CoreMeasurementID, veID, row.CoreMeasurementID, veID); - logValidationParams.push( - validationProcedureName, - row.CoreMeasurementID, - errorMessage, - validationCriteria, - measuredValue(row), - expectedValueRange, - additionalDetails - ); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed successfully. Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error(`Error during ${validationProcedureName} validation:`, error.message); - throw new Error(`${validationProcedureName} validation failed. Please check the logs for more details.`); - } finally { - if (conn) conn.release(); - } -} - -// centralized function: -// Define the enum for DBH and HOM units -enum Units { - km = 'km', - hm = 'hm', - dam = 'dam', - m = 'm', - dm = 'dm', - cm = 'cm', - mm = 'mm' -} - -// Map the units to their conversion factors for DBH (in mm) and HOM (in meters) -const unitConversionFactors: Record = { - km: 1000000, - hm: 100000, - dam: 10000, - m: 1000, - dm: 100, - cm: 10, - mm: 1 -}; - -const unitConversionFactorsHOM: Record = { - km: 1000, - hm: 100, - dam: 10, - m: 1, - dm: 0.1, - cm: 0.01, - mm: 0.001 -}; - -// Define the enum for DBH units -enum DBHUnits { - km = 'km', - hm = 'hm', - dam = 'dam', - m = 'm', - dm = 'dm', - cm = 'cm', - mm = 'mm' -} - -export async function validateDBHGrowthExceedsMax(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT - cm2.CoreMeasurementID, - cm1.MeasuredDBH * (CASE cm1.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) AS vPrevDBHInMM, - cm2.MeasuredDBH * (CASE cm2.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) AS vCurrDBHInMM - FROM coremeasurements cm1 - JOIN coremeasurements cm2 - ON cm1.StemID = cm2.StemID - AND YEAR(cm2.MeasurementDate) = YEAR(cm1.MeasurementDate) + 1 - LEFT JOIN stems st2 - ON cm2.StemID = st2.StemID - LEFT JOIN quadrats q - ON st2.QuadratID = q.QuadratID - LEFT JOIN cmattributes cma - ON cm1.CoreMeasurementID = cma.CoreMeasurementID - LEFT JOIN attributes a - ON cma.Code = a.Code - WHERE - (a.Status NOT IN ('dead', 'stem dead', 'broken below', 'missing', 'omitted') OR a.Status IS NULL) - AND cm1.MeasuredDBH IS NOT NULL - AND cm2.MeasuredDBH IS NOT NULL - AND cm1.IsValidated IS TRUE - AND cm2.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID); - AND (cm2.MeasuredDBH * (CASE cm2.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) - - cm1.MeasuredDBH * 
(CASE cm1.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) > 65); - `; - - const cursorParams = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateDBHGrowthExceedsMax', - cursorQuery, - cursorParams, - 'Annual DBH Growth', - 'Growth exceeds max threshold.', - 'Growth <= 65 mm', - row => `Previous DBH in mm: ${row.vPrevDBHInMM}, Current DBH in mm: ${row.vCurrDBHInMM}`, - 'Checked for excessive DBH growth over a year' - ); -} - -export async function validateDBHShrinkageExceedsMax(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT - cm2.CoreMeasurementID, - cm1.MeasuredDBH * (CASE cm1.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) AS vPrevDBHInMM, - cm2.MeasuredDBH * (CASE cm2.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) AS vCurrDBHInMM - FROM coremeasurements cm1 - JOIN coremeasurements cm2 - ON cm1.StemID = cm2.StemID - AND YEAR(cm2.MeasurementDate) = YEAR(cm1.MeasurementDate) + 1 - LEFT JOIN stems st - ON cm2.StemID = st.StemID - LEFT JOIN quadrats q - ON st.QuadratID = q.QuadratID - LEFT JOIN cmattributes cma - ON cm1.CoreMeasurementID = cma.CoreMeasurementID - LEFT JOIN attributes a - ON cma.Code = a.Code - WHERE - (a.Status NOT IN ('dead', 'stem dead', 'broken below', 'missing', 'omitted') OR a.Status IS NULL) - AND cm1.MeasuredDBH IS NOT NULL - AND cm2.MeasuredDBH IS NOT NULL - AND cm1.IsValidated IS TRUE - AND cm2.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - AND (cm2.MeasuredDBH * (CASE cm2.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) - < cm1.MeasuredDBH * (CASE cm1.DBHUnit - WHEN 'km' THEN 1000000 - WHEN 'hm' THEN 100000 - WHEN 'dam' THEN 10000 - WHEN 'm' THEN 1000 - WHEN 'dm' THEN 100 - WHEN 'cm' THEN 10 - WHEN 'mm' THEN 1 - ELSE 1 END) * 0.95); - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateDBHShrinkageExceedsMax', - cursorQuery, - cursorParams, - 'Annual DBH Shrinkage', - 'Shrinkage exceeds maximum allowed threshold.', - 'Shrinkage < 5% of previous DBH', - row => `Previous DBH in mm: ${row.vPrevDBHInMM}, Current DBH in mm: ${row.vCurrDBHInMM}`, - 'Checked for excessive DBH shrinkage over a year' - ); -} - -export async function validateFindAllInvalidSpeciesCodes(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM stems s - JOIN trees t ON s.TreeID = t.TreeID - LEFT JOIN species sp ON t.SpeciesID = sp.SpeciesID - JOIN coremeasurements cm ON s.StemID = cm.StemID - LEFT JOIN quadrats q ON s.QuadratID = q.QuadratID - WHERE sp.SpeciesID IS NULL - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY cm.CoreMeasurementID; - `; - - const cursorParams: any[] = []; - if 
(p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateFindAllInvalidSpeciesCodes', - cursorQuery, - cursorParams, - 'Species Code Validation', - 'Invalid species code detected.', - 'Non-null and valid Species ID', - () => 'Species ID: NULL', - 'Checking for the existence of valid species codes for each measurement.' - ); -} - -export async function validateFindDuplicateStemTreeTagCombinationsPerCensus(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM coremeasurements cm - INNER JOIN stems s ON cm.StemID = s.StemID - INNER JOIN trees t ON s.TreeID = t.TreeID - INNER JOIN quadrats q ON s.QuadratID = q.QuadratID - WHERE (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - AND cm.IsValidated = NULL - GROUP BY q.CensusID, s.StemTag, t.TreeTag - HAVING COUNT(cm.CoreMeasurementID) > 1; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateFindDuplicateStemTreeTagCombinationsPerCensus', - cursorQuery, - cursorParams, - 'Duplicate Stem-Tree Tag Combinations per Census', - 'Duplicate stem and tree tag combination detected.', - 'Unique Stem-Tree Tag Combinations', - () => 'N/A', - 'Checking for duplicate stem and tree tag combinations in each census.' - ); -} - -export async function validateFindDuplicatedQuadratsByName(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM quadrats q - LEFT JOIN stems st ON q.QuadratID = st.QuadratID - JOIN coremeasurements cm ON st.StemID = cm.StemID - WHERE cm.IsValidated IS NULL - AND (q.PlotID, q.QuadratName) IN ( - SELECT PlotID, QuadratName - FROM quadrats - GROUP BY PlotID, QuadratName - HAVING COUNT(*) > 1 - ) - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY cm.CoreMeasurementID; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateFindDuplicatedQuadratsByName', - cursorQuery, - cursorParams, - 'Quadrat Name Duplication', - 'Duplicated quadrat name detected.', - 'Unique Quadrat Names per Plot', - () => 'N/A', - 'Checking for duplicated quadrat names within the same plot.' 
- ); -} - -export async function validateFindMeasurementsOutsideCensusDateBoundsGroupByQuadrat(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT MIN(cm.CoreMeasurementID) AS CoreMeasurementID - FROM coremeasurements cm - JOIN stems st ON cm.StemID = st.StemID - JOIN quadrats q ON st.QuadratID = q.QuadratID - JOIN census c ON q.CensusID = c.CensusID - WHERE (cm.MeasurementDate < c.StartDate OR cm.MeasurementDate > c.EndDate) - AND cm.MeasurementDate IS NOT NULL - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY q.QuadratID, c.CensusID, c.StartDate, c.EndDate; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateFindMeasurementsOutsideCensusDateBoundsGroupByQuadrat', - cursorQuery, - cursorParams, - 'Measurement Date vs Census Date Bounds', - 'Measurement outside census date bounds.', - 'Within Census Start and End Dates', - () => 'Measurement Date', - 'Checking if measurement dates fall within the start and end dates of their respective censuses.' - ); -} - -export async function validateFindStemsInTreeWithDifferentSpecies(p_CensusID: number | null, p_PlotID: number | null) { - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM coremeasurements cm - JOIN stems s ON cm.StemID = s.StemID - JOIN trees t ON s.TreeID = t.TreeID - JOIN quadrats q ON s.QuadratID = q.QuadratID - WHERE cm.IsValidated = NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY t.TreeID, cm.CoreMeasurementID - HAVING COUNT(DISTINCT t.SpeciesID) > 1; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - return runValidation( - 'ValidateFindStemsInTreeWithDifferentSpecies', - cursorQuery, - cursorParams, - 'Species consistency across tree stems', - 'Stems in the same tree have different species.', - 'One species per tree', - () => 'One species per tree', - 'Checking if stems belonging to the same tree have different species IDs.' 
- ); -} - -export async function validateFindStemsOutsidePlots(p_CensusID: number | null, p_PlotID: number | null) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = 'ValidateFindStemsOutsidePlots'; - `; - const validationResult = await runQuery(conn, validationProcedureQuery); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - // Query to find stems outside plot dimensions - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM stems s - INNER JOIN coremeasurements cm ON s.StemID = cm.StemID - INNER JOIN quadrats q ON s.QuadratID = q.QuadratID - INNER JOIN plots p ON q.PlotID = p.PlotID - WHERE (s.LocalX > p.DimensionX OR s.LocalY > p.DimensionY) - AND s.LocalX IS NOT NULL - AND s.LocalY IS NOT NULL - AND p.DimensionX > 0 - AND p.DimensionY > 0 - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY cm.CoreMeasurementID; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? - ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, MeasuredValue, ExpectedValueRange, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', 'Stem is outside plot dimensions.', - 'Stem Placement within Plot Boundaries', 'Stem Plot Coordinates', - 'Within Plot Dimensions', 'Validating whether stems are located within the specified plot dimensions.'); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - const { CoreMeasurementID } = row; - - insertErrorParams.push(CoreMeasurementID, veID, CoreMeasurementID, veID); - logValidationParams.push('ValidateFindStemsOutsidePlots', CoreMeasurementID); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed. Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error('Error during Stem Placement validation:', error.message); - throw new Error('Stem Placement validation failed. 
Please check the logs for more details.'); - } finally { - if (conn) conn.release(); - } -} - -export async function validateFindTreeStemsInDifferentQuadrats(p_CensusID: number | null, p_PlotID: number | null) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = 'ValidateFindTreeStemsInDifferentQuadrats'; - `; - const validationResult = await runQuery(conn, validationProcedureQuery); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - // Query to find tree stems located in different quadrats - const cursorQuery = ` - SELECT cm1.CoreMeasurementID - FROM stems s1 - JOIN stems s2 ON s1.TreeID = s2.TreeID AND s1.StemID != s2.StemID - JOIN quadrats q1 ON s1.QuadratID = q1.QuadratID - JOIN quadrats q2 ON s2.QuadratID = q2.QuadratID - JOIN coremeasurements cm1 ON s1.StemID = cm1.StemID - WHERE q1.QuadratID != q2.QuadratID - AND cm1.IsValidated IS NULL - AND (@p_CensusID IS NULL OR q.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR q.PlotID = @p_PlotID) - GROUP BY cm1.CoreMeasurementID; - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? - ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, MeasuredValue, ExpectedValueRange, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', 'Stems in the same tree are in different quadrats.', - 'Stem Quadrat Consistency within Trees', 'Quadrat IDs of Stems', - 'Consistent Quadrat IDs for all Stems in a Tree', - 'Validating that all stems within the same tree are located in the same quadrat.'); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - const { CoreMeasurementID } = row; - - insertErrorParams.push(CoreMeasurementID, veID, CoreMeasurementID, veID); - logValidationParams.push('ValidateFindTreeStemsInDifferentQuadrats', CoreMeasurementID); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed. Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error('Error during Stem Quadrat Consistency validation:', error.message); - throw new Error('Stem Quadrat Consistency validation failed. 
Please check the logs for more details.'); - } finally { - if (conn) conn.release(); - } -} - -enum HOMUnits { - km = 'km', - hm = 'hm', - dam = 'dam', - m = 'm', - dm = 'dm', - cm = 'cm', - mm = 'mm' -} - -export async function validateHOMUpperAndLowerBounds(p_CensusID: number | null, p_PlotID: number | null, minHOM: number | null, maxHOM: number | null) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = 'ValidateHOMUpperAndLowerBounds'; - `; - const validationResult = await runQuery(conn, validationProcedureQuery); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - // Query to find measurements outside the HOM bounds - const cursorQuery = ` - SELECT cm.CoreMeasurementID, cm.MeasuredDBH, cm.DBHUnit - FROM coremeasurements cm - LEFT JOIN census c ON cm.CensusID = c.CensusID - WHERE ( - (@minHOM IS NOT NULL AND cm.MeasuredHOM < @minHOM) - OR - (@maxHOM IS NOT NULL AND cm.MeasuredHOM > @maxHOM) - ) - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR c.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR c.PlotID = @p_PlotID); - `; - - const cursorParams: any[] = []; - if (minHOM !== null) cursorParams.push(minHOM); - if (maxHOM !== null) cursorParams.push(maxHOM); - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? - ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, MeasuredValue, ExpectedValueRange, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', ?, ?, ?, ?, ?); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - const { CoreMeasurementID, MeasuredHOM, HOMUnit } = row; - - const homUnit = HOMUnit as HOMUnits; - const measuredHOMInMeters = MeasuredHOM * (unitConversionFactorsHOM[homUnit] || 1); - - const validationCriteria = 'HOM Measurement Range Validation'; - const measuredValue = `Measured HOM: ${measuredHOMInMeters} meters`; - const expectedValueRange = `Expected HOM Range: ${minHOM} - ${maxHOM} meters`; - const additionalDetails = 'Checks if the measured HOM falls within the specified minimum and maximum range in meters.'; - - insertErrorParams.push(CoreMeasurementID, veID, CoreMeasurementID, veID); - logValidationParams.push( - 'ValidateHOMUpperAndLowerBounds', - CoreMeasurementID, - `HOM outside bounds: ${minHOM} - ${maxHOM} meters`, - validationCriteria, - measuredValue, - expectedValueRange, - additionalDetails - ); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed successfully. 
Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error('Error during HOM Bounds validation:', error.message); - throw new Error('HOM Bounds validation failed. Please check the logs for more details.'); - } finally { - if (conn) conn.release(); - } -} - -export async function validateScreenMeasuredDiameterMinMax(p_CensusID: number | null, p_PlotID: number | null, minDBH: number | null, maxDBH: number | null) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = 'ValidateScreenMeasuredDiameterMinMax'; - `; - const validationResult = await runQuery(conn, validationProcedureQuery); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - // Query to find measurements outside the DBH bounds - const cursorQuery = ` - SELECT cm.CoreMeasurementID, cm.MeasuredDBH, cm.DBHUnit - FROM coremeasurements cm - LEFT JOIN census c ON cm.CensusID = c.CensusID - WHERE ( - (@minDBH IS NOT NULL AND cm.MeasuredDBH < @minDBH) - OR - (@maxDBH IS NOT NULL AND cm.MeasuredDBH > @maxDBH) - ) - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR c.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR c.PlotID = @p_PlotID); - `; - - const cursorParams: any[] = []; - if (minDBH !== null) cursorParams.push(minDBH); - if (maxDBH !== null) cursorParams.push(maxDBH); - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? - ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, MeasuredValue, ExpectedValueRange, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', ?, ?, ?, ?, ?); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - const { CoreMeasurementID, MeasuredDBH, DBHUnit } = row; - - // Convert DBH to millimeters - const dbhUnit = DBHUnit as DBHUnits; - const measuredDBHInMM = MeasuredDBH * (unitConversionFactors[dbhUnit] || 1); - - const validationCriteria = 'DBH Measurement Range Validation'; - const measuredValue = `Measured DBH: ${measuredDBHInMM} mm`; - const expectedValueRange = `Expected DBH Range: ${minDBH} - ${maxDBH} mm`; - const additionalDetails = 'Checks if the measured DBH falls within the specified minimum and maximum range in millimeters.'; - - insertErrorParams.push(CoreMeasurementID, veID, CoreMeasurementID, veID); - logValidationParams.push( - 'ValidateScreenMeasuredDiameterMinMax', - CoreMeasurementID, - `DBH outside bounds: ${minDBH} - ${maxDBH} mm`, - validationCriteria, - measuredValue, - expectedValueRange, - additionalDetails - ); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed successfully. 
Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error('Error during DBH Bounds validation:', error.message); - throw new Error('DBH Bounds validation failed. Please check the logs for more details.'); - } finally { - if (conn) conn.release(); - } -} - -export async function validateScreenStemsWithMeasurementsButDeadAttributes(p_CensusID: number | null, p_PlotID: number | null) { - const conn = await getConn(); - let insertCount = 0; - - try { - const validationProcedureQuery = ` - SELECT ValidationID - FROM catalog.validationprocedures - WHERE ProcedureName = 'ValidateScreenStemsWithMeasurementsButDeadAttributes'; - `; - const validationResult = await runQuery(conn, validationProcedureQuery); - if (validationResult.length === 0) { - throw new Error('Validation procedure not found.'); - } - const veID = validationResult[0]?.ValidationID; - - // Query to find stems with measurements but dead attributes - const cursorQuery = ` - SELECT cm.CoreMeasurementID - FROM coremeasurements cm - JOIN census c ON cm.CensusID = c.CensusID - JOIN cmattributes cma ON cm.CoreMeasurementID = cma.CoreMeasurementID - JOIN attributes a ON cma.Code = a.Code - WHERE ( - (cm.MeasuredDBH IS NOT NULL AND cm.MeasuredDBH > 0) OR - (cm.MeasuredHOM IS NOT NULL AND cm.MeasuredHOM > 0) - ) - AND a.Status IN ('dead', 'stem dead', 'missing', 'broken below', 'omitted') - AND cm.IsValidated IS NULL - AND (@p_CensusID IS NULL OR c.CensusID = @p_CensusID) - AND (@p_PlotID IS NULL OR c.PlotID = @p_PlotID); - `; - - const cursorParams: any[] = []; - if (p_CensusID !== null) cursorParams.push(p_CensusID); - if (p_PlotID !== null) cursorParams.push(p_PlotID); - - const cursorResults = await runQuery(conn, cursorQuery, cursorParams); - - if (cursorResults.length > 0) { - const insertErrorQuery = ` - INSERT INTO cmverrors (CoreMeasurementID, ValidationErrorID) - SELECT ?, ? - FROM DUAL - WHERE NOT EXISTS ( - SELECT 1 - FROM cmverrors - WHERE CoreMeasurementID = ? AND ValidationErrorID = ? - ); - `; - const logValidationQuery = ` - INSERT INTO validationchangelog ( - ProcedureName, RunDateTime, TargetRowID, ValidationOutcome, ErrorMessage, - ValidationCriteria, AdditionalDetails - ) VALUES (?, NOW(), ?, 'Failed', 'Stem with measurements but dead attributes detected.', - ?, ?); - `; - - const insertErrorParams: any[] = []; - const logValidationParams: any[] = []; - - for (const row of cursorResults) { - const { CoreMeasurementID } = row; - - const validationCriteria = 'Stem Measurements with Dead Attributes Validation'; - const additionalDetails = 'Verifies that stems marked as dead do not have active measurements.'; - - insertErrorParams.push(CoreMeasurementID, veID, CoreMeasurementID, veID); - logValidationParams.push('ValidateScreenStemsWithMeasurementsButDeadAttributes', CoreMeasurementID, validationCriteria, additionalDetails); - - insertCount++; - } - - // Execute batch inserts - await runQuery(conn, insertErrorQuery, insertErrorParams); - await runQuery(conn, logValidationQuery, logValidationParams); - } - - return { - TotalRows: cursorResults.length, - FailedRows: insertCount, - Message: `Validation completed successfully. Total rows: ${cursorResults.length}, Failed rows: ${insertCount}` - }; - } catch (error: any) { - console.error('Error during Stem with Dead Attributes validation:', error.message); - throw new Error('Stem with Dead Attributes validation failed. 
Please check the logs for more details.'); - } finally { - if (conn) conn.release(); - } -} diff --git a/frontend/components/sidebar.tsx b/frontend/components/sidebar.tsx index eb0f43ce..492ef80b 100644 --- a/frontend/components/sidebar.tsx +++ b/frontend/components/sidebar.tsx @@ -21,7 +21,7 @@ import { useSiteDispatch } from '@/app/contexts/userselectionprovider'; import { usePathname, useRouter } from 'next/navigation'; -import { Badge, Button, IconButton, SelectOption, Stack, Tooltip } from '@mui/joy'; +import { Badge, IconButton, SelectOption, Stack, Tooltip } from '@mui/joy'; import AddIcon from '@mui/icons-material/Add'; import Select from '@mui/joy/Select'; import Option from '@mui/joy/Option'; @@ -222,9 +222,9 @@ export default function Sidebar(props: SidebarProps) { const mapper = new OrgCensusToCensusResultMapper(); const newCensusID = await mapper.startNewCensus(currentSite?.schemaName ?? '', currentPlot?.plotID ?? 0, highestPlotCensusNumber + 1); if (!newCensusID) throw new Error('census creation failure'); - await new Promise(resolve => setTimeout(resolve, 500)); // debounce + await new Promise(resolve => setTimeout(resolve, 300)); // debounce } else { - await new Promise(resolve => setTimeout(resolve, 500)); // debounce + await new Promise(resolve => setTimeout(resolve, 300)); // debounce // if (rolledOverQuadrats) { // passing census list loading trigger to stems rollover function: // setIsRolloverStemsModalOpen(true); // } else setCensusListLoaded(false); @@ -242,7 +242,7 @@ export default function Sidebar(props: SidebarProps) { // additional note: dialog handles actual rollover process. do not need to perform any API calls here. // --> stem rollover will not be triggered if quadrats are NOT rolled over setIsRolloverStemsModalOpen(false); - await new Promise(resolve => setTimeout(resolve, 500)); + await new Promise(resolve => setTimeout(resolve, 100)); setCensusListLoaded(false); }; @@ -786,7 +786,7 @@ export default function Sidebar(props: SidebarProps) { onMouseLeave={() => setHoveredIndex(null)} > {site !== undefined && plot !== undefined && census !== undefined ? 
( - + setIsRolloverModalOpen(false)} onConfirm={handleConfirmRollover} /> setIsRolloverStemsModalOpen(false)} onConfirm={handleConfirmStemsRollover} /> - - {site && plot && census && } diff --git a/frontend/components/themeregistry/theme.ts b/frontend/components/themeregistry/theme.ts index 9608a8b3..5419b7db 100644 --- a/frontend/components/themeregistry/theme.ts +++ b/frontend/components/themeregistry/theme.ts @@ -40,6 +40,14 @@ const theme = extendTheme({ }) }) } + }, + JoyTooltip: { + defaultProps: { + // Automatically apply this prop globally + sx: { + pointerEvents: 'none' + } + } } } }); diff --git a/frontend/components/uploadsystemhelpers/displayparseddatagrid.tsx b/frontend/components/uploadsystemhelpers/displayparseddatagrid.tsx index 23586c81..5dbfe0c9 100644 --- a/frontend/components/uploadsystemhelpers/displayparseddatagrid.tsx +++ b/frontend/components/uploadsystemhelpers/displayparseddatagrid.tsx @@ -14,7 +14,7 @@ import { validateQuadratsRow, validateSubquadratsRow } from '@/config/sqlrdsdefi import { validateMeasurementsRow } from '@/config/sqlrdsdefinitions/views'; import { validatePersonnelRow } from '@/config/sqlrdsdefinitions/personnel'; -import { validateAttributesRow } from '@/config/sqlrdsdefinitions/core'; +import { AttributeStatusOptions, validateAttributesRow } from '@/config/sqlrdsdefinitions/core'; const validationFunctions: Record = { attributes: validateAttributesRow, @@ -95,7 +95,8 @@ export const DisplayParsedDataGridInline: React.FC = (pr (cellError.includes('were auto-filled based on table defaults') || cellError.includes('was auto-calculated based on dimension submission') || cellError === 'Genus was auto-filled based on species field.' || - cellError === 'Species field was split into genus and species.'); + cellError === 'Species field was split into genus and species.' || + cellError.includes('Attribute status must be one of the following:')); return ( = (pr )} ) : ( - + {displayValue !== undefined && displayValue !== null ? displayValue.toString() : ''} )} @@ -200,6 +206,13 @@ export const DisplayParsedDataGridInline: React.FC = (pr row['homunit'] = 'm'; rowErrors['homunit'] = 'HOM units were auto-filled based on table defaults.'; } + } else if (formType === 'attributes') { + rowErrors = rowErrors || {}; + const [status] = [row['status']]; + if (status && !AttributeStatusOptions.includes(status)) { + row['status'] = null; + rowErrors['status'] = 'Attribute status must be one of the following: ' + AttributeStatusOptions.join(', '); + } } if (rowErrors) { diff --git a/frontend/config/utils.ts b/frontend/config/utils.ts index b9c79de3..71d83faa 100644 --- a/frontend/config/utils.ts +++ b/frontend/config/utils.ts @@ -71,40 +71,56 @@ export type InitialValue = T extends string : undefined; export function createInitialObject(): { [K in keyof T]: InitialValue } { - return new Proxy( - {}, - { - get: (_target, prop) => { - if (typeof prop === 'string' && prop.toLowerCase().includes('id')) { - return 0; // Set the id field to 0 - } - const typeMap: { [key: string]: any } = { - string: '', - number: 0, - boolean: false, - object: null, - bigint: BigInt(0), - function: () => {}, - symbol: Symbol() - }; - const propType = typeof ({} as T)[prop as keyof T]; - return typeMap[propType as keyof typeof typeMap] ?? 
null; + const typeMap: { [key: string]: any } = { + string: '', + number: 0, + boolean: false, + object: null, + bigint: BigInt(0), + function: () => {}, + symbol: Symbol() + }; + + // Create an object where each property of T is initialized based on its type + const initializedObject = {} as { [K in keyof T]: InitialValue }; + + // Initialize all properties of T in the proxy + for (const prop in initializedObject) { + const propType = typeof initializedObject[prop as keyof T]; + initializedObject[prop as keyof T] = + prop.toLowerCase().includes('id') && true + ? 0 // If the property name includes 'id', set it to 0 + : (typeMap[propType as keyof typeof typeMap] ?? null); // Otherwise, assign default value based on type + } + + return new Proxy(initializedObject, { + get: (target, prop) => { + if (typeof prop === 'string' && prop.toLowerCase().includes('id')) { + return 0; // Set the id field to 0 } + const propType = typeof target[prop as keyof T]; + return typeMap[propType as keyof typeof typeMap] ?? null; } - ) as { [K in keyof T]: InitialValue }; + }) as { [K in keyof T]: InitialValue }; } export function createSelectQuery(schema: string, tableName: string, whereClause: Partial): string { - const whereConditions = Object.keys(whereClause) - .map(key => `${key} = ?`) + const whereKeys = Object.keys(whereClause); + + if (whereKeys.length === 0) { + throw new Error('No conditions provided for WHERE clause'); + } + + const whereConditions = whereKeys + .map(key => `\`${key}\` = ?`) // Escaping column names with backticks .join(' AND '); - return `SELECT * FROM ${schema}.${tableName} WHERE ${whereConditions}`; + return `SELECT * FROM \`${schema}\`.\`${tableName}\` WHERE ${whereConditions}`; } export function createInsertOrUpdateQuery(schema: string, tableName: string, data: Partial): string { const columns = Object.keys(data) - .map(key => key) + .map(key => `\`${key}\``) // Escaping column names with backticks .join(', '); const values = Object.keys(data) @@ -112,10 +128,10 @@ export function createInsertOrUpdateQuery(schema: string, tableName: str .join(', '); const updates = Object.keys(data) - .map(key => `${key} = VALUES(${key})`) + .map(key => `\`${key}\` = VALUES(\`${key}\`)`) .join(', '); - return `INSERT INTO ${schema}.${tableName} (${columns}) VALUES (${values}) ON DUPLICATE KEY UPDATE ${updates}`; + return `INSERT INTO \`${schema}\`.\`${tableName}\` (${columns}) VALUES (${values}) ON DUPLICATE KEY UPDATE ${updates}`; } export async function fetchPrimaryKey( diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 4f0ef1fc..1b41e82f 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -16113,8 +16113,7 @@ "url": "https://opencollective.com/unified" } }, - "node_modules/react-floater/@popperjs/core": { - }, + "node_modules/react-floater/@popperjs/core": {}, "node_modules/makeerror": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", @@ -17953,4 +17952,4 @@ "license": "MIT" } } -} \ No newline at end of file +} diff --git a/frontend/playwright.config.ts b/frontend/playwright.config.ts index e0b25be4..7206b7eb 100644 --- a/frontend/playwright.config.ts +++ b/frontend/playwright.config.ts @@ -4,8 +4,10 @@ import { defineConfig, devices } from '@playwright/test'; * Read environment variables from file. 
* https://github.com/motdotla/dotenv */ -// import dotenv from 'dotenv'; -// dotenv.config({ path: path.resolve(__dirname, '.env') }); +import dotenv from 'dotenv'; +import path from 'path'; + +dotenv.config({ path: path.resolve(__dirname, '.env') }); /** * See https://playwright.dev/docs/test-configuration. @@ -25,7 +27,7 @@ export default defineConfig({ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ use: { /* Base URL to use in actions like `await page.goto('/')`. */ - // baseURL: 'http://127.0.0.1:3000', + baseURL: 'http://localhost:3000', /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ trace: 'on-first-retry' @@ -67,12 +69,12 @@ export default defineConfig({ // name: 'Google Chrome', // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, // }, - ] + ], /* Run your local dev server before starting the tests */ - // webServer: { - // command: 'npm run start', - // url: 'http://127.0.0.1:3000', - // reuseExistingServer: !process.env.CI, - // }, + webServer: { + command: 'npm run dev', + url: 'http://localhost:3000', + reuseExistingServer: !process.env.CI + } }); diff --git a/frontend/sqlscripting/resetautoincrements.sql b/frontend/sqlscripting/resetautoincrements.sql new file mode 100644 index 00000000..30bd5980 --- /dev/null +++ b/frontend/sqlscripting/resetautoincrements.sql @@ -0,0 +1,165 @@ +-- Reset auto_increment for plots +SET @max_value = (SELECT IFNULL(MAX(PlotID), 0) + FROM plots); +SET @query = CONCAT('ALTER TABLE plots AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + + +-- Reset auto_increment for census +SET @max_value = (SELECT IFNULL(MAX(CensusID), 0) + FROM census); +SET @query = CONCAT('ALTER TABLE census AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + + +-- Reset auto_increment for quadrats +SET @max_value = (SELECT IFNULL(MAX(QuadratID), 0) + FROM quadrats); +SET @query = CONCAT('ALTER TABLE quadrats AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for reference +SET @max_value = (SELECT IFNULL(MAX(ReferenceID), 0) + FROM reference); +SET @query = CONCAT('ALTER TABLE reference AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for family +SET @max_value = (SELECT IFNULL(MAX(FamilyID), 0) + FROM family); +SET @query = CONCAT('ALTER TABLE family AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for genus +SET @max_value = (SELECT IFNULL(MAX(GenusID), 0) + FROM genus); +SET @query = CONCAT('ALTER TABLE genus AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for roles +SET @max_value = (SELECT IFNULL(MAX(RoleID), 0) + FROM roles); +SET @query = CONCAT('ALTER TABLE roles AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for personnel +SET @max_value = (SELECT IFNULL(MAX(PersonnelID), 0) + FROM personnel); +SET @query = CONCAT('ALTER TABLE personnel AUTO_INCREMENT = ', @max_value + 1); +PREPARE stmt FROM @query; +EXECUTE stmt; +DEALLOCATE PREPARE stmt; + +-- Reset auto_increment for quadratpersonnel +SET @max_value = (SELECT 
IFNULL(MAX(QuadratPersonnelID), 0)
+                  FROM quadratpersonnel);
+SET @query = CONCAT('ALTER TABLE quadratpersonnel AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for species
+SET @max_value = (SELECT IFNULL(MAX(SpeciesID), 0)
+                  FROM species);
+SET @query = CONCAT('ALTER TABLE species AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for specieslimits
+SET @max_value = (SELECT IFNULL(MAX(SpeciesLimitID), 0)
+                  FROM specieslimits);
+SET @query = CONCAT('ALTER TABLE specieslimits AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for subquadrats
+SET @max_value = (SELECT IFNULL(MAX(SubquadratID), 0)
+                  FROM subquadrats);
+SET @query = CONCAT('ALTER TABLE subquadrats AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for trees
+SET @max_value = (SELECT IFNULL(MAX(TreeID), 0)
+                  FROM trees);
+SET @query = CONCAT('ALTER TABLE trees AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for stems
+SET @max_value = (SELECT IFNULL(MAX(StemID), 0)
+                  FROM stems);
+SET @query = CONCAT('ALTER TABLE stems AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for coremeasurements
+SET @max_value = (SELECT IFNULL(MAX(CoreMeasurementID), 0)
+                  FROM coremeasurements);
+SET @query = CONCAT('ALTER TABLE coremeasurements AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for cmattributes
+SET @max_value = (SELECT IFNULL(MAX(CMAID), 0)
+                  FROM cmattributes);
+SET @query = CONCAT('ALTER TABLE cmattributes AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for cmverrors
+SET @max_value = (SELECT IFNULL(MAX(CMVErrorID), 0)
+                  FROM cmverrors);
+SET @query = CONCAT('ALTER TABLE cmverrors AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset auto_increment for specimens
+SET @max_value = (SELECT IFNULL(MAX(SpecimenID), 0)
+                  FROM specimens);
+SET @query = CONCAT('ALTER TABLE specimens AUTO_INCREMENT = ', @max_value + 1);
+PREPARE stmt FROM @query;
+EXECUTE stmt;
+DEALLOCATE PREPARE stmt;
+
+-- Reset unifiedchangelog (TRUNCATE also resets the AUTO_INCREMENT counter)
+# SET @max_value = (SELECT IFNULL(MAX(ChangeID), 0) FROM unifiedchangelog);
+# SET @query = CONCAT('ALTER TABLE unifiedchangelog AUTO_INCREMENT = ', @max_value + 1);
+# PREPARE stmt FROM @query;
+# EXECUTE stmt;
+# DEALLOCATE PREPARE stmt;
+
+TRUNCATE TABLE unifiedchangelog;
+
+-- Reset validationchangelog (TRUNCATE also resets the AUTO_INCREMENT counter)
+# SET @max_value = (SELECT IFNULL(MAX(ValidationRunID), 0) FROM validationchangelog);
+# SET @new_auto_increment = @max_value + 1;
+# ALTER TABLE validationchangelog AUTO_INCREMENT = @new_auto_increment;
+TRUNCATE TABLE validationchangelog;
+
+-- Refresh the flattened view tables (viewfulltable and measurements summary)
+CALL RefreshViewFullTable();
+
+CALL RefreshMeasurementsSummary();
\ No newline at end of file
diff --git a/frontend/sqlscripting/updatedviews.sql b/frontend/sqlscripting/updatedviews.sql
index cdf8579b..a06b9ec1 100644
--- a/frontend/sqlscripting/updatedviews.sql
+++ 
b/frontend/sqlscripting/updatedviews.sql @@ -161,7 +161,7 @@ FROM coremeasurements cm LEFT JOIN species sp ON t.SpeciesID = sp.SpeciesID LEFT JOIN genus g ON sp.GenusID = g.GenusID LEFT JOIN family fam ON g.FamilyID = fam.FamilyID - LEFT JOIN specieslimits sl ON sp.SpeciesCode = sl.SpeciesCode + LEFT JOIN specieslimits sl ON sp.SpeciesID = sl.SpeciesID LEFT JOIN quadrats q ON s.QuadratID = q.QuadratID LEFT JOIN quadratpersonnel qp ON q.QuadratID = qp.QuadratID LEFT JOIN personnel per ON qp.PersonnelID = per.PersonnelID
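
A note on resetautoincrements.sql above: after running the script, the counters can be spot-checked by reading them back from information_schema. This is a sketch only, not part of the patch; it assumes the connection's default schema is the target plot schema, and on MySQL 8.0 the reported values may be stale until information_schema_stats_expiry elapses (or ANALYZE TABLE is run).

-- Hypothetical verification query; compare each counter against MAX(<primary key>) + 1 for its table.
SELECT TABLE_NAME, AUTO_INCREMENT
FROM information_schema.TABLES
WHERE TABLE_SCHEMA = DATABASE()
  AND AUTO_INCREMENT IS NOT NULL
ORDER BY TABLE_NAME;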