Fix issues with the file upload and processing system. Bugs were found causing load and SQL overload issues; these have been temporarily resolved. A refit is needed to change how the file set is processed so that the system does not collapse under the weight of the file set.
siddheshraze committed Oct 22, 2024
1 parent 69c8436 commit 0b38f66
Showing 13 changed files with 207 additions and 95 deletions.
1 change: 0 additions & 1 deletion frontend/app/(hub)/measurementshub/validations/page.tsx
@@ -62,7 +62,6 @@ export default function ValidationsPage() {
try {
const response = await fetch('/api/validations/crud', { method: 'GET' });
const data = await response.json();
console.log('data: ', data);
setGlobalValidations(data);
} catch (err) {
console.error('Error fetching validations:', err);
@@ -365,7 +365,6 @@ export async function DELETE(request: NextRequest, { params }: { params: { dataT
let conn: PoolConnection | null = null;
const demappedGridID = gridID.charAt(0).toUpperCase() + gridID.substring(1);
const { newRow } = await request.json();
console.log('newrow: ', newRow);
try {
conn = await getConn();
await conn.beginTransaction();
@@ -5,7 +5,6 @@ import { HTTPResponses } from '@/config/macros';
export async function POST(request: NextRequest, { params }: { params: { validationProcedureName: string } }) {
try {
const { schema, validationProcedureID, cursorQuery, p_CensusID, p_PlotID, minDBH, maxDBH, minHOM, maxHOM } = await request.json();
console.log('data: ', schema, validationProcedureID, cursorQuery, p_CensusID, p_PlotID, minDBH, maxDBH, minHOM, maxHOM);

// Execute the validation procedure using the provided inputs
const validationResponse = await runValidation(validationProcedureID, params.validationProcedureName, schema, cursorQuery, {
80 changes: 76 additions & 4 deletions frontend/components/datagrids/isolateddatagridcommons.tsx
@@ -241,7 +241,25 @@ export default function IsolatedDataGridCommons(props: Readonly<IsolatedDataGrid
aData.forEach((row: any) => {
const values = getTableHeaders(FormType.attributes)
.map(rowHeader => rowHeader.label)
.map(header => row[header]);
.map(header => row[header])
.map(value => {
if (value === undefined || value === null || value === '') {
return null;
}
if (typeof value === 'number') {
return value;
}
const parsedValue = parseFloat(value);
if (!isNaN(parsedValue)) {
return parsedValue;
}
if (typeof value === 'string') {
value = value.replace(/"/g, '""');
value = `"${value}"`;
}

return value;
});
aCSVRows += values.join(',') + '\n';
});
const aBlob = new Blob([aCSVRows], {
@@ -268,7 +286,25 @@ export default function IsolatedDataGridCommons(props: Readonly<IsolatedDataGrid
qData.forEach((row: any) => {
const values = getTableHeaders(FormType.quadrats)
.map(rowHeader => rowHeader.label)
.map(header => row[header]);
.map(header => row[header])
.map(value => {
if (value === undefined || value === null || value === '') {
return null;
}
if (typeof value === 'number') {
return value;
}
const parsedValue = parseFloat(value);
if (!isNaN(parsedValue)) {
return parsedValue;
}
if (typeof value === 'string') {
value = value.replace(/"/g, '""');
value = `"${value}"`;
}

return value;
});
qCSVRows += values.join(',') + '\n';
});
const qBlob = new Blob([qCSVRows], {
@@ -295,7 +331,25 @@ export default function IsolatedDataGridCommons(props: Readonly<IsolatedDataGrid
pData.forEach((row: any) => {
const values = getTableHeaders(FormType.personnel)
.map(rowHeader => rowHeader.label)
.map(header => row[header]);
.map(header => row[header])
.map(value => {
if (value === undefined || value === null || value === '') {
return null;
}
if (typeof value === 'number') {
return value;
}
const parsedValue = parseFloat(value);
if (!isNaN(parsedValue)) {
return parsedValue;
}
if (typeof value === 'string') {
value = value.replace(/"/g, '""');
value = `"${value}"`;
}

return value;
});
pCSVRows += values.join(',') + '\n';
});
const pBlob = new Blob([pCSVRows], {
@@ -323,7 +377,25 @@ export default function IsolatedDataGridCommons(props: Readonly<IsolatedDataGrid
sData.forEach((row: any) => {
const values = getTableHeaders(FormType.species)
.map(rowHeader => rowHeader.label)
.map(header => row[header]);
.map(header => row[header])
.map(value => {
if (value === undefined || value === null || value === '') {
return null;
}
if (typeof value === 'number') {
return value;
}
const parsedValue = parseFloat(value);
if (!isNaN(parsedValue)) {
return parsedValue;
}
if (typeof value === 'string') {
value = value.replace(/"/g, '""');
value = `"${value}"`;
}

return value;
});
sCSVRows += values.join(',') + '\n';
});
const sBlob = new Blob([sCSVRows], {
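The value-normalization block added above is repeated verbatim for the attributes, quadrats, personnel, and species exports (and appears again in measurementscommons.tsx below). A minimal sketch of how it could be factored into a shared helper — the formatCsvCell name and its placement are assumptions, not part of this commit:

// Hypothetical shared helper (not in this commit). Mirrors the inline logic:
// empty values become null (join() renders them as empty fields), numbers pass
// through, numeric-looking strings are parsed, and remaining strings are quoted
// with embedded double quotes doubled per RFC 4180.
function formatCsvCell(value: any): any {
  if (value === undefined || value === null || value === '') return null;
  if (typeof value === 'number') return value;
  const parsed = parseFloat(value);
  if (!isNaN(parsed)) return parsed;
  if (typeof value === 'string') return `"${value.replace(/"/g, '""')}"`;
  return value;
}

// Usage in each export handler (sketch):
// const values = getTableHeaders(FormType.attributes)
//   .map(rowHeader => rowHeader.label)
//   .map(header => formatCsvCell(row[header]));

Note that parseFloat accepts strings with a numeric prefix, so a value such as "12A" would be exported as 12; if that is not desired, Number(value) with an isNaN check is the stricter alternative.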
55 changes: 31 additions & 24 deletions frontend/components/datagrids/isolatedmultilinedatagridcommons.tsx
@@ -47,8 +47,8 @@ export default function IsolatedMultilineDataGridCommons(props: Readonly<Isolate
rowsBeforeChange: {}
});

const columns = useMemo<GridColDef[]>(
() => [
const columns = useMemo<GridColDef[]>(() => {
let baseColumns: GridColDef[] = [
{
field: 'actions',
headerName: 'Actions',
@@ -80,33 +80,40 @@ export default function IsolatedMultilineDataGridCommons(props: Readonly<Isolate
unsavedChangesRef.current.rowsBeforeChange[id] = row;
}
setHasUnsavedRows(true);
apiRef.current.updateRows([row]); // to trigger row render
apiRef.current.updateRows([row]);
}}
/>
];
}
},
...gridColumns,
{
field: 'date',
headerName: 'Date',
headerClassName: 'header',
flex: 1,
editable: true,
renderCell: renderDatePicker,
renderEditCell: renderEditDatePicker
},
{
field: 'codes',
headerName: 'Codes',
headerClassName: 'header',
flex: 1,
align: 'center',
editable: true
}
],
[gridColumns, unsavedChangesRef, apiRef, setRows]
);
...gridColumns
];

if (gridType === 'measurements') {
baseColumns = [
...baseColumns,
{
field: 'date',
headerName: 'Date',
headerClassName: 'header',
flex: 1,
editable: true,
renderCell: renderDatePicker,
renderEditCell: renderEditDatePicker
},
{
field: 'codes',
headerName: 'Codes',
headerClassName: 'header',
flex: 1,
align: 'center',
editable: true
}
];
}

return baseColumns;
}, [gridColumns, gridType, unsavedChangesRef, apiRef, setHasUnsavedRows]);

const processRowUpdate = useCallback<NonNullable<DataGridProps['processRowUpdate']>>((newRow, oldRow) => {
const rowId = newRow.id;
20 changes: 19 additions & 1 deletion frontend/components/datagrids/measurementscommons.tsx
@@ -209,7 +209,25 @@ export default function MeasurementsCommons(props: Readonly<MeasurementsCommonsP
data.forEach((row: any) => {
const values = getTableHeaders(FormType.measurements)
.map(rowHeader => rowHeader.label)
.map(header => row[header]);
.map(header => row[header])
.map(value => {
if (value === undefined || value === null || value === '') {
return null;
}
if (typeof value === 'number') {
return value;
}
const parsedValue = parseFloat(value);
if (!isNaN(parsedValue)) {
return parsedValue;
}
if (typeof value === 'string') {
value = value.replace(/"/g, '""');
value = `"${value}"`;
}

return value;
});
csvRows += values.join(',') + '\n';
});
const blob = new Blob([csvRows], {
86 changes: 41 additions & 45 deletions frontend/components/processors/processcensus.tsx
@@ -11,24 +11,20 @@ export async function processCensus(props: Readonly<SpecialProcessingProps>): Pr
console.error('Missing required parameters: plotID or censusID');
throw new Error('Process Census: Missing plotID or censusID');
}

const { tag, stemtag, spcode, quadrat, lx, ly, coordinateunit, dbh, dbhunit, hom, homunit, date, codes } = rowData;

try {
await connection.beginTransaction();

// Fetch species
const speciesID = await fetchPrimaryKey<SpeciesResult>(schema, 'species', { SpeciesCode: spcode }, connection, 'SpeciesID');

// Fetch quadrat
const quadratID = await fetchPrimaryKey<QuadratResult>(schema, 'quadrats', { QuadratName: quadrat, PlotID: plotID }, connection, 'QuadratID');

if (tag) {
// Handle Tree Upsert
const treeID = await handleUpsert<TreeResult>(connection, schema, 'trees', { TreeTag: tag, SpeciesID: speciesID }, 'TreeID');

if (stemtag && lx && ly) {
console.log('Processing stem with StemTag:', stemtag);
if (stemtag || lx || ly) {
// Handle Stem Upsert
const stemID = await handleUpsert<StemResult>(
connection,
@@ -38,46 +34,47 @@ export async function processCensus(props: Readonly<SpecialProcessingProps>): Pr
'StemID'
);

if (dbh && hom && date) {
// Handle Core Measurement Upsert
const coreMeasurementID = await handleUpsert<CoreMeasurementsResult>(
connection,
schema,
'coremeasurements',
{
CensusID: censusID,
StemID: stemID,
IsValidated: null,
MeasurementDate: moment(date).format('YYYY-MM-DD'),
MeasuredDBH: dbh,
DBHUnit: dbhunit,
MeasuredHOM: hom,
HOMUnit: homunit
},
'CoreMeasurementID'
);
// Handle Core Measurement Upsert
const coreMeasurementID = await handleUpsert<CoreMeasurementsResult>(
connection,
schema,
'coremeasurements',
{
CensusID: censusID,
StemID: stemID,
IsValidated: null,
MeasurementDate: date && moment(date).isValid() ? moment.utc(date).format('YYYY-MM-DD') : null,
MeasuredDBH: dbh ? parseFloat(dbh) : null,
DBHUnit: dbhunit,
MeasuredHOM: hom ? parseFloat(hom) : null,
HOMUnit: homunit,
Description: null,
UserDefinedFields: null
},
'CoreMeasurementID'
);

// Handle CM Attributes Upsert
if (codes) {
const parsedCodes = codes
.split(';')
.map(code => code.trim())
.filter(Boolean);
if (parsedCodes.length === 0) {
console.error('No valid attribute codes found:', codes);
} else {
for (const code of parsedCodes) {
const attributeRows = await runQuery(connection, `SELECT COUNT(*) as count FROM ${schema}.attributes WHERE Code = ?`, [code]);
if (!attributeRows || attributeRows.length === 0 || !attributeRows[0].count) {
throw createError(`Attribute code ${code} not found or query failed.`, { code });
}
await handleUpsert<CMAttributesResult>(connection, schema, 'cmattributes', { CoreMeasurementID: coreMeasurementID, Code: code }, 'CMAID');
// Handle CM Attributes Upsert
if (codes) {
const parsedCodes = codes
.split(';')
.map(code => code.trim())
.filter(Boolean);
if (parsedCodes.length === 0) {
console.error('No valid attribute codes found:', codes);
} else {
for (const code of parsedCodes) {
const attributeRows = await runQuery(connection, `SELECT COUNT(*) as count FROM ${schema}.attributes WHERE Code = ?`, [code]);
if (!attributeRows || attributeRows.length === 0 || !attributeRows[0].count) {
throw createError(`Attribute code ${code} not found or query failed.`, { code });
}
await handleUpsert<CMAttributesResult>(connection, schema, 'cmattributes', { CoreMeasurementID: coreMeasurementID, Code: code }, 'CMAID');
}
}
}

// Update Census Start/End Dates
const combinedQuery = `
// Update Census Start/End Dates
const combinedQuery = `
UPDATE ${schema}.census c
JOIN (
SELECT CensusID, MIN(MeasurementDate) AS FirstMeasurementDate, MAX(MeasurementDate) AS LastMeasurementDate
@@ -88,11 +85,10 @@ export async function processCensus(props: Readonly<SpecialProcessingProps>): Pr
SET c.StartDate = m.FirstMeasurementDate, c.EndDate = m.LastMeasurementDate
WHERE c.CensusID = ${censusID};`;

await runQuery(connection, combinedQuery);
await connection.commit();
console.log('Upsert successful. CoreMeasurement ID generated:', coreMeasurementID);
return coreMeasurementID;
}
await runQuery(connection, combinedQuery);
await connection.commit();
console.log('Upsert successful. CoreMeasurement ID generated:', coreMeasurementID);
return coreMeasurementID;
}
}
} catch (error: any) {
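The substantive change in processcensus.tsx is that the coremeasurements upsert is no longer gated behind if (dbh && hom && date); instead each measurement field is coerced to a SQL-friendly value, with missing or unparsable inputs stored as null. A standalone sketch of that coercion under the same moment dependency — the helper name is illustrative only, not part of this commit:

import moment from 'moment';

// Illustrative helper: coerce raw row strings into the values handed to the
// coremeasurements upsert. Anything missing or invalid becomes null instead of
// blocking the upsert entirely.
function normalizeMeasurementFields(date?: string, dbh?: string, hom?: string) {
  return {
    MeasurementDate: date && moment(date).isValid() ? moment.utc(date).format('YYYY-MM-DD') : null,
    MeasuredDBH: dbh ? parseFloat(dbh) : null,
    MeasuredHOM: hom ? parseFloat(hom) : null
  };
}

One consequence is that a row carrying only a tag and stem coordinates still produces a coremeasurements record, just with null date and measurement values.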
6 changes: 5 additions & 1 deletion frontend/components/processors/processorhelperfunctions.tsx
@@ -26,7 +26,11 @@ export async function insertOrUpdate(props: InsertUpdateProcessingProps): Promis
if (columns.includes('censusID')) rowData['censusID'] = subProps.censusID?.toString() ?? null;
const tableColumns = columns.map(fileColumn => mapping.columnMappings[fileColumn]).join(', ');
const placeholders = columns.map(() => '?').join(', '); // Use '?' for placeholders in MySQL
const values = columns.map(fileColumn => rowData[fileColumn]);
const values = columns.map(fileColumn => {
const value = rowData[fileColumn];
if (typeof value === 'string' && value === '') return null;
return value;
});
const query = `
INSERT INTO ${schema}.${mapping.tableName} (${tableColumns})
VALUES (${placeholders}) ON DUPLICATE KEY
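The change to insertOrUpdate is small but deliberate: blank CSV cells should bind as SQL NULL rather than as '', which numeric and date columns reject. The same coercion as a standalone mapper, a sketch only — the helper name is not in the source:

// Hypothetical mirror of the inline change above: empty strings become null so the
// parameterized INSERT ... ON DUPLICATE KEY statement binds SQL NULL instead of ''.
const toSqlValue = (value: unknown): unknown => (typeof value === 'string' && value === '' ? null : value);

// const values = columns.map(fileColumn => toSqlValue(rowData[fileColumn]));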
4 changes: 2 additions & 2 deletions frontend/components/processors/processormacros.tsx
@@ -104,8 +104,8 @@ const sqlConfig: PoolOptions = {
port: parseInt(process.env.AZURE_SQL_PORT!),
database: process.env.AZURE_SQL_CATALOG_SCHEMA,
waitForConnections: true,
connectionLimit: 100, // increased from 10 to prevent bottlenecks
queueLimit: 0,
connectionLimit: 150, // increased from 10 to prevent bottlenecks
queueLimit: 20,
keepAliveInitialDelay: 10000, // 0 by default.
enableKeepAlive: true, // false by default.
connectTimeout: 20000 // 10 seconds by default.
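For context, sqlConfig appears to feed a mysql2 connection pool (PoolOptions, waitForConnections, and queueLimit are mysql2 options). Raising queueLimit from 0 to 20 changes behavior from an unbounded wait queue to failing fast once 20 callers are already waiting for a connection. A sketch of the changed pooling behavior under that assumption, with host and credential settings intentionally omitted:

import { createPool } from 'mysql2/promise';
import type { PoolOptions } from 'mysql2';

// Sketch only — not the full sqlConfig from processormacros.tsx:
// - connectionLimit: 150 caps the number of concurrently open connections;
// - queueLimit: 20 means that once 20 getConnection() calls are already queued,
//   additional calls fail immediately instead of waiting indefinitely
//   (queueLimit: 0, the previous value, allowed an unbounded queue).
const poolOptions: PoolOptions = {
  waitForConnections: true,
  connectionLimit: 150,
  queueLimit: 20,
  enableKeepAlive: true,
  keepAliveInitialDelay: 10000,
  connectTimeout: 20000
};

export const pool = createPool(poolOptions);

Failing fast here surfaces overload to the caller instead of letting requests pile up; whether 20 is the right bound depends on how the refit described in the commit message ends up batching the file set.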