More validation fixes: validate only the cells that have changed instead of revalidating everything on every edit

This commit is contained in:
2025-09-06 15:33:48 -04:00
parent 5e2ee73e2d
commit 4935cfe3bb
2 changed files with 225 additions and 148 deletions

View File

@@ -1,7 +1,9 @@
import { useCallback } from 'react'; import { useCallback, useMemo } from 'react';
import { RowData } from './validationTypes'; import { RowData } from './validationTypes';
import type { Field, Fields } from '../../../types'; import type { Field, Fields } from '../../../types';
import { ErrorType, ValidationError } from '../../../types'; import { ErrorSources, ErrorType, ValidationError } from '../../../types';
import { useUniqueValidation } from './useUniqueValidation';
import { isEmpty } from './validationTypes';
export const useRowOperations = <T extends string>( export const useRowOperations = <T extends string>(
data: RowData<T>[], data: RowData<T>[],
@@ -10,6 +12,95 @@ export const useRowOperations = <T extends string>(
setValidationErrors: React.Dispatch<React.SetStateAction<Map<number, Record<string, ValidationError[]>>>>, setValidationErrors: React.Dispatch<React.SetStateAction<Map<number, Record<string, ValidationError[]>>>>,
validateFieldFromHook: (value: any, field: Field<T>) => ValidationError[] validateFieldFromHook: (value: any, field: Field<T>) => ValidationError[]
) => { ) => {
// Uniqueness validation utilities
const { validateUniqueField } = useUniqueValidation<T>(fields);
// Determine which field keys are considered uniqueness-constrained
const uniquenessFieldKeys = useMemo(() => {
const keys = new Set<string>([
'item_number',
'upc',
'barcode',
'supplier_no',
'notions_no',
'name'
]);
fields.forEach((f) => {
if (f.validations?.some((v) => v.rule === 'unique')) {
keys.add(String(f.key));
}
});
return keys;
}, [fields]);
// Merge per-field uniqueness errors into the validation error map
const mergeUniqueErrorsForFields = useCallback(
(
baseErrors: Map<number, Record<string, ValidationError[]>>,
dataForCalc: RowData<T>[],
fieldKeysToCheck: string[]
) => {
if (!fieldKeysToCheck.length) return baseErrors;
const newErrors = new Map(baseErrors);
// For each field, compute duplicates and merge
fieldKeysToCheck.forEach((fieldKey) => {
if (!uniquenessFieldKeys.has(fieldKey)) return;
// Compute unique errors for this single field
const uniqueMap = validateUniqueField(dataForCalc, fieldKey);
// Rows that currently have uniqueness errors for this field
const rowsWithUniqueErrors = new Set<number>();
uniqueMap.forEach((_, rowIdx) => rowsWithUniqueErrors.add(rowIdx));
// First, apply/overwrite unique errors for rows that have duplicates
uniqueMap.forEach((errorsForRow, rowIdx) => {
const existing = { ...(newErrors.get(rowIdx) || {}) };
// Convert InfoWithSource to ValidationError[] for this field
const info = errorsForRow[fieldKey];
// Only apply uniqueness error when the value is non-empty
const currentValue = (dataForCalc[rowIdx] as any)?.[fieldKey];
if (info && !isEmpty(currentValue)) {
existing[fieldKey] = [
{
message: info.message,
level: info.level,
source: info.source ?? ErrorSources.Table,
type: info.type ?? ErrorType.Unique
}
];
}
if (Object.keys(existing).length > 0) newErrors.set(rowIdx, existing);
else newErrors.delete(rowIdx);
});
// Then, remove any stale unique errors for this field where duplicates are resolved
newErrors.forEach((rowErrs, rowIdx) => {
// Skip rows that still have unique errors for this field
if (rowsWithUniqueErrors.has(rowIdx)) return;
if ((rowErrs as any)[fieldKey]) {
// Also clear uniqueness errors when the current value is empty
const currentValue = (dataForCalc[rowIdx] as any)?.[fieldKey];
const filtered = (rowErrs as any)[fieldKey].filter((e: ValidationError) => e.type !== ErrorType.Unique);
if (filtered.length > 0) (rowErrs as any)[fieldKey] = filtered;
else delete (rowErrs as any)[fieldKey];
if (Object.keys(rowErrs).length > 0) newErrors.set(rowIdx, rowErrs);
else newErrors.delete(rowIdx);
}
});
});
return newErrors;
},
[uniquenessFieldKeys, validateUniqueField]
);
// Helper function to validate a field value // Helper function to validate a field value
const fieldValidationHelper = useCallback( const fieldValidationHelper = useCallback(
(rowIndex: number, specificField?: string) => { (rowIndex: number, specificField?: string) => {
@@ -27,7 +118,7 @@ export const useRowOperations = <T extends string>(
// Use state setter instead of direct mutation // Use state setter instead of direct mutation
setValidationErrors((prev) => { setValidationErrors((prev) => {
const newErrors = new Map(prev); let newErrors = new Map(prev);
const existingErrors = { ...(newErrors.get(rowIndex) || {}) }; const existingErrors = { ...(newErrors.get(rowIndex) || {}) };
// Quick check for required fields - this prevents flashing errors // Quick check for required fields - this prevents flashing errors
@@ -73,6 +164,12 @@ export const useRowOperations = <T extends string>(
newErrors.delete(rowIndex); newErrors.delete(rowIndex);
} }
// If field is uniqueness-constrained, also re-validate uniqueness for the column
if (uniquenessFieldKeys.has(specificField)) {
const dataForCalc = data; // latest data
newErrors = mergeUniqueErrorsForFields(newErrors, dataForCalc, [specificField]);
}
return newErrors; return newErrors;
}); });
} }
@@ -103,7 +200,7 @@ export const useRowOperations = <T extends string>(
}); });
} }
}, },
[data, fields, validateFieldFromHook, setValidationErrors] [data, fields, validateFieldFromHook, setValidationErrors, mergeUniqueErrorsForFields, uniquenessFieldKeys]
); );
// Use validateRow as an alias for fieldValidationHelper for compatibility // Use validateRow as an alias for fieldValidationHelper for compatibility
@@ -155,7 +252,8 @@ export const useRowOperations = <T extends string>(
// CRITICAL FIX: Combine both validation operations into a single state update // CRITICAL FIX: Combine both validation operations into a single state update
// to prevent intermediate rendering that causes error icon flashing // to prevent intermediate rendering that causes error icon flashing
setValidationErrors((prev) => { setValidationErrors((prev) => {
const newMap = new Map(prev); // Start with previous errors
let newMap = new Map(prev);
const existingErrors = newMap.get(rowIndex) || {}; const existingErrors = newMap.get(rowIndex) || {};
const newRowErrors = { ...existingErrors }; const newRowErrors = { ...existingErrors };
@@ -215,6 +313,24 @@ export const useRowOperations = <T extends string>(
newMap.delete(rowIndex); newMap.delete(rowIndex);
} }
// If uniqueness applies, validate affected columns
const fieldsToCheck: string[] = [];
if (uniquenessFieldKeys.has(String(key))) fieldsToCheck.push(String(key));
if (key === ('upc' as T) || key === ('barcode' as T) || key === ('supplier' as T)) {
if (uniquenessFieldKeys.has('item_number')) fieldsToCheck.push('item_number');
}
if (fieldsToCheck.length > 0) {
const dataForCalc = (() => {
const copy = [...data];
if (rowIndex >= 0 && rowIndex < copy.length) {
copy[rowIndex] = { ...(copy[rowIndex] || {}), [key]: processedValue } as RowData<T>;
}
return copy;
})();
newMap = mergeUniqueErrorsForFields(newMap, dataForCalc, fieldsToCheck);
}
return newMap; return newMap;
}); });
@@ -257,7 +373,7 @@ export const useRowOperations = <T extends string>(
} }
}, 5); // Reduced delay for faster secondary effects }, 5); // Reduced delay for faster secondary effects
}, },
[data, fields, validateFieldFromHook, setData, setValidationErrors] [data, fields, validateFieldFromHook, setData, setValidationErrors, mergeUniqueErrorsForFields, uniquenessFieldKeys]
); );
// Improved revalidateRows function // Improved revalidateRows function
@@ -268,7 +384,10 @@ export const useRowOperations = <T extends string>(
) => { ) => {
// Process all specified rows using a single state update to avoid race conditions // Process all specified rows using a single state update to avoid race conditions
setValidationErrors((prev) => { setValidationErrors((prev) => {
const newErrors = new Map(prev); let newErrors = new Map(prev);
// Track which uniqueness fields need to be revalidated across the dataset
const uniqueFieldsToCheck = new Set<string>();
// Process each row // Process each row
for (const rowIndex of rowIndexes) { for (const rowIndex of rowIndexes) {
@@ -300,6 +419,11 @@ export const useRowOperations = <T extends string>(
} else { } else {
delete existingRowErrors[fieldKey]; delete existingRowErrors[fieldKey];
} }
// If field is uniqueness-constrained, mark for uniqueness pass
if (uniquenessFieldKeys.has(fieldKey)) {
uniqueFieldsToCheck.add(fieldKey);
}
} }
// Update the row's errors // Update the row's errors
@@ -324,6 +448,11 @@ export const useRowOperations = <T extends string>(
if (errors.length > 0) { if (errors.length > 0) {
rowErrors[fieldKey] = errors; rowErrors[fieldKey] = errors;
} }
// If field is uniqueness-constrained and we validated it, include for uniqueness pass
if (uniquenessFieldKeys.has(fieldKey)) {
uniqueFieldsToCheck.add(fieldKey);
}
} }
// Update the row's errors // Update the row's errors
@@ -335,10 +464,15 @@ export const useRowOperations = <T extends string>(
} }
} }
// Run per-field uniqueness checks and merge results
if (uniqueFieldsToCheck.size > 0) {
newErrors = mergeUniqueErrorsForFields(newErrors, data, Array.from(uniqueFieldsToCheck));
}
return newErrors; return newErrors;
}); });
}, },
[data, fields, validateFieldFromHook] [data, fields, validateFieldFromHook, mergeUniqueErrorsForFields, uniquenessFieldKeys]
); );
// Copy a cell value to all cells below it in the same column // Copy a cell value to all cells below it in the same column

View File

@@ -20,8 +20,8 @@ export const useValidationState = <T extends string>({
}: Props<T>) => { }: Props<T>) => {
const { fields, rowHook, tableHook } = useRsi<T>(); const { fields, rowHook, tableHook } = useRsi<T>();
// Import validateField from useValidation // Import validateField and validateUniqueField from useValidation
const { validateField: validateFieldFromHook } = useValidation<T>( const { validateField: validateFieldFromHook, validateUniqueField } = useValidation<T>(
fields, fields,
rowHook rowHook
); );
@@ -96,6 +96,8 @@ export const useValidationState = <T extends string>({
const initialValidationDoneRef = useRef(false); const initialValidationDoneRef = useRef(false);
const isValidatingRef = useRef(false); const isValidatingRef = useRef(false);
// Track last seen item_number signature to drive targeted uniqueness checks
const lastItemNumberSigRef = useRef<string | null>(null);
// Use row operations hook // Use row operations hook
const { validateRow, updateRow, revalidateRows, copyDown } = useRowOperations<T>( const { validateRow, updateRow, revalidateRows, copyDown } = useRowOperations<T>(
@@ -132,139 +134,13 @@ export const useValidationState = <T extends string>({
// Use filter management hook // Use filter management hook
const filterManagement = useFilterManagement<T>(data, fields, validationErrors); const filterManagement = useFilterManagement<T>(data, fields, validationErrors);
// Run validation when data changes - OPTIMIZED to prevent recursive validation // Disable global full-table revalidation on any data change.
// Field-level validation now runs inside updateRow/validateRow, and per-column
// uniqueness is handled surgically where needed.
// Intentionally left blank to avoid UI lock-ups on small edits.
useEffect(() => { useEffect(() => {
// Skip initial load - we have a separate initialization process return; // no-op
if (!initialValidationDoneRef.current) return; }, [data, fields, hasEditingCells]);
// Don't run validation during template application
if (isApplyingTemplateRef.current) return;
// CRITICAL FIX: Skip validation if we're already validating to prevent infinite loops
if (isValidatingRef.current) return;
// PERFORMANCE FIX: Skip validation while cells are being edited
if (hasEditingCells) return;
// Debounce validation to prevent excessive calls - reduced for better responsiveness
const timeoutId = setTimeout(() => {
if (isValidatingRef.current) return; // Double-check before proceeding
if (hasEditingCells) return; // Double-check editing state
// Validation running (removed console.log for performance)
isValidatingRef.current = true;
// ASYNC validation that clears old errors and adds new ones
const validateFields = async () => {
try {
// Create a complete fresh validation map
const allValidationErrors = new Map<number, Record<string, any[]>>();
// Get all field types that need validation
const requiredFields = fields.filter((field) =>
field.validations?.some((v) => v.rule === "required")
);
const regexFields = fields.filter((field) =>
field.validations?.some((v) => v.rule === "regex")
);
// ASYNC PROCESSING: Process rows in small batches to prevent UI blocking
const BATCH_SIZE = 10; // Small batch size for responsiveness
const totalRows = data.length;
for (let batchStart = 0; batchStart < totalRows; batchStart += BATCH_SIZE) {
const batchEnd = Math.min(batchStart + BATCH_SIZE, totalRows);
// Process this batch synchronously (fast)
for (let rowIndex = batchStart; rowIndex < batchEnd; rowIndex++) {
const row = data[rowIndex];
const rowErrors: Record<string, any[]> = {};
// Check required fields
requiredFields.forEach((field) => {
const key = String(field.key);
const value = row[key as keyof typeof row];
// Check if field is empty
if (value === undefined || value === null || value === "" ||
(Array.isArray(value) && value.length === 0)) {
const requiredValidation = field.validations?.find((v) => v.rule === "required");
rowErrors[key] = [
{
message: requiredValidation?.errorMessage || "This field is required",
level: requiredValidation?.level || "error",
source: "row",
type: "required",
},
];
}
});
// Check regex fields (only if they have values)
regexFields.forEach((field) => {
const key = String(field.key);
const value = row[key as keyof typeof row];
// Skip empty values for regex validation
if (value === undefined || value === null || value === "") {
return;
}
const regexValidation = field.validations?.find((v) => v.rule === "regex");
if (regexValidation) {
try {
const regex = new RegExp(regexValidation.value, regexValidation.flags);
if (!regex.test(String(value))) {
// Only add regex error if no required error exists
if (!rowErrors[key]) {
rowErrors[key] = [
{
message: regexValidation.errorMessage,
level: regexValidation.level || "error",
source: "row",
type: "regex",
},
];
}
}
} catch (error) {
console.error("Invalid regex in validation:", error);
}
}
});
// Only add to the map if there are actually errors
if (Object.keys(rowErrors).length > 0) {
allValidationErrors.set(rowIndex, rowErrors);
}
}
// CRITICAL: Yield control back to the UI thread after each batch
if (batchEnd < totalRows) {
await new Promise(resolve => setTimeout(resolve, 0));
}
}
// Replace validation errors completely (clears old ones)
setValidationErrors(allValidationErrors);
// Run uniqueness validations after basic validation (also async)
setTimeout(() => validateUniqueItemNumbers(), 0);
} finally {
// Always ensure the ref is reset, even if an error occurs - reduced delay
setTimeout(() => {
isValidatingRef.current = false;
}, 10);
}
};
// Run validation immediately (async)
validateFields();
}, 10); // Reduced debounce for better responsiveness
// Cleanup timeout on unmount or dependency change
return () => clearTimeout(timeoutId);
}, [data, fields, hasEditingCells]); // Added hasEditingCells to dependencies
// Add field options query // Add field options query
const { data: fieldOptionsData } = useQuery({ const { data: fieldOptionsData } = useQuery({
@@ -380,11 +256,12 @@ export const useValidationState = <T extends string>({
[data, onBack, onNext, validationErrors] [data, onBack, onNext, validationErrors]
); );
// Initialize validation on mount // Initialize validation once, after initial UPC-based item number generation completes
useEffect(() => { useEffect(() => {
if (initialValidationDoneRef.current) return; if (initialValidationDoneRef.current) return;
// Wait for initial UPC validation to finish to avoid double work and ensure
// Running initial validation (removed console.log for performance) // item_number values are in place before uniqueness checks
if (!upcValidation.initialValidationDone) return;
const runCompleteValidation = async () => { const runCompleteValidation = async () => {
if (!data || data.length === 0) return; if (!data || data.length === 0) return;
@@ -623,7 +500,73 @@ export const useValidationState = <T extends string>({
// Run the complete validation // Run the complete validation
runCompleteValidation(); runCompleteValidation();
}, [data, fields, setData, setValidationErrors, validateUniqueItemNumbers]); }, [data, fields, setData, setValidationErrors, validateUniqueItemNumbers, upcValidation.initialValidationDone]);
// Targeted uniqueness revalidation: run only when item_number values change
useEffect(() => {
if (!data || data.length === 0) return;
// Build a simple signature of the item_number column
const sig = data.map((r) => String((r as Record<string, any>).item_number ?? '')).join('|');
if (lastItemNumberSigRef.current === sig) return;
lastItemNumberSigRef.current = sig;
// Compute unique errors for item_number only and merge
const uniqueMap = validateUniqueField(data, 'item_number');
const rowsWithUnique = new Set<number>();
uniqueMap.forEach((_, idx) => rowsWithUnique.add(idx));
setValidationErrors((prev) => {
const newMap = new Map(prev);
// Apply unique errors
uniqueMap.forEach((errorsForRow, rowIdx) => {
const existing = { ...(newMap.get(rowIdx) || {}) } as Record<string, any[]>;
const info = (errorsForRow as any)['item_number'];
const currentValue = (data[rowIdx] as any)?.['item_number'];
// Only apply uniqueness error when the value is non-empty
if (info && currentValue !== undefined && currentValue !== null && String(currentValue) !== '') {
existing['item_number'] = [
{
message: info.message,
level: info.level,
source: info.source,
type: info.type,
},
];
}
// If value is now present, make sure to clear any lingering Required error
if (currentValue !== undefined && currentValue !== null && String(currentValue) !== '' && existing['item_number']) {
existing['item_number'] = (existing['item_number'] as any[]).filter((e) => e.type !== ErrorType.Required);
if ((existing['item_number'] as any[]).length === 0) delete existing['item_number'];
}
if (Object.keys(existing).length > 0) newMap.set(rowIdx, existing);
else newMap.delete(rowIdx);
});
// Remove stale unique errors for rows no longer duplicated
newMap.forEach((rowErrs, rowIdx) => {
const currentValue = (data[rowIdx] as any)?.['item_number'];
const shouldRemoveUnique = !rowsWithUnique.has(rowIdx) || currentValue === undefined || currentValue === null || String(currentValue) === '';
if (shouldRemoveUnique && (rowErrs as any)['item_number']) {
const filtered = (rowErrs as any)['item_number'].filter((e: any) => e.type !== ErrorType.Unique);
if (filtered.length > 0) (rowErrs as any)['item_number'] = filtered;
else delete (rowErrs as any)['item_number'];
}
// If value now present, also clear any lingering Required error for this field
if (currentValue !== undefined && currentValue !== null && String(currentValue) !== '' && (rowErrs as any)['item_number']) {
const nonRequired = (rowErrs as any)['item_number'].filter((e: any) => e.type !== ErrorType.Required);
if (nonRequired.length > 0) (rowErrs as any)['item_number'] = nonRequired;
else delete (rowErrs as any)['item_number'];
}
if (Object.keys(rowErrs).length > 0) newMap.set(rowIdx, rowErrs);
else newMap.delete(rowIdx);
});
return newMap;
});
}, [data, validateUniqueField, setValidationErrors]);
// Update fields with latest options // Update fields with latest options
const fieldsWithOptions = useMemo(() => { const fieldsWithOptions = useMemo(() => {