Optimize PredictionsChart with batch data loading
All checks were successful
Build Frontend / build (push) Successful in 1m5s
Refactors PredictionsChart to fetch all prediction data in a single batch request, so switching between horizons and simulation factors happens instantly on the client. Updates state management and effects to read from the pre-cached batch data, reducing API calls and improving responsiveness. Also removes the TensorFlow.js mention from the tooltip text.
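For context, here is a minimal sketch of the shapes this change assumes, inferred from the diff below; names other than getBatchPredictionsWithStore and PredictionsOverview are placeholders, and the real definitions in the analytics service may differ.

// Stand-in for the component's real overview type (actual fields omitted).
type PredictionsOverview = Record<string, unknown>;

// Batch payload inferred from the new batchData state: horizon (days ahead)
// maps to simulation factor (a decimal string) maps to a predictions overview.
type BatchPredictions = {
  [horizon: string]: {
    [simulationFactor: string]: PredictionsOverview;
  };
};

// Response contract inferred from the checks in fetchBatchData: `success` plus
// an optional `predictions` map. When either is missing, the component falls
// back to a single getPredictionsOverviewWithStore request.
interface BatchPredictionsResponse {
  success: boolean;
  predictions?: BatchPredictions;
}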
@@ -49,8 +49,6 @@ import type { DateRange as ProfitDateRange } from "@/lib/services/profit-analyti
 import { MotionWrapper } from "@/components/ui/motion-wrapper";
 import { motion } from "framer-motion";
 
-// Lazy load chart components - already handled individually below
-
 const RevenueChart = dynamic(() => import("./RevenueChart"), {
   loading: () => <ChartSkeleton />,
 });
@@ -82,6 +82,12 @@ export default function PredictionsChart({
   const [baselinePredictions, setBaselinePredictions] = useState<PredictionsOverview | null>(
     null,
   );
+  // Batch data holds all pre-cached predictions for instant switching
+  const [batchData, setBatchData] = useState<{
+    [horizon: string]: {
+      [simulationFactor: string]: PredictionsOverview;
+    };
+  } | null>(null);
   const [stockPredictions, setStockPredictions] =
     useState<StockPredictionsResponse | null>(null);
   const [loading, setLoading] = useState(true);
@@ -92,16 +98,33 @@ export default function PredictionsChart({
   const [committedSimulationFactor, setCommittedSimulationFactor] = useState(0);
   const { toast } = useToast();
 
-  // Fetch baseline predictions (simulation factor = 0)
-  const fetchBaseline = async () => {
+  // Fetch all predictions in batch (for instant client-side switching)
+  const fetchBatchData = async () => {
     try {
       setLoading(true);
-      const [overview, stock] = await Promise.all([
-        getPredictionsOverviewWithStore(daysAhead, timeRange, 0),
+      const { getBatchPredictionsWithStore } = await import("@/lib/services/analytics-service");
+      const [batchResponse, stock] = await Promise.all([
+        getBatchPredictionsWithStore(timeRange),
         getStockPredictionsWithStore(timeRange),
       ]);
+
+      if (batchResponse.success && batchResponse.predictions) {
+        setBatchData(batchResponse.predictions);
+        // Set initial predictions from batch
+        const horizonData = batchResponse.predictions[daysAhead.toString()];
+        if (horizonData) {
+          const baseline = horizonData["0"];
+          if (baseline) {
+            setBaselinePredictions(baseline);
+            setPredictions(baseline);
+          }
+        }
+      } else {
+        // Fallback to single request if batch not available
+        const overview = await getPredictionsOverviewWithStore(daysAhead, timeRange, 0);
         setBaselinePredictions(overview);
         setPredictions(overview);
+      }
       setStockPredictions(stock);
     } catch (error) {
       console.error("Error fetching predictions:", error);
@@ -115,43 +138,46 @@ export default function PredictionsChart({
     }
   };
 
-  // Fetch simulated predictions (without full reload)
-  const fetchSimulation = async (factor: number) => {
-    if (factor === 0) {
-      // Use cached baseline
-      setPredictions(baselinePredictions);
-      return;
-    }
-
-    try {
-      setIsSimulating(true);
-      const overview = await getPredictionsOverviewWithStore(daysAhead, timeRange, factor / 100);
-      setPredictions(overview);
-    } catch (error) {
-      console.error("Error fetching simulation:", error);
-      toast({
-        title: "Error",
-        description: "Failed to load simulation",
-        variant: "destructive",
-      });
-    } finally {
-      setIsSimulating(false);
-    }
-  };
-
-  // Fetch baseline on initial load or when daysAhead/timeRange changes
+  // Switch predictions from batch data (no API call!)
+  const switchPredictions = useCallback((horizon: number, simFactor: number) => {
+    if (!batchData) return;
+    const horizonData = batchData[horizon.toString()];
+    if (!horizonData) return;
+
+    // Simulation factor is stored as decimal (e.g., 0.1 for 10%)
+    const simKey = (simFactor / 100).toString();
+    const newPrediction = horizonData[simKey];
+
+    if (newPrediction) {
+      setPredictions(newPrediction);
+      if (simFactor === 0) {
+        setBaselinePredictions(newPrediction);
+      }
+    }
+  }, [batchData]);
+
+  // Fetch batch data on initial load or when timeRange changes
   useEffect(() => {
-    fetchBaseline();
+    fetchBatchData();
     setCommittedSimulationFactor(0);
     setSimulationFactor(0);
-  }, [daysAhead, timeRange]);
+  }, [timeRange]);
 
-  // Fetch simulation when committed slider value changes
+  // Switch predictions when daysAhead changes (instant, from batch)
   useEffect(() => {
-    if (baselinePredictions) {
-      fetchSimulation(committedSimulationFactor);
+    if (batchData) {
+      switchPredictions(daysAhead, committedSimulationFactor);
     }
-  }, [committedSimulationFactor]);
+  }, [daysAhead, batchData, switchPredictions]);
+
+  // Switch predictions when simulation factor changes (instant, from batch)
+  useEffect(() => {
+    if (batchData) {
+      switchPredictions(daysAhead, committedSimulationFactor);
+    }
+  }, [committedSimulationFactor, batchData, switchPredictions]);
 
   const getConfidenceColor = (confidence: string) => {
     switch (confidence) {
@@ -305,7 +331,7 @@ export default function PredictionsChart({
           <Button
             variant="outline"
             size="icon"
-            onClick={fetchBaseline}
+            onClick={fetchBatchData}
             disabled={loading || isSimulating}
           >
             <RefreshCw
@@ -403,7 +429,7 @@ export default function PredictionsChart({
                   </span>
                 </TooltipTrigger>
                 <TooltipContent>
-                  <p>Predictions generated using a Deep Learning Ensemble Model (TensorFlow.js)</p>
+                  <p>Predictions generated using a Deep Learning Ensemble Model</p>
                 </TooltipContent>
               </Tooltip>
             )}
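One subtlety the diff relies on: the slider emits whole percentages while the batch keys store the factor as a decimal string, so switchPredictions divides by 100 before the lookup. A tiny illustration, with hypothetical data values:

// Hypothetical horizon entry: keys are decimal-string simulation factors.
const horizonData: { [simulationFactor: string]: string } = {
  "0": "baseline overview",
  "0.1": "overview simulated at +10%",
};

// Mirrors the simKey derivation in switchPredictions.
function simKeyFor(simFactorPercent: number): string {
  return (simFactorPercent / 100).toString();
}

console.log(horizonData[simKeyFor(0)]); // "baseline overview"
console.log(horizonData[simKeyFor(10)]); // "overview simulated at +10%"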