I have been fetching some study data from IndexedDB, and for 10,000+ records it takes a very long time to load (almost 40s). Dexie is already being used here.
async getCdiscDataDistinctValues (realm: Realm, study: StudyIds, domain: Domain, property: string) {
  this._checkMainArgs(realm, study, domain, false, true);
  checkReq(property, 'property', 'string');
  // Resolve the Dexie table for this realm/domain and load every record for the given study IDs
  const store = this._loaderCache.getStore(this._loaderCache.storeId(realm, domain));
  const records = await store.where('STUDYID').anyOf(asArray(study)).toArray();
  console.log("Records", records);
  console.log("Store", store);
  // Reduce the full record set to the distinct values of the requested property
  return uniq(map(records, property));
}
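For reference, a minimal way to time the raw Dexie query in isolation would be something like the sketch below (the store and study values are the same ones used inside the method above, so this is illustrative rather than the exact production code):

// Illustrative timing of the raw Dexie query, using the same store/study as in the method above
const t0 = performance.now();
const records = await store.where('STUDYID').anyOf(asArray(study)).toArray();
console.log(`Dexie toArray() returned ${records.length} records in ${performance.now() - t0}ms`);

And this is the Redux thunk that fans the calls out per domain and column: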
export const fetchDistinctValuesFromRawData = createAsyncThunk<
  FetchDistinctValuesFromRawDataPayload,
  FetchDistinctValuesFromRawDataReturnType[]
>(
  `${SLICE_PREFIX}/fetchDistinctValuesFromRawData`,
  async ({ studyIds, realm, domains, columns }, { rejectWithValue, getState }) => {
    try {
      console.log("Redux fetch started", new Date().getTime());
      const state = getState() as RootState;
      const results: any[] = [];
      // Iterate through each domain
      await Promise.all(
        asArray(domains).map(async (domain) => {
          const dataItem = studyDataItemSelector(state, { domain });
          const chunks = chunk(columns, 10); // Split columns into chunks of 10
          for (const columnsChunk of chunks) {
            const chunkResults = await Promise.all(
              asArray(columnsChunk).map(async (column) => {
                const startTime = performance.now();
                // Fetch distinct values for the current domain and column
                const values = await coreSdwDataApi.getCdiscDataDistinctValues(
                  realm,
                  studyIds,
                  domain,
                  column
                );
                const endTime = performance.now();
                console.log(
                  `Fetched values for ${domain}-${column} in ${endTime - startTime}ms`
                );
                return {
                  values,
                  domain,
                  column,
                  domainLastSync: dataItem?.lastSync,
                };
              })
            );
            results.push(...chunkResults); // Add chunk results to overall results
          }
        })
      );
      console.log("Redux fetch results", new Date().getTime(), results);
      return results;
    } catch (error) {
      console.error("Error fetching distinct values:", error);
      return rejectWithValue(error);
    }
  }
);
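For completeness, the thunk is dispatched roughly like this from the consuming screen (the hook name and argument values here are illustrative, not the exact production call):

// Illustrative dispatch; the actual studyIds/realm/domains/columns come from the screen state
const dispatch = useAppDispatch();
dispatch(
  fetchDistinctValuesFromRawData({
    studyIds: ['STUDY-001'],
    realm: currentRealm,
    domains: ['DM', 'AE'],
    columns: ['ARM', 'SEX', 'AESEV'],
  })
);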
I am expecting that 10,000+ records should not take this long to load onto the screen. I have tried multiple approaches, including increasing and decreasing the chunk size, but nothing worked. Please help me with this issue.