Fetching all products metafields by namespace

Fetching all products metafields by namespace

1 0 2

I am trying to build a custom dynamic catalog filters system based on the metafields of products.

For now my store only contains about 25 products and very few metafields. So I'm trying to fetch all of these products and their metafields under the namespace 'filters' to drive the filters, using Shopify's GraphQL Admin API.


However, testing the following query, it was clear that it was too expensive for the admin API.



# Fetch the 'filters'-namespace metafields for the first 25 products.
# Restricting the connection with the `namespace` argument avoids pulling
# every metafield and lowers the query's cost against the rate limit.
query GetProductsMetafields {
  products(first: 25) {
    nodes {
      id
      metafields(first: 25, namespace: "filters") {
        nodes {
          key
          value
        }
      }
    }
  }
}
so I did the next best thing: creating a bulk operation and downloading the results. It does work, but the process takes a long time (maybe 15 minutes?), so I need to cache the results for about 48 hours to make sure the filters are displayed correctly *most of the time*. I understand that bulk operations are not meant for frontend-facing requests, but I'm rather stuck right now.


So I guess my real question being, how can I handle this more efficiently? Did I do an oversight somewhere?


This is how I handled the bulk operation queries:

Backend function handle data fetching:


import { initializeShopifyAdminClient } from "/src/lib/shopify/shopifyAdminGraphQLClient";
import {
    downloadBulkOperationResults, getBulkOperationDetails,
} from "/src/lib/shopify/bulk/bulkOperations";
import {cleanDuplicatesAndFilter, filterDuplicatesByKey} from "/src/utils/parser";

const ShopifyAdminClient = initializeShopifyAdminClient();

/**
 * Fetches all product metafields via a Shopify bulk operation.
 *
 * Reuses a completed (cached) bulk operation when one exists, otherwise
 * polls a running one until it finishes, then downloads and de-duplicates
 * the JSONL results.
 *
 * @returns {Promise<Array<object>|null>} Cleaned metafield records, or null on failure.
 */
export async function getProductsMetafields() {
    try {
        // Check for an existing bulk operation or initiate a new one.
        const bulkOperation = await initiateBulkOperation();

        if (!bulkOperation) {
            // Nothing to wait for — initiation returned nothing usable.
            console.error('No bulk operation to wait for.');
            return null;
        }

        let fullResults = null;
        if (bulkOperation.status === "COMPLETED") {
            // Reuse the completed operation returned by initiateBulkOperation().
            const cachedBulk = await getBulkOperationDetails(bulkOperation.id);
            console.log("bulk details", cachedBulk);
            if (cachedBulk) {
                const dataUrl = cachedBulk.data?.node?.url;
                fullResults = await downloadBulkOperationResults(dataUrl);
            }
        } else if (bulkOperation.status !== "CANCELLED") {
            // Operation is still running (CREATED/RUNNING) — poll until done.
            fullResults = await pollForBulkOperationCompletion(bulkOperation.id);
        }

        // Drop duplicate rows and rows missing the fields filters rely on.
        const mandatoryFields = ["id", "value"];
        const cleanedResults = cleanDuplicatesAndFilter(fullResults, mandatoryFields);

        return cleanedResults;
    } catch (error) {
        console.error('Error:', error);
        return null;
    }
}


Functions to handle the bulk operation itself (all of these are server-side):


/**
 * Starts a bulk query for product metafields, reusing an existing operation
 * when possible.
 *
 * A COMPLETED operation created within the last 48 hours is treated as a
 * cache hit and returned as-is; an in-flight (non-CANCELLED) operation is
 * returned so the caller can poll it. Otherwise a new bulkOperationRunQuery
 * mutation is issued.
 *
 * @returns {Promise<object>} The bulk operation node (id, status, createdAt, ...).
 * @throws {Error} If the mutation fails, returns userErrors, or yields no operation.
 */
export async function initiateBulkOperation() {
    try {
        // Check if there is an ongoing bulk operation.
        const existingBulkOperation = await checkExistingBulkOperation();

        if (existingBulkOperation) {
            if (existingBulkOperation.status === 'COMPLETED') {
                // Only reuse a completed operation made within the last 48 hours;
                // a stale one falls through and a fresh operation is started.
                const createdAtTimestamp = new Date(existingBulkOperation.createdAt).getTime();
                const currentTimestamp = Date.now();
                const fortyEightHoursInMilliseconds = 48 * 60 * 60 * 1000;

                const isWithinLast48Hours = currentTimestamp - createdAtTimestamp <= fortyEightHoursInMilliseconds;
                if (isWithinLast48Hours) {
                    console.log('A bulk query operation is completed and within cache time:', existingBulkOperation);
                    return existingBulkOperation;
                }
            } else if (existingBulkOperation.status !== 'CANCELLED') {
                // CREATED/RUNNING: return it without initiating a new one
                // (Shopify allows only one bulk operation at a time per shop).
                console.log('A bulk query operation is already running:', existingBulkOperation);
                return existingBulkOperation;
            }
        }

        // Run the bulk operation with the getProductMetafieldsQuery.
        // NOTE(review): `gql` must be imported (e.g. from graphql-tag) — it is
        // not brought into scope in this snippet; confirm the import exists.
        const bulkOperationResult = await ShopifyAdminClient.mutate({
            mutation: gql`
                mutation {
                    bulkOperationRunQuery(
                        query: """${getProductMetafieldsQuery}"""
                    ) {
                        bulkOperation {
                            id
                            status
                            createdAt
                        }
                        userErrors {
                            field
                            message
                        }
                    }
                }
            `,
        });

        const { data, errors: bulkOperationErrors } = bulkOperationResult;

        // Handle transport/GraphQL-level errors from the mutation.
        if (bulkOperationErrors && bulkOperationErrors.length > 0) {
            console.error('Bulk operation errors:', bulkOperationErrors);
            throw new Error('Bulk operation failed');
        }

        const { bulkOperation, userErrors } = data.bulkOperationRunQuery;

        // Handle application-level user errors.
        if (userErrors && userErrors.length > 0) {
            console.error('User errors:', userErrors);
            throw new Error('User errors occurred during bulk operation');
        }
        if (!bulkOperation) {
            console.error('Bulk operation undefined error');
            throw new Error('Bulk operation is undefined');
        }

        return bulkOperation;
    } catch (error) {
        console.error('Error during bulk operation initiation:', error);
        throw new Error('Failed to initiate bulk operation', { cause: error });
    }
}



/**
 * Looks up the current state of a bulk operation by node id.
 *
 * @param {string} bulkOperationId - The gid of the bulk operation node.
 * @returns {Promise<object>} Raw query response; the node lives at response.data.node.
 */
export async function getBulkOperationDetails(bulkOperationId) {
    // `bulkOperation` is the GraphQL query document defined elsewhere in this module.
    const response = await ShopifyAdminClient.query({
        query: bulkOperation,
        variables: { id: bulkOperationId },
    });
    return response;
}



/**
 * Downloads and parses a bulk operation's JSONL result file.
 *
 * Shopify delivers bulk results as newline-delimited JSON; each non-empty
 * line is parsed into one object.
 *
 * @param {string} url - The result file URL from the bulk operation node.
 * @returns {Promise<Array<object>>} One parsed object per JSONL line.
 * @throws {Error} If the download fails or a line is not valid JSON.
 */
export async function downloadBulkOperationResults(url) {
    try {
        const response = await fetch(url);

        if (!response.ok) {
            throw new Error('Failed to obtain the download URL');
        }

        const fileContent = await response.text();
        // Drop blank lines (including the trailing newline Shopify appends).
        const lines = fileContent.split('\n').filter(line => line.trim() !== '');

        // Each remaining line is a standalone JSON object.
        const parsedResults = lines.map(line => JSON.parse(line));

        return parsedResults;
    } catch (error) {
        console.error('Error downloading bulk operation results:', error);
        // Preserve the original failure for debugging via `cause`.
        throw new Error('Failed to download bulk operation results', { cause: error });
    }
}



/**
 * Polls a bulk operation until it completes, then downloads its results.
 *
 * Uses exponential backoff between polls (200 ms, 400 ms, 800 ms, ...).
 *
 * @param {string} bulkOperationId - The gid of the bulk operation node.
 * @returns {Promise<Array<object>>} Parsed JSONL results once COMPLETED.
 * @throws {Error} If the operation fails/cancels, status is unavailable,
 *   or the maximum number of poll attempts is exhausted.
 */
export async function pollForBulkOperationCompletion(bulkOperationId) {
    const maxPollAttempts = 15;
    let pollAttempt = 0;

    while (pollAttempt < maxPollAttempts) {
        try {
            const { data } = await getBulkOperationDetails(bulkOperationId);

            const currentStatus = data?.node?.status;

            if (!currentStatus) {
                console.error('Bulk operation status not available');
                throw new Error('Failed to get bulk operation status');
            }

            if (currentStatus === "COMPLETED") {
                const dataUrl = data?.node?.url;

                // Download the full results.
                const fullResults = await downloadBulkOperationResults(dataUrl);
                return fullResults;
            } else if (currentStatus === "FAILED" || currentStatus === "CANCELLED") {
                throw new Error(`Bulk operation failed or was cancelled. Status: ${currentStatus}`);
            }

            // Exponential backoff: 2^pollAttempt * 200 ms.
            const waitTime = Math.pow(2, pollAttempt) * 200;

            // Wait before polling again.
            await new Promise(resolve => setTimeout(resolve, waitTime));

            // BUG FIX: without this increment the loop never advanced and
            // the max-attempts guard could never fire.
            pollAttempt += 1;
        } catch (error) {
            console.error('Error during bulk operation polling:', error);
            throw new Error('Failed to poll for bulk operation completion', { cause: error });
        }
    }

    throw new Error('Exceeded maximum polling attempts. Bulk operation did not complete within the expected time.');
}


Replies 0 (0)