A space to discuss GraphQL queries, mutations, troubleshooting, throttling, and best practices.
@shopifydev I want to load an entire rate card, by weight and per country, using the GraphQL Admin API and Python.
I have a script that works for small loads, but it fails with
Error: {'errors': 'query param length is too long'}
when I try to load the entire file (about 8000 lines).
This would seem to be an ideal case for batching the updates. Here is the script I am currently using:
import requests
import json
import csv
# Replace these variables with your actual Shopify store information
SHOPIFY_STORE = 'xxxxxxxxx'  # shop subdomain: <SHOPIFY_STORE>.myshopify.com
ACCESS_TOKEN = 'xxxxxxxxx'  # Admin API token sent as X-Shopify-Access-Token
CSV_FILE_PATH = 'uk.csv'  # rate-card CSV consumed by read_rates_from_csv()
def fetch_locations():
    """Fetch up to 10 store locations from the Shopify Admin GraphQL API.

    Returns:
        list: Location edges (each ``{"node": {"id", "name"}}``), or an
        empty list when the request fails or the payload has no "data" key.
    """
    query = """
    {
      locations(first: 10) {
        edges {
          node {
            id
            name
          }
        }
      }
    }
    """
    headers = {
        "Content-Type": "application/json",
        "X-Shopify-Access-Token": ACCESS_TOKEN
    }
    # Pin the same API version (2024-07) used by the other requests in this
    # script; the original mixed 2024-04 here with 2024-07 elsewhere.
    response = requests.post(
        f"https://{SHOPIFY_STORE}.myshopify.com/admin/api/2024-07/graphql.json",
        headers=headers,
        data=json.dumps({"query": query})
    )
    if response.status_code != 200:
        print(f"Error fetching locations: {response.status_code}, {response.text}")
        return []
    response_json = response.json()
    if 'data' not in response_json:
        print(f"Error: 'data' not in response: {response_json}")
        return []
    return response_json["data"]["locations"]["edges"]
def fetch_delivery_profiles():
    """Retrieve the first 10 delivery profiles via the Admin GraphQL API.

    Returns a list of profile edges (``{"node": {"id", "name"}}``); an empty
    list signals an HTTP failure or a response without a "data" key.
    """
    profile_query = """
    {
      deliveryProfiles(first: 10) {
        edges {
          node {
            id
            name
          }
        }
      }
    }
    """
    endpoint = f"https://{SHOPIFY_STORE}.myshopify.com/admin/api/2024-07/graphql.json"
    resp = requests.post(
        endpoint,
        headers={
            "Content-Type": "application/json",
            "X-Shopify-Access-Token": ACCESS_TOKEN,
        },
        data=json.dumps({"query": profile_query}),
    )
    if resp.status_code != 200:
        print(f"Error fetching delivery profiles: {resp.status_code}, {resp.text}")
        return []
    payload = resp.json()
    if 'data' not in payload:
        print(f"Error: 'data' not in response: {payload}")
        return []
    return payload["data"]["deliveryProfiles"]["edges"]
def prompt_user_selection(options, option_type):
    """Interactively ask the user to pick one option from a numbered list.

    Args:
        options: List of GraphQL edges (``{"node": {"id", "name"}}``).
        option_type: Human-readable label for the prompt, e.g. "location".

    Returns:
        The selected node's GraphQL id, or None when no options exist.
    """
    if not options:
        print(f"No {option_type}s available.")
        return None
    print(f"Select a {option_type}:")
    for idx, option in enumerate(options, start=1):
        print(f"{idx}. {option['node']['name']}")
    # Re-prompt until the input is valid; the original crashed on
    # non-numeric input (ValueError) and out-of-range picks (IndexError,
    # or a silently-wrong item for negative numbers).
    while True:
        raw = input(f"Enter the number of the {option_type} you want to select: ")
        try:
            selection = int(raw) - 1
        except ValueError:
            print("Please enter a number.")
            continue
        if 0 <= selection < len(options):
            return options[selection]["node"]["id"]
        print(f"Please enter a number between 1 and {len(options)}.")
def read_rates_from_csv(file_path):
    """Parse shipping rates from a CSV file.

    Expected columns: min_weight, max_weight, rate, name, description,
    zone_name, country_code, provinces (comma-separated codes, may be empty).

    Args:
        file_path: Path to the rate-card CSV file.

    Returns:
        list[dict]: One dict per row with numeric weights/rate and a list of
        province codes; [] (or the rows parsed so far) on error.
    """
    rates = []
    try:
        # newline='' is the documented way to open files for the csv module;
        # an explicit encoding avoids platform-dependent defaults.
        with open(file_path, mode='r', newline='', encoding='utf-8') as file:
            csv_reader = csv.DictReader(file)
            for row in csv_reader:
                provinces = row["provinces"].split(',') if row["provinces"] else []
                rates.append({
                    "min_weight": float(row["min_weight"]),
                    "max_weight": float(row["max_weight"]),
                    "rate": float(row["rate"]),
                    "name": row["name"],
                    "description": row["description"],
                    "zone_name": row["zone_name"],
                    "country_code": row["country_code"],
                    "provinces": provinces
                })
    except (OSError, KeyError, ValueError) as e:
        # Narrowed from a bare Exception: missing file, missing columns, and
        # malformed numbers are the realistic failures; anything else should
        # surface as a real traceback instead of being swallowed.
        print(f"Error reading CSV file: {e}")
    return rates
def group_rates_by_zone(rates):
    """Group rate rows by (zone_name, country_code, provinces).

    The province list is converted to a tuple so the composite key is
    hashable. Returns a dict mapping each key tuple to its list of rates.
    """
    grouped = {}
    for entry in rates:
        key = (entry['zone_name'], entry['country_code'], tuple(entry['provinces']))
        grouped.setdefault(key, []).append(entry)
    return grouped
def construct_graphql_mutation(zones, delivery_profile_id, location_id):
    """Build a deliveryProfileUpdate GraphQL mutation covering all zones.

    Args:
        zones: Mapping of (zone_name, country_code, provinces) keys to lists
            of rate dicts, as produced by group_rates_by_zone().
        delivery_profile_id: GraphQL id of the delivery profile to update.
        location_id: GraphQL id of the location placed in the new group.

    Returns:
        str: A complete GraphQL mutation document.
    """

    def _q(value):
        # json.dumps yields a double-quoted, escaped literal that is also
        # valid GraphQL string syntax; the original bare f'"{...}"'
        # interpolation produced a malformed document whenever a name,
        # description, or code contained a double quote or backslash.
        return json.dumps(str(value))

    zones_to_create = []
    for (zone_name, country_code, provinces), rates in zones.items():
        method_definitions = []
        for rate in rates:
            method_definitions.append(f"""
            {{
                name: {_q(rate['name'])},
                description: {_q(rate['description'])},
                rateDefinition: {{
                    price: {{
                        amount: {rate['rate']},
                        currencyCode: USD
                    }}
                }},
                weightConditionsToCreate: [
                    {{
                        criteria: {{
                            unit: POUNDS,
                            value: {rate['min_weight']}
                        }},
                        operator: GREATER_THAN_OR_EQUAL_TO
                    }},
                    {{
                        criteria: {{
                            unit: POUNDS,
                            value: {rate['max_weight']}
                        }},
                        operator: LESS_THAN_OR_EQUAL_TO
                    }}
                ]
            }}
            """)
        method_definitions_str = ", ".join(method_definitions)
        # country_code is a GraphQL CountryCode enum value and must stay
        # unquoted; province codes are strings and are escaped.
        if provinces:
            provinces_str = ", ".join(
                f'{{ code: {_q(province.strip())} }}' for province in provinces
            )
            zone_countries = f"""
            countries: [
                {{
                    code: {country_code}
                    provinces: [{provinces_str}]
                }}
            ]
            """
        else:
            zone_countries = f"""
            countries: [
                {{
                    code: {country_code}
                }}
            ]
            """
        zones_to_create.append(f"""
        {{
            name: {_q(zone_name)},
            {zone_countries},
            methodDefinitionsToCreate: [{method_definitions_str}]
        }}
        """)
    zones_to_create_str = ", ".join(zones_to_create)
    # The ids are gid:// strings returned by the API, but _q keeps the
    # quoting/escaping uniform across every interpolated string.
    mutation = f"""
    mutation updateshippingprofiles {{
        deliveryProfileUpdate(
            id: {_q(delivery_profile_id)}
            profile: {{
                locationGroupsToCreate: [
                    {{
                        locations: [{_q(location_id)}]
                        zonesToCreate: [{zones_to_create_str}]
                    }}
                ]
            }}
        ) {{
            profile {{
                id
                profileLocationGroups {{
                    locationGroupZones(first: 5) {{
                        edges {{
                            node {{
                                zone {{
                                    id
                                    name
                                    countries {{
                                        id
                                        name
                                        provinces {{
                                            id
                                            name
                                            code
                                        }}
                                    }}
                                }}
                            }}
                        }}
                    }}
                }}
            }}
            userErrors {{
                field
                message
            }}
        }}
    }}
    """
    return mutation
def send_zones(zones, delivery_profile_id, location_id):
    """Send one deliveryProfileUpdate mutation covering all given zones.

    Prints the API result. An error is reported when the HTTP status is not
    200, the payload carries top-level GraphQL errors, or the mutation
    returns userErrors.
    """
    mutation = construct_graphql_mutation(zones, delivery_profile_id, location_id)
    headers = {
        "Content-Type": "application/json",
        "X-Shopify-Access-Token": ACCESS_TOKEN
    }
    response = requests.post(
        f"https://{SHOPIFY_STORE}.myshopify.com/admin/api/2024-07/graphql.json",
        headers=headers,
        data=json.dumps({"query": mutation})
    )
    # Parse the body exactly once; the original called response.json() up to
    # three times and crashed outright on a non-JSON error page.
    try:
        payload = response.json()
    except ValueError:
        print(f"Error: non-JSON response ({response.status_code}): {response.text}")
        return
    # A mutation can "succeed" at the HTTP/GraphQL level while Shopify still
    # rejects the input via userErrors — surface those too.
    data = payload.get("data") or {}
    profile_update = data.get("deliveryProfileUpdate") or {}
    user_errors = profile_update.get("userErrors") or []
    if response.status_code != 200 or 'errors' in payload or user_errors:
        print(f"Error: {payload}")
    else:
        print(f"Zones processed successfully: {payload}")
if __name__ == "__main__":
    # Interactive entry point: choose a location and a delivery profile,
    # then upload the CSV rate card as shipping zones.
    all_locations = fetch_locations()
    all_profiles = fetch_delivery_profiles()
    if all_locations and all_profiles:
        LOCATION_ID = prompt_user_selection(all_locations, "location")
        DELIVERY_PROFILE_ID = prompt_user_selection(all_profiles, "delivery profile")
        if LOCATION_ID and DELIVERY_PROFILE_ID:
            grouped_zones = group_rates_by_zone(read_rates_from_csv(CSV_FILE_PATH))
            send_zones(grouped_zones, DELIVERY_PROFILE_ID, LOCATION_ID)
    else:
        print("Could not fetch locations or delivery profiles. Exiting.")
Given that this code works and successfully updates delivery profiles, my next thought was to break the updates into batches. But when I do that, only the most recent batch survives — each batch run overwrites the update made by the previous one.
Hope that makes sense. How can I make it so that each batch adds shipping zones to the existing ones instead of replacing them?