Plough Through Large Datasets From a Database with Python's SQLAlchemy
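The loop below assumes three names are already bound: a state_count dict, a more_results flag, and an open results_proxy. As a minimal setup sketch of how those might be created (the SQLite URL, the census table, and its state column are hypothetical stand-ins, not part of the original exercise):

from sqlalchemy import create_engine, text

# Hypothetical database file and table, for illustration only
engine = create_engine('sqlite:///census.sqlite')
connection = engine.connect()

# The returned result object supports fetchmany() and close(), as used below
results_proxy = connection.execute(text('SELECT state FROM census'))

state_count = {}
more_results = True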
# assume:
# state_count = {}   (an empty dict for tallying rows per state)
# more_results = True
# results_proxy is the ResultProxy returned by a prior query against the database

# Start a while loop checking for more results
while more_results:
    # Fetch the next 50 results from the ResultProxy: partial_results
    partial_results = results_proxy.fetchmany(50)
    # If the fetch comes back empty, set more_results to False
    if partial_results == []:
        more_results = False
    # Loop over the fetched records and increment the count for the state
    for row in partial_results:
        if row.state in state_count:
            state_count[row.state] += 1
        else:
            state_count[row.state] = 1

# Close the ResultProxy, and thus the connection
results_proxy.close()

# Print the count by state
print(state_count)
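As a design note, the manual membership check can be folded into collections.Counter, which treats missing keys as zero. A sketch of the same chunked loop under the same assumptions:

from collections import Counter

state_count = Counter()
more_results = True
while more_results:
    partial_results = results_proxy.fetchmany(50)
    if not partial_results:
        more_results = False
    # Add one count per state value in this chunk of rows
    state_count.update(row.state for row in partial_results)

results_proxy.close()
print(dict(state_count))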