The decision of whether to cache is entirely up to you. Will stale data be a problem?
The simplest caching scheme I can think of is something like the following using Python's pickle module:
import MySQLdb
import csv
import pprint
import time
import pickle  # was missing: pickle.load/pickle.dump are used below

MAX_CACHE_AGE = 60*20 # 20 Minutes
CACHE_FILENAME = 'results.cache'

# Load the cached results; 'rb' because pickled data is binary
# (text mode corrupts pickles on Windows and breaks protocol >= 1).
with open(CACHE_FILENAME, 'rb') as cache:
    cached = pickle.load(cache)

if time.time() > cached['timestamp'] + MAX_CACHE_AGE:
    # Cache too old, run query
    db = MySQLdb.connect(host="localhost",  # The Host
                         user="username",   # username
                         passwd="password", # password
                         db="dbname")       # name of the data base
    cursor = db.cursor()
    cursor.execute("SELECT name, id, city, storeid FROM Products;")
    StudentsData = cursor.fetchall()
    # Update cache file ('wb' to match the binary read above)
    data = {'results': StudentsData, 'timestamp': time.time()}
    with open(CACHE_FILENAME, 'wb') as cache:
        pickle.dump(data, cache)
else:
    # Cached data is fresh enough, use that
    StudentsData = cached['results']

pprint.pprint(StudentsData)
You'll need to initialize the results.cache file once manually.
EDIT
The with statement uses a context manager and was introduced in Python 2.5.
# Open the cache file and unpickle its contents; the file is closed
# automatically when the with-block exits, even if pickle.load raises.
with open(CACHE_FILENAME, 'r') as cache:
    cached = pickle.load(cache)
Can be rewritten as
# Equivalent without 'with': open, load, then close the file handle.
# The file object and the loaded data need DISTINCT names — the
# original rebound 'cached' and then closed the unpickled data
# instead of the file, while pickle.load read an undefined 'cache'.
cache = open(CACHE_FILENAME, 'r')
cached = pickle.load(cache)
cache.close()
EDIT2
After a long discussion in chat, the following works:
import MySQLdb
import csv
import pprint
import time
import pickle

MAX_CACHE_AGE = 60*20 # 20 Minutes
CACHE_FILENAME = 'results.cache'

regen = False
try:
    # 'rb' because pickled data is binary (text mode corrupts pickles
    # on Windows and breaks protocol >= 1).
    with open(CACHE_FILENAME, 'rb') as cache:
        cached = pickle.load(cache)
    if time.time() > cached['timestamp'] + MAX_CACHE_AGE:
        print("Cache too old: regenerating cache")
        regen = True
    else:
        print("Cached data is fresh enough: loading results from cache")
except IOError:
    # Missing/unreadable cache file: fall through and regenerate it,
    # so no manual initialization of the cache file is required.
    print("Error opening %s: regenerating cache" % CACHE_FILENAME)
    regen = True

if regen:
    # Cache too old, run query
    db = MySQLdb.connect(host="localhost",  # The Host
                         user="username",   # username
                         passwd="password", # password
                         db="dbname")       # name of the data base
    cursor = db.cursor()
    cursor.execute("SELECT name, id, city, storeid FROM Products;")
    StudentsData = cursor.fetchall()
    cursor.close()
    db.close()  # release the connection as well as the cursor
    # Update cache file ('wb' to match the binary read above)
    data = {'results': StudentsData, 'timestamp': time.time()}
    with open(CACHE_FILENAME, 'wb') as cache:
        pickle.dump(data, cache)
else:
    # Cached data is fresh enough, use that
    StudentsData = cached['results']

# print(x) with a single argument behaves identically under Python 2's
# print statement, and matches the parenthesized prints used above.
print(StudentsData)