I have some Python code which runs a loop across ten different "databases" and stores each database's data separately. How do I save the content from the ten (10) loops into one dataframe that I can save / export or push to Google Sheets?
Here's a preview of my code below:
import pandas
import gspread
from gspread_dataframe import set_with_dataframe  # set_with_dataframe comes from the gspread-dataframe package
# ('my' below is the vendor API client module I'm using; its import is omitted here)

USERNAME = 'email'
PASSWORD = 'password'
DATABASES = ['DB1', 'DB2', 'DB3', 'DB4', 'DB5', 'DB6', 'DB7', 'DB8', 'DB9', 'DB10']

def get_dataframe(db):
    client = my.API(username=USERNAME, password=PASSWORD, database=db)
    client.authenticate()
    device_status = client.get("DeviceStatusInfo")
    device_serial = client.get("Device")

    # device info -> one list per column I care about
    df = pandas.DataFrame(device_serial)
    ids = list(df['id'])
    sn = list(df['serialNumber'])
    nm = list(df['name'])
    cm = list(df['comment'])
    dt = list(df['deviceType'])

    # status info: pull the id out of the nested 'device' dict so I can merge on it
    di = pandas.DataFrame(device_status)
    device_ids = di['device']
    dev_id = []
    for device_id in device_ids:
        dev_id.append(device_id['id'])
    di['id'] = pandas.Series(dev_id)

    d = {
        'id': ids,
        'serialNumber': sn,
        'name': nm,
        'comment': cm,
        'deviceType': dt
    }
    df2 = pandas.DataFrame(d)

    # one row per device: status columns merged with device columns
    dfo = pandas.merge(di, df2, on='id')
    print(dfo)
    return dfo
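(Side question, in case it simplifies anything: I suspect the manual loop that pulls the id out of the nested 'device' dict could be replaced with pandas.json_normalize, roughly as in the toy example below. The records there are made up and may not match the real API output, so treat it as a sketch rather than something I've verified.)

import pandas

# made-up records shaped like what I think DeviceStatusInfo returns
device_status = [
    {'device': {'id': 'b1'}, 'latitude': 43.6, 'longitude': -79.4},
    {'device': {'id': 'b2'}, 'latitude': 45.5, 'longitude': -73.6},
]

di = pandas.json_normalize(device_status)     # nested 'device' dict becomes a 'device.id' column
di = di.rename(columns={'device.id': 'id'})   # matches the 'id' column used in the merge
print(di)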
In addition, here's the loop below:
def main():
    for db in DATABASES:
        df = get_dataframe(db)
        gc = gspread.service_account(filename='creds.json')
        sh = gc.open_by_key('1vToay2IueDcV0gzu6MCehcMUU6Frea1fa3lqjpoKdSs')
        worksheet = sh.get_worksheet(0)
        dfo = pandas.DataFrame(df)
        # every pass writes to the same worksheet, so I only end up with one database's data
        set_with_dataframe(worksheet, dfo)

if __name__ == '__main__':
    main()
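What I'm imagining is something like the sketch below: collect each loop's dataframe in a list, combine them with pandas.concat, and then export / push the combined frame once. The 'database' column, the CSV filename, and the creds / sheet key are just placeholders, and get_dataframe is the function above. Is this the right approach, or is there a better way?

import pandas
import gspread
from gspread_dataframe import set_with_dataframe

def main():
    frames = []
    for db in DATABASES:
        df = get_dataframe(db)
        df['database'] = db                    # tag each row with the database it came from
        frames.append(df)

    # one dataframe holding all ten databases
    combined = pandas.concat(frames, ignore_index=True)

    # local export
    combined.to_csv('all_databases.csv', index=False)

    # single push to Google Sheets instead of overwriting the worksheet on every loop
    gc = gspread.service_account(filename='creds.json')
    sh = gc.open_by_key('1vToay2IueDcV0gzu6MCehcMUU6Frea1fa3lqjpoKdSs')
    worksheet = sh.get_worksheet(0)
    set_with_dataframe(worksheet, combined)

if __name__ == '__main__':
    main()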
Any help from anyone would be greatly appreciated! Thanks in advance!