I need to fetch data from DB and write it to a corresponding column in CSV file. The following code do it very slow(iteratevly, one by one)
async def fetch_and_write():
conn = await asyncpg.connect('...')
with open('/Users/mac/Desktop/input.csv','r') as csvinput:
with open('/Users/mac/Desktop/output.csv', 'w') as csvoutput:
reader = csv.reader(csvinput)
writer = csv.writer(csvoutput, lineterminator='\n')
all = []
row = next(reader)
row.append('new_column_name')
all.append(row)
for row in reader:
query = "SELECT .. FROM .. WHERE id = '%s';"
query = query % row[14]
try:
result = await conn.fetch(query)
except BaseException:
print("Oops!That was no valid number.")
continue
row.append(result[0][0])
all.append(row)
writer.writerows(all)
How can I read id from CSV in chunks and use "in" clause to improve performance?