I recently had to export a lot of tables from SQL Server to CSV, and some of the columns contain binary data. I came up with a Python script, with a dependency on pyodbc, that does the job. After that I was able to import those tables into a Postgres database (a matching import sketch is at the end of this answer).
Posting this because the existing answers don't handle binary data.
Export script:
import os
import pyodbc
import argparse


def parse_args():
    parser = argparse.ArgumentParser(description="bulk table export")
    parser.add_argument("--server", "-s", metavar="server", required=True, type=str, help="sql server host")
    parser.add_argument("--db", "-d", metavar="db", required=True, type=str, help="db name")
    parser.add_argument("--output", "-o", metavar="csv", required=True, type=str, help="where to export")
    return parser.parse_args()


def connect(server, db):
    return pyodbc.connect(f"Driver={{SQL Server}};Server={server};Database={db};Trusted_Connection=yes;")


if __name__ == '__main__':
    args = parse_args()
    with connect(args.server, args.db) as conn:
        with conn.cursor() as cursor:
            # get all tables for export (dbo schema only)
            query = """
                SELECT s.name as schemaName, t.name as tableName
                FROM sys.tables t
                INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
            """
            tables = [(s, t) for (s, t) in cursor.execute(query) if s == "dbo"]

            export_folder = os.path.join(args.output, args.db)
            if not os.path.exists(export_folder):
                os.makedirs(export_folder)

            # start exporting
            for (schema, table) in tables:
                path = os.path.join(export_folder, f"{table.lower()}.csv")
                with open(path, "w", encoding="utf-8") as f:
                    print(f"writing table '{table}'")
                    # write header
                    rows = cursor.execute(f"SELECT * FROM [{schema}].[{table}]")
                    f.write(",".join([column[0] for column in cursor.description]) + "\n")
                    # fill all rows
                    for row in rows:
                        r = []
                        for cell in row:
                            if isinstance(cell, bytes):
                                # binary data: convert each byte to an octal escape (\ooo),
                                # which is what the postgres csv import expects for bytea;
                                # you can handle binary cells here however you wish
                                byte_string = []
                                for b in cell:
                                    b_str = oct(b).replace("0o", "")
                                    byte_string.append("\\" + f"{b_str:0>3}")
                                byte_str = "".join(byte_string)
                                r.append(f"${byte_str}$")
                            elif cell is None:
                                r.append("")
                            else:
                                r.append(f"${cell}$")
                        f.write(",".join(r) + "\n")
Example usage:
python csv_export.py ^
--output "C:\amazing\path\csv" ^
--server "AMAZING_SERVER_NAME" ^
--db "MY_VERY_IMPORTANT_DB"