How to run a bulk query in Python using chunks
import concurrent.futures
import functools
import time
from itertools import islice
# INSERT template; the '?' placeholders are substituted TEXTUALLY by
# new_execute_many below (not driver-level parameter binding).
qry="""insert into demo (id,score,marks)
values(?,?,?);"""
def time_took_decorator(func):
    """Decorator that prints how long *func* took, formatted HH:MM:SS.

    Fixes over the original: the wrapper now returns the wrapped
    function's result (it used to discard it, so sql_con's 'success'
    was silently lost), and functools.wraps preserves the wrapped
    function's metadata.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start = time.time()
        result = func(*args, **kwargs)
        seconds = time.time() - start
        # gmtime(seconds) only renders correctly for durations < 24h,
        # which is fine for this demo.
        time_taken = time.strftime('%H:%M:%S', time.gmtime(seconds))
        print('{} time taken'.format(func.__name__) + time_taken)
        return result
    return wrapper
@time_took_decorator
def sql_con(x):
    """Stand-in for a real database call: print the statement batch
    between two marker lines and report success."""
    for line in ('sql connection here', x, 'sql con end'):
        print(line)
    return 'success'
def bulk_query_lrngth_checker(bulk_query):
    """Break an oversized SQL batch into chunks of at most 500
    statements and hand each chunk to sql_con.

    Fixes over the original: the branch that re-evaluated ``temp_str``
    as a bare expression was a no-op (dead code), and splitting on ';'
    kept empty fragments (e.g. the trailing one) that were re-joined
    into the batch; empty statements are now dropped.
    """
    try:
        # Drop empty fragments produced by split (trailing ';', ';;').
        statements = [s for s in bulk_query.strip().split(';') if s]
        iterator = iter(statements)
        while chunk := list(islice(iterator, 500)):
            # Every chunk is re-terminated with a single ';'.
            sql_con(';'.join(chunk) + ';')
    except Exception as e:
        # Broad catch kept deliberately: best-effort demo logging.
        print('something went wrong', e)
def new_execute_many(info, qry):
    """Expand the '?' placeholders of *qry* against each row of *info*
    and dispatch the concatenated batch (chunked when it exceeds 5000
    characters).

    WARNING: values are interpolated textually, so string values come
    out unquoted/unescaped — this is SQL-injection-prone and produces
    invalid SQL for real execution (see 'values(a,12,1.1)' in the demo
    output). Use real driver parameter binding in production.
    """
    template = qry.replace('?', '{}')
    # join over a generator replaces the original quadratic += loop and
    # its pointless per-row ''.join() over an already-built string.
    bulk_qry = ''.join(template.format(*row) for row in info)
    if len(bulk_qry) > 5000:
        bulk_query_lrngth_checker(bulk_qry)
    else:
        sql_con(bulk_qry)
def new_chunker(tuple_data, chunk_size=100, query=None):
    """Feed *tuple_data* to new_execute_many in chunks of *chunk_size*.

    Generalized: *query* lets callers supply their own '?'-placeholder
    template; it defaults to the module-level ``qry``, preserving the
    original behavior.
    """
    if query is None:
        query = qry
    iterator = iter(tuple_data)
    while chunk := list(islice(iterator, chunk_size)):
        new_execute_many(chunk, query)
# Demo driver: ten sample (id, score, marks) rows repeated 1000x,
# dispatched in chunks of 10 rows.
user_marks = [
    ('a', 12, 1.1), ('b', 456, 1.167), ('c', 1290, 6.90),
    ('d', 666, 1.1), ('e', 912, 13.1), ('f', 178, 89.1),
    ('g', 12, -1.1), ('h', 15, -891.1), ('i', 12, 1.1),
    ('j', 1222, 1.1),
] * 1000
new_chunker(user_marks, 10)
Output:
sql connection here
insert into demo (id,score,marks)
values(a,12,1.1);insert into demo (id,score,marks)
values(b,456,1.167);insert into demo (id,score,marks)
values(c,1290,6.9);insert into demo (id,score,marks)
values(d,666,1.1);insert into demo (id,score,marks)
values(e,912,13.1);insert into demo (id,score,marks)
values(f,178,89.1);insert into demo (id,score,marks)
values(g,12,-1.1);insert into demo (id,score,marks)
values(h,15,-891.1);insert into demo (id,score,marks)
values(i,12,1.1);insert into demo (id,score,marks)
values(j,1222,1.1);
sql con end
sql_con time taken00:00:00