# TODO: formulate this as a SQLAlchemy expression (is there an exec() element?)
outer_sql = r'''
EXEC [sys].[sp_executesql] @stmt = :sql,
@params = N'@bind_value_blob_o_json varchar(max)',
@bind_value_blob_o_json = :json_blob
'''
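# For orientation only: inner_sql is defined earlier in this script and is assumed to
# shred the JSON blob into an inline table. A hypothetical sketch of such a statement,
# using OPENJSON (SQL Server 2016+), is shown commented out below -- the real inner_sql
# may well differ:
#
# inner_sql = r'''
# SELECT rn, foo, bar, fruit, flavor
# FROM OPENJSON(@bind_value_blob_o_json)
# WITH (rn int, foo int, bar int, fruit varchar(50), flavor varchar(50));
# '''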
# we prepare the *outer* statement that has a single command in it
statement = sql.text(outer_sql)
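# Partly answering the TODO above: as far as I know SQLAlchemy has no dedicated
# EXEC/stored-procedure expression element, so text() is the usual vehicle. A hedged
# refinement (assuming SQLAlchemy 1.x, to match engine.execute() below) is to declare
# the :sql bind parameter as Unicode explicitly, so the statement text is sent as
# NVARCHAR the way sp_executesql expects; otherwise this is equivalent to the line above.
from sqlalchemy.types import UnicodeText
statement = sql.text(outer_sql).bindparams(sql.bindparam('sql', type_=UnicodeText))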
# now execute that, passing in the unicode value of the inner SQL.
logging.basicConfig(format='%(asctime)s %(message)s')
logging.getLogger().setLevel(logging.DEBUG)
nrows = [1000, 10000, 100000, 200000, 500000]
for n in nrows:
    # we cons up a list of dicts to represent a single 'inline' table with lots of rows
    logging.debug("passing %d items", n)
    blob_o_json = json.dumps([dict(rn=x, foo=x * 2, bar=x - 1, fruit='banana', flavor='yummy') for x in range(0, n)])
    # Note: the named arguments on the next line correspond to the :sql and :json_blob placeholders in outer_sql
    result = engine.execute(statement, sql=six.text_type(inner_sql), json_blob=blob_o_json)
    rs = result.fetchall()
    #logging.debug((len(rs), rs[-4:]))
    # drop down to DBAPI to process each result-set. The application should know how many
    # result-sets will be present; I think the vast majority of queries will have just one.
    # I did not know how to get at anything other than the first result-set ... I thought there
    # was probably some pyodbc magic to make it work with SQLAlchemy and -- sure enough! -- there
    # is: result.cursor exposes the raw pyodbc cursor, and cursor.nextset() moves to the next set.
    #cursor = result.cursor
    #rs1 = cursor.fetchall()
    #cursor.nextset()
    #rs2 = cursor.fetchall()
    #logging.debug((len(rs1), rs1[-4:], rs2[-4:]))
    logging.debug("finished %d", n)
logging.debug("done")