@Bartosz Wachocki ,
Use a timeout, a retry interval, recursion (or a retry loop), and exception handling.
Pseudo-code below:
# Total retry budget in seconds for a failing query.
timeout = 300

# Seconds to wait between retry attempts.
RETRY_INTERVAL = 60


def exec_query(query, timeout=timeout):
    """Run a Salesforce bulk query and load the result into a Spark DataFrame,
    retrying on failure until the timeout budget is exhausted.

    Parameters
    ----------
    query : str
        Query string passed to the Salesforce bulk API.
    timeout : int
        Total retry budget in seconds; one retry every RETRY_INTERVAL seconds.

    Returns
    -------
    The Spark DataFrame on success, or None if every attempt failed
    before the budget ran out.
    """
    remaining = timeout
    while True:
        try:
            # NOTE(review): `spark` and `sf` are assumed to be defined at
            # module/notebook scope -- confirm against the calling context.
            return spark.createDataFrame(sf.bulk.MyTable.query(query))
        except Exception:
            # Narrow this to the specific Salesforce/Spark exception types
            # once they are known; a bare `except:` would also swallow
            # KeyboardInterrupt and SystemExit.
            if remaining <= 0:
                print("Timeout")
                return None
            # Decrement the budget BEFORE retrying so the loop is bounded
            # (the original decremented after the recursive call, so the
            # budget never shrank).
            sleep(RETRY_INTERVAL)
            remaining -= RETRY_INTERVAL