Closed
Description
I have a very large table, with more than 100 thousand rows, and when I try to download it using this method, it raises an error.
I have replaced the actual table name in the example below because it doesn't matter.
Apparently the code is not prepared to handle a result set of this size.
However, when I use select * from table LIMIT 5000, it works.
AttributeError Traceback (most recent call last)
<ipython-input-39-f3c65c38f1a7> in <module>
9 print(datetime.datetime.now())
10 with connection.cursor() as cursor:
---> 11 cursor.execute('SELECT * FROM table')
12 result = cursor.fetchall()
13 lista = []
c:\users\\appdata\local\programs\python\python37\lib\site-packages\databricks\sql\client.py in execute(self, operation, parameters)
508 self.thrift_backend,
509 self.buffer_size_bytes,
--> 510 self.arraysize,
511 )
512
c:\users\\appdata\local\programs\python\python37\lib\site-packages\databricks\sql\client.py in __init__(self, connection, execute_response, thrift_backend, result_buffer_size_bytes, arraysize)
816 else:
817 # In this case, there are results waiting on the server so we fetch now for simplicity
--> 818 self._fill_results_buffer()
819
820 def __iter__(self):
c:\users\\appdata\local\programs\python\python37\lib\site-packages\databricks\sql\client.py in _fill_results_buffer(self)
835 lz4_compressed=self.lz4_compressed,
836 arrow_schema_bytes=self._arrow_schema_bytes,
--> 837 description=self.description,
838 )
839 self.results = results
c:\users\\appdata\local\programs\python\python37\lib\site-packages\databricks\sql\thrift_backend.py in fetch_results(self, op_handle, max_rows, max_bytes, expected_row_start_offset, lz4_compressed, arrow_schema_bytes, description)
932
933 queue = ResultSetQueueFactory.build_queue(
--> 934 row_set_type=resp.resultSetMetadata.resultFormat,
935 t_row_set=resp.results,
936 arrow_schema_bytes=arrow_schema_bytes,