diff --git a/poetry.lock b/poetry.lock index 21c9c4d68..bc861926f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -371,14 +371,8 @@ python-versions = "^3.7.1" content-hash = "46fe2288362fc103abfdcd49c9dce356736b9ea6758d57b5d7fed173e2b9ceb5" [metadata.files] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] +atomicwrites = [] +attrs = [] black = [ {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, @@ -404,79 +398,17 @@ black = [ {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, ] -click = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] +click = [] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] -greenlet = [ - {file = "greenlet-1.1.2-cp27-cp27m-macosx_10_14_x86_64.whl", 
hash = "sha256:58df5c2a0e293bf665a51f8a100d3e9956febfbf1d9aaf8c0677cf70218910c6"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:aec52725173bd3a7b56fe91bc56eccb26fbdff1386ef123abb63c84c5b43b63a"}, - {file = "greenlet-1.1.2-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:833e1551925ed51e6b44c800e71e77dacd7e49181fdc9ac9a0bf3714d515785d"}, - {file = "greenlet-1.1.2-cp27-cp27m-win32.whl", hash = "sha256:aa5b467f15e78b82257319aebc78dd2915e4c1436c3c0d1ad6f53e47ba6e2713"}, - {file = "greenlet-1.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:40b951f601af999a8bf2ce8c71e8aaa4e8c6f78ff8afae7b808aae2dc50d4c40"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:95e69877983ea39b7303570fa6760f81a3eec23d0e3ab2021b7144b94d06202d"}, - {file = "greenlet-1.1.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:356b3576ad078c89a6107caa9c50cc14e98e3a6c4874a37c3e0273e4baf33de8"}, - {file = "greenlet-1.1.2-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8639cadfda96737427330a094476d4c7a56ac03de7265622fcf4cfe57c8ae18d"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e5306482182170ade15c4b0d8386ded995a07d7cc2ca8f27958d34d6736497"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6a36bb9474218c7a5b27ae476035497a6990e21d04c279884eb10d9b290f1b1"}, - {file = "greenlet-1.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb7a75ed8b968f3061327c433a0fbd17b729947b400747c334a9c29a9af6c58"}, - {file = "greenlet-1.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b336501a05e13b616ef81ce329c0e09ac5ed8c732d9ba7e3e983fcc1a9e86965"}, - {file = "greenlet-1.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:14d4f3cd4e8b524ae9b8aa567858beed70c392fdec26dbdb0a8a418392e71708"}, - {file = "greenlet-1.1.2-cp35-cp35m-macosx_10_14_x86_64.whl", hash = 
"sha256:17ff94e7a83aa8671a25bf5b59326ec26da379ace2ebc4411d690d80a7fbcf23"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9f3cba480d3deb69f6ee2c1825060177a22c7826431458c697df88e6aeb3caee"}, - {file = "greenlet-1.1.2-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:fa877ca7f6b48054f847b61d6fa7bed5cebb663ebc55e018fda12db09dcc664c"}, - {file = "greenlet-1.1.2-cp35-cp35m-win32.whl", hash = "sha256:7cbd7574ce8e138bda9df4efc6bf2ab8572c9aff640d8ecfece1b006b68da963"}, - {file = "greenlet-1.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:903bbd302a2378f984aef528f76d4c9b1748f318fe1294961c072bdc7f2ffa3e"}, - {file = "greenlet-1.1.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:049fe7579230e44daef03a259faa24511d10ebfa44f69411d99e6a184fe68073"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:dd0b1e9e891f69e7675ba5c92e28b90eaa045f6ab134ffe70b52e948aa175b3c"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7418b6bfc7fe3331541b84bb2141c9baf1ec7132a7ecd9f375912eca810e714e"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9d29ca8a77117315101425ec7ec2a47a22ccf59f5593378fc4077ac5b754fce"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21915eb821a6b3d9d8eefdaf57d6c345b970ad722f856cd71739493ce003ad08"}, - {file = "greenlet-1.1.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eff9d20417ff9dcb0d25e2defc2574d10b491bf2e693b4e491914738b7908168"}, - {file = "greenlet-1.1.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:b8c008de9d0daba7b6666aa5bbfdc23dcd78cafc33997c9b7741ff6353bafb7f"}, - {file = "greenlet-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:32ca72bbc673adbcfecb935bb3fb1b74e663d10a4b241aaa2f5a75fe1d1f90aa"}, - {file = "greenlet-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f0214eb2a23b85528310dad848ad2ac58e735612929c8072f6093f3585fd342d"}, - 
{file = "greenlet-1.1.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b92e29e58bef6d9cfd340c72b04d74c4b4e9f70c9fa7c78b674d1fec18896dc4"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:fdcec0b8399108577ec290f55551d926d9a1fa6cad45882093a7a07ac5ec147b"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:93f81b134a165cc17123626ab8da2e30c0455441d4ab5576eed73a64c025b25c"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e12bdc622676ce47ae9abbf455c189e442afdde8818d9da983085df6312e7a1"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c790abda465726cfb8bb08bd4ca9a5d0a7bd77c7ac1ca1b839ad823b948ea28"}, - {file = "greenlet-1.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f276df9830dba7a333544bd41070e8175762a7ac20350786b322b714b0e654f5"}, - {file = "greenlet-1.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c5d5b35f789a030ebb95bff352f1d27a93d81069f2adb3182d99882e095cefe"}, - {file = "greenlet-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:64e6175c2e53195278d7388c454e0b30997573f3f4bd63697f88d855f7a6a1fc"}, - {file = "greenlet-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b11548073a2213d950c3f671aa88e6f83cda6e2fb97a8b6317b1b5b33d850e06"}, - {file = "greenlet-1.1.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:9633b3034d3d901f0a46b7939f8c4d64427dfba6bbc5a36b1a67364cf148a1b0"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eb6ea6da4c787111adf40f697b4e58732ee0942b5d3bd8f435277643329ba627"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f3acda1924472472ddd60c29e5b9db0cec629fbe3c5c5accb74d6d6d14773478"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e859fcb4cbe93504ea18008d1df98dee4f7766db66c435e4882ab35cf70cac43"}, - {file = 
"greenlet-1.1.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00e44c8afdbe5467e4f7b5851be223be68adb4272f44696ee71fe46b7036a711"}, - {file = "greenlet-1.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec8c433b3ab0419100bd45b47c9c8551248a5aee30ca5e9d399a0b57ac04651b"}, - {file = "greenlet-1.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2bde6792f313f4e918caabc46532aa64aa27a0db05d75b20edfc5c6f46479de2"}, - {file = "greenlet-1.1.2-cp38-cp38-win32.whl", hash = "sha256:288c6a76705dc54fba69fbcb59904ae4ad768b4c768839b8ca5fdadec6dd8cfd"}, - {file = "greenlet-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:8d2f1fb53a421b410751887eb4ff21386d119ef9cde3797bf5e7ed49fb51a3b3"}, - {file = "greenlet-1.1.2-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:166eac03e48784a6a6e0e5f041cfebb1ab400b394db188c48b3a84737f505b67"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:572e1787d1460da79590bf44304abbc0a2da944ea64ec549188fa84d89bba7ab"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be5f425ff1f5f4b3c1e33ad64ab994eed12fc284a6ea71c5243fd564502ecbe5"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1692f7d6bc45e3200844be0dba153612103db241691088626a33ff1f24a0d88"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7227b47e73dedaa513cdebb98469705ef0d66eb5a1250144468e9c3097d6b59b"}, - {file = "greenlet-1.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ff61ff178250f9bb3cd89752df0f1dd0e27316a8bd1465351652b1b4a4cdfd3"}, - {file = "greenlet-1.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0051c6f1f27cb756ffc0ffbac7d2cd48cb0362ac1736871399a739b2885134d3"}, - {file = "greenlet-1.1.2-cp39-cp39-win32.whl", hash = "sha256:f70a9e237bb792c7cc7e44c531fd48f5897961701cdaa06cf22fc14965c496cf"}, - {file = 
"greenlet-1.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:013d61294b6cd8fe3242932c1c5e36e5d1db2c8afb58606c5a67efce62c1f5fd"}, - {file = "greenlet-1.1.2.tar.gz", hash = "sha256:e30f5ea4ae2346e62cedde8794a56858a67b878dd79f7df76a0767e356b1744a"}, -] +greenlet = [] importlib-metadata = [ {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, ] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] +iniconfig = [] mypy = [ {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, @@ -502,87 +434,14 @@ mypy = [ {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, ] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -numpy = [ - {file = "numpy-1.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38e8648f9449a549a7dfe8d8755a5979b45b3538520d1e735637ef28e8c2dc50"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:fd7d7409fa643a91d0a05c7554dd68aa9c9bb16e186f6ccfe40d6e003156e33a"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a75b4498b1e93d8b700282dc8e655b8bd559c0904b3910b144646dbbbc03e062"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1412aa0aec3e00bc23fbb8664d76552b4efde98fb71f60737c83efbac24112f1"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e46ceaff65609b5399163de5893d8f2a82d3c77d5e56d976c8b5fb01faa6b671"}, - {file = "numpy-1.21.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:c6a2324085dd52f96498419ba95b5777e40b6bcbc20088fddb9e8cbb58885e8e"}, - {file = "numpy-1.21.1-cp37-cp37m-win32.whl", hash = "sha256:73101b2a1fef16602696d133db402a7e7586654682244344b8329cdcbbb82172"}, - {file = "numpy-1.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7a708a79c9a9d26904d1cca8d383bf869edf6f8e7650d85dbc77b041e8c5a0f8"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95b995d0c413f5d0428b3f880e8fe1660ff9396dcd1f9eedbc311f37b5652e16"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:635e6bd31c9fb3d475c8f44a089569070d10a9ef18ed13738b03049280281267"}, - {file = "numpy-1.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a3d5fb89bfe21be2ef47c0614b9c9c707b7362386c9a3ff1feae63e0267ccb6"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a326af80e86d0e9ce92bcc1e65c8ff88297de4fa14ee936cb2293d414c9ec63"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:791492091744b0fe390a6ce85cc1bf5149968ac7d5f0477288f78c89b385d9af"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0318c465786c1f63ac05d7c4dbcecd4d2d7e13f0959b01b534ea1e92202235c5"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", 
hash = "sha256:9a513bd9c1551894ee3d31369f9b07460ef223694098cf27d399513415855b68"}, - {file = "numpy-1.21.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:91c6f5fc58df1e0a3cc0c3a717bb3308ff850abdaa6d2d802573ee2b11f674a8"}, - {file = "numpy-1.21.1-cp38-cp38-win32.whl", hash = "sha256:978010b68e17150db8765355d1ccdd450f9fc916824e8c4e35ee620590e234cd"}, - {file = "numpy-1.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:9749a40a5b22333467f02fe11edc98f022133ee1bfa8ab99bda5e5437b831214"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d7a4aeac3b94af92a9373d6e77b37691b86411f9745190d2c351f410ab3a791f"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9e7912a56108aba9b31df688a4c4f5cb0d9d3787386b87d504762b6754fbb1b"}, - {file = "numpy-1.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:25b40b98ebdd272bc3020935427a4530b7d60dfbe1ab9381a39147834e985eac"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a92c5aea763d14ba9d6475803fc7904bda7decc2a0a68153f587ad82941fec1"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a0f648eb28bae4bcb204e6fd14603de2908de982e761a2fc78efe0f19e96e1"}, - {file = "numpy-1.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f01f28075a92eede918b965e86e8f0ba7b7797a95aa8d35e1cc8821f5fc3ad6a"}, - {file = "numpy-1.21.1-cp39-cp39-win32.whl", hash = "sha256:88c0b89ad1cc24a5efbb99ff9ab5db0f9a86e9cc50240177a571fbe9c2860ac2"}, - {file = "numpy-1.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:01721eefe70544d548425a07c80be8377096a54118070b8a62476866d5208e33"}, - {file = "numpy-1.21.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d4d1de6e6fb3d28781c73fbde702ac97f03d79e4ffd6598b880b2d95d62ead4"}, - {file = "numpy-1.21.1.zip", hash = "sha256:dff4af63638afcc57a3dfb9e4b26d434a7a602d225b42d746ea7fe2edf1342fd"}, -] -packaging = [ - 
{file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -pandas = [ - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:62d5b5ce965bae78f12c1c0df0d387899dd4211ec0bdc52822373f13a3a022b9"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:adfeb11be2d54f275142c8ba9bf67acee771b7186a5745249c7d5a06c670136b"}, - {file = "pandas-1.3.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:60a8c055d58873ad81cae290d974d13dd479b82cbb975c3e1fa2cf1920715296"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd541ab09e1f80a2a1760032d665f6e032d8e44055d602d65eeea6e6e85498cb"}, - {file = "pandas-1.3.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2651d75b9a167cc8cc572cf787ab512d16e316ae00ba81874b560586fa1325e0"}, - {file = "pandas-1.3.5-cp310-cp310-win_amd64.whl", hash = "sha256:aaf183a615ad790801fa3cf2fa450e5b6d23a54684fe386f7e3208f8b9bfbef6"}, - {file = "pandas-1.3.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:344295811e67f8200de2390093aeb3c8309f5648951b684d8db7eee7d1c81fb7"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:552020bf83b7f9033b57cbae65589c01e7ef1544416122da0c79140c93288f56"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cce0c6bbeb266b0e39e35176ee615ce3585233092f685b6a82362523e59e5b4"}, - {file = "pandas-1.3.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d28a3c65463fd0d0ba8bbb7696b23073efee0510783340a44b08f5e96ffce0c"}, - {file = "pandas-1.3.5-cp37-cp37m-win32.whl", hash = "sha256:a62949c626dd0ef7de11de34b44c6475db76995c2064e2d99c6498c3dba7fe58"}, - {file = 
"pandas-1.3.5-cp37-cp37m-win_amd64.whl", hash = "sha256:8025750767e138320b15ca16d70d5cdc1886e8f9cc56652d89735c016cd8aea6"}, - {file = "pandas-1.3.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fe95bae4e2d579812865db2212bb733144e34d0c6785c0685329e5b60fcb85dd"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f261553a1e9c65b7a310302b9dbac31cf0049a51695c14ebe04e4bfd4a96f02"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6dbec5f3e6d5dc80dcfee250e0a2a652b3f28663492f7dab9a24416a48ac39"}, - {file = "pandas-1.3.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3bc49af96cd6285030a64779de5b3688633a07eb75c124b0747134a63f4c05f"}, - {file = "pandas-1.3.5-cp38-cp38-win32.whl", hash = "sha256:b6b87b2fb39e6383ca28e2829cddef1d9fc9e27e55ad91ca9c435572cdba51bf"}, - {file = "pandas-1.3.5-cp38-cp38-win_amd64.whl", hash = "sha256:a395692046fd8ce1edb4c6295c35184ae0c2bbe787ecbe384251da609e27edcb"}, - {file = "pandas-1.3.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bd971a3f08b745a75a86c00b97f3007c2ea175951286cdda6abe543e687e5f2f"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37f06b59e5bc05711a518aa10beaec10942188dccb48918bb5ae602ccbc9f1a0"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c21778a688d3712d35710501f8001cdbf96eb70a7c587a3d5613573299fdca6"}, - {file = "pandas-1.3.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3345343206546545bc26a05b4602b6a24385b5ec7c75cb6059599e3d56831da2"}, - {file = "pandas-1.3.5-cp39-cp39-win32.whl", hash = "sha256:c69406a2808ba6cf580c2255bcf260b3f214d2664a3a4197d0e640f573b46fd3"}, - {file = "pandas-1.3.5-cp39-cp39-win_amd64.whl", hash = "sha256:32e1a26d5ade11b547721a72f9bfc4bd113396947606e00d5b4a5b79b3dcb006"}, - 
{file = "pandas-1.3.5.tar.gz", hash = "sha256:1e4285f5de1012de20ca46b188ccf33521bff61ba5c5ebd78b4fb28e5416a9f1"}, -] -pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, -] -platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -py = [ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] +mypy-extensions = [] +numpy = [] +packaging = [] +pandas = [] +pathspec = [] +platformdirs = [] +pluggy = [] +py = [] pyarrow = [ {file = "pyarrow-5.0.0-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:e9ec80f4a77057498cf4c5965389e42e7f6a618b6859e6dd615e57505c9167a6"}, {file = "pyarrow-5.0.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b1453c2411b5062ba6bf6832dbc4df211ad625f678c623a2ee177aee158f199b"}, @@ -613,71 +472,17 @@ pyarrow = [ {file = "pyarrow-5.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d8adda1892ef4553c4804af7f67cce484f4d6371564e2d8374b8e2bc85293e2"}, {file = "pyarrow-5.0.0.tar.gz", hash = "sha256:24e64ea33eed07441cc0e80c949e3a1b48211a1add8953268391d250f4d39922"}, ] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = 
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, -] +pyparsing = [] +pytest = [] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] -pytz = [ - {file = "pytz-2022.1-py2.py3-none-any.whl", hash = "sha256:e68985985296d9a66a881eb3193b0906246245294a881e7c8afe623866ac6a5c"}, - {file = "pytz-2022.1.tar.gz", hash = "sha256:1e760e2fe6a8163bc0b3d9a19c4f84342afa0a2affebfaa84b01b978a02ecaa7"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sqlalchemy = [ - {file = "SQLAlchemy-1.4.39-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:4770eb3ba69ec5fa41c681a75e53e0e342ac24c1f9220d883458b5596888e43a"}, - {file = "SQLAlchemy-1.4.39-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:752ef2e8dbaa3c5d419f322e3632f00ba6b1c3230f65bc97c2ff5c5c6c08f441"}, - {file = "SQLAlchemy-1.4.39-cp27-cp27m-win32.whl", hash = "sha256:b30e70f1594ee3c8902978fd71900d7312453922827c4ce0012fa6a8278d6df4"}, - {file = "SQLAlchemy-1.4.39-cp27-cp27m-win_amd64.whl", hash = "sha256:864d4f89f054819cb95e93100b7d251e4d114d1c60bc7576db07b046432af280"}, - {file = "SQLAlchemy-1.4.39-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:8f901be74f00a13bf375241a778455ee864c2c21c79154aad196b7a994e1144f"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:1745987ada1890b0e7978abdb22c133eca2e89ab98dc17939042240063e1ef21"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ede13a472caa85a13abe5095e71676af985d7690eaa8461aeac5c74f6600b6c0"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7f13644b15665f7322f9e0635129e0ef2098409484df67fcd225d954c5861559"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26146c59576dfe9c546c9f45397a7c7c4a90c25679492ff610a7500afc7d03a6"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-win32.whl", hash = "sha256:91d2b89bb0c302f89e753bea008936acfa4e18c156fb264fe41eb6bbb2bbcdeb"}, - {file = "SQLAlchemy-1.4.39-cp310-cp310-win_amd64.whl", hash = "sha256:50e7569637e2e02253295527ff34666706dbb2bc5f6c61a5a7f44b9610c9bb09"}, - {file = "SQLAlchemy-1.4.39-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:107df519eb33d7f8e0d0d052128af2f25066c1a0f6b648fd1a9612ab66800b86"}, - {file = "SQLAlchemy-1.4.39-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f24d4d6ec301688c59b0c4bb1c1c94c5d0bff4ecad33bb8f5d9efdfb8d8bc925"}, - {file = "SQLAlchemy-1.4.39-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b2785dd2a0c044a36836857ac27310dc7a99166253551ee8f5408930958cc60"}, - {file = "SQLAlchemy-1.4.39-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6e2c8581c6620136b9530137954a8376efffd57fe19802182c7561b0ab48b48"}, - {file = "SQLAlchemy-1.4.39-cp36-cp36m-win32.whl", hash = "sha256:fbc076f79d830ae4c9d49926180a1140b49fa675d0f0d555b44c9a15b29f4c80"}, - {file = 
"SQLAlchemy-1.4.39-cp36-cp36m-win_amd64.whl", hash = "sha256:0ec54460475f0c42512895c99c63d90dd2d9cbd0c13491a184182e85074b04c5"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6f95706da857e6e79b54c33c1214f5467aab10600aa508ddd1239d5df271986e"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:621f050e72cc7dfd9ad4594ff0abeaad954d6e4a2891545e8f1a53dcdfbef445"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05a05771617bfa723ba4cef58d5b25ac028b0d68f28f403edebed5b8243b3a87"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20bf65bcce65c538e68d5df27402b39341fabeecf01de7e0e72b9d9836c13c52"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-win32.whl", hash = "sha256:f2a42acc01568b9701665e85562bbff78ec3e21981c7d51d56717c22e5d3d58b"}, - {file = "SQLAlchemy-1.4.39-cp37-cp37m-win_amd64.whl", hash = "sha256:6d81de54e45f1d756785405c9d06cd17918c2eecc2d4262dc2d276ca612c2f61"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:5c2d19bfb33262bf987ef0062345efd0f54c4189c2d95159c72995457bf4a359"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14ea8ff2d33c48f8e6c3c472111d893b9e356284d1482102da9678195e5a8eac"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec3985c883d6d217cf2013028afc6e3c82b8907192ba6195d6e49885bfc4b19d"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1962dfee37b7fb17d3d4889bf84c4ea08b1c36707194c578f61e6e06d12ab90f"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-win32.whl", hash = 
"sha256:047ef5ccd8860f6147b8ac6c45a4bc573d4e030267b45d9a1c47b55962ff0e6f"}, - {file = "SQLAlchemy-1.4.39-cp38-cp38-win_amd64.whl", hash = "sha256:b71be98ef6e180217d1797185c75507060a57ab9cd835653e0112db16a710f0d"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:365b75938049ae31cf2176efd3d598213ddb9eb883fbc82086efa019a5f649df"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7a7667d928ba6ee361a3176e1bef6847c1062b37726b33505cc84136f657e0d"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c6d00cb9da8d0cbfaba18cad046e94b06de6d4d0ffd9d4095a3ad1838af22528"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0538b66f959771c56ff996d828081908a6a52a47c5548faed4a3d0a027a5368"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-win32.whl", hash = "sha256:d1f665e50592caf4cad3caed3ed86f93227bffe0680218ccbb293bd5a6734ca8"}, - {file = "SQLAlchemy-1.4.39-cp39-cp39-win_amd64.whl", hash = "sha256:8b773c9974c272aae0fa7e95b576d98d17ee65f69d8644f9b6ffc90ee96b4d19"}, - {file = "SQLAlchemy-1.4.39.tar.gz", hash = "sha256:8194896038753b46b08a0b0ae89a5d80c897fb601dd51e243ed5720f1f155d27"}, -] -thrift = [ - {file = "thrift-0.13.0.tar.gz", hash = "sha256:9af1c86bf73433afc6010ed376a6c6aca2b54099cc0d61895f640870a9ae7d89"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] +pytz = [] +six = [] +sqlalchemy = [] +thrift = [] +tomli = [] typed-ast = [ {file = "typed_ast-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ad3b48cf2b487be140072fb86feff36801487d4abb7382bb1929aaac80638ea"}, {file = 
"typed_ast-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:542cd732351ba8235f20faa0fc7398946fe1a57f2cdb289e5497e1e7f48cfedb"}, @@ -708,7 +513,4 @@ typing-extensions = [ {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, ] -zipp = [ - {file = "zipp-3.8.0-py3-none-any.whl", hash = "sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"}, - {file = "zipp-3.8.0.tar.gz", hash = "sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad"}, -] +zipp = [] diff --git a/src/databricks/sqlalchemy/.gitignore b/src/databricks/sqlalchemy/.gitignore new file mode 100644 index 000000000..6c5416e66 --- /dev/null +++ b/src/databricks/sqlalchemy/.gitignore @@ -0,0 +1,2 @@ +*env + diff --git a/src/databricks/sqlalchemy/README.md b/src/databricks/sqlalchemy/README.md new file mode 100644 index 000000000..9bf377afb --- /dev/null +++ b/src/databricks/sqlalchemy/README.md @@ -0,0 +1,130 @@ +# Introduction + +This is work-in-progress of a SQLAlchemy dialect for Databricks. + +The dialect is embedded within the Databricks SQL Connector. + +## Connection String + +Using the dialect requires the following: + +1. SQL Warehouse hostname +2. Endpoint +3. Access token + +The schema `default` is used unless an alternate is specified via _Default-schema_. 
+ +The connection string is constructed as follows: + +`databricks+thrift://token:`_Access-token_`@`_SQL-warehouse-hostname_`/`_Default-schema_`?http_path=`_Endpoint_ + + +## Data Types + +|Databricks type| SQLAlchemy type | Extra| +|:-|:-|:-| + `smallint` | `integer` | + `int` | `integer` | + `bigint` | `integer` | + `float` | `float` | + `decimal` | `float` | + `boolean` | `boolean` | + `string` | WIP | + `date` | WIP | + `timestamp` | WIP | + + + +## Sample Code + +The focus of this dialect is enabling SQLAlchemy Core (as opposed to SQLAlchemy ORM). + + + +### The Simplest Program + +A program (see [`sample-app-select.py`](https://github.com/overcoil/fork-databricks-sql-python/blob/sqlalchemy-dev/src/databricks/sqlalchemy/sample-app-select.py)) to read from a Databricks table looks roughly as follows: + +```Python +import os + +from sqlalchemy import create_engine +from sqlalchemy import MetaData +from sqlalchemy import Table, Column, Integer, BigInteger, Float, Boolean +from sqlalchemy import select + +# pick up settings from the env +server_hostname = os.getenv("DATABRICKS_SERVER_HOSTNAME") +http_path = os.getenv("DATABRICKS_HTTP_PATH") +access_token = os.getenv("DATABRICKS_TOKEN") +default_schema = os.getenv("DATABRICKS_SCHEMA") + +# use echo=True for verbose log +engine = create_engine(f"databricks+thrift://token:{access_token}@{server_hostname}/{default_schema}?http_path={http_path}", echo=False, future=True) + +metadata_obj = MetaData() + +# NB: sample_numtypes is a pre-created/populated table +tableName = "sample_numtypes" + +# declare the schema we're expecting +numtypes = Table( + tableName, + metadata_obj, + Column('f_byte', Integer), + Column('f_short', Integer), + Column('f_int', Integer), + Column('f_long', BigInteger), + Column('f_float', Float), + Column('f_decimal', Float), + Column('f_boolean', Boolean) +) + +# SELECT * FROM t WHERE f_byte = -125 +stmt = select(numtypes).where(numtypes.c.f_byte == -125) +print(f"Attempting to execute: {stmt}\n") + 
+    result = conn.execute(text("VACUUM tablename"))
+    # TODO: why is this needed even though there's no occurrence of VARCHAR?
+# This lookup is by TYPE_NAME which is easier to maintain and likely safer in the long term.
+# NB: Decimal is explicitly excluded here as each occurrence's TYPE_NAME includes the occurrence's precision and scale.
+# See/refer to COLUMN_TYPE_DECIMAL below.
+
+# this maps SQL types onto Python representations; note the deliberate omission of Decimal!
+_type_map = { + "TINYINT": types.Integer, # tiny_int + "SMALLINT": types.Integer, # small_int + "INT": types.Integer, # int + "BIGINT": types.BigInteger, # big_int + "FLOAT": types.Float, + "DOUBLE": types.Float, # double fits into a Python float + "BOOLEAN": types.Boolean, + "STRING": types.String, + "DATE": types.DATE, # date + "TIMESTAMP": types.TIMESTAMP, # timestamp +} +# this is used to match a column's DATA_TYPE for Decimal; it will map to types.DECIMAL +COLUMN_TYPE_DECIMAL = 3 +# COLUMN_TYPE_INTERVAL=1111 + + +class DatabricksDialect(default.DefaultDialect): # Possible attributes are defined here: https://docs.sqlalchemy.org/en/14/core/internals.html#sqlalchemy.engine.Dialect name: str = "databricks" driver: str = "thrift" default_schema_name: str = "default" + preparer = DatabricksIdentifierPreparer + + # TODO: revisit server-side cursors + # ref: https://docs.databricks.com/dev-tools/python-sql-connector.html#manage-cursors-and-connections + execution_ctx_cls = default.DefaultExecutionContext + + statement_compiler = compiler.SQLCompiler + ddl_compiler = DatabricksDDLCompiler + type_compiler = DatabricksTypeCompiler + + # the following attributes are cribbed from HiveDialect: + supports_views = False + supports_alter = True + supports_pk_autoincrement = False + supports_default_values = False + supports_empty_insert = False + supports_native_decimal = True + supports_native_boolean = True + supports_unicode_statements = True + supports_unicode_binds = True + returns_unicode_strings = True + description_encoding = None + supports_multivalues_insert = True + supports_sane_rowcount = False + + # added based on comments here: https://docs.sqlalchemy.org/en/14/errors.html#error-cprf + supports_statement_cache = False + @classmethod def dbapi(cls): return sql - def create_connect_args(self, url): - # Expected URI format is: databricks+thrift://token:dapi***@***.cloud.databricks.com?http_path=/sql/*** - + def create_connect_args(self, url: "URL"): + # URI 
format is: databricks+thrift://token:{access_token}@{server_hostname}/{schema}?http_path={http_path} kwargs = { "server_hostname": url.host, "access_token": url.password, "http_path": url.query.get("http_path"), + "schema": url.database or "default", } return [], kwargs - def get_table_names(self, *args, **kwargs): + def get_schema_names(self, connection, **kwargs): + # conn = dbsql.connect( + # server_hostname=kwargs['server_hostname'], + # http_path=kwargs['http_path'], + # access_token=kwargs['access_token'], + # schema=kwargs['schema'] + # ) + # TODO: look up correct index for TABLE_SCHEM - # TODO: Implement with native driver `.tables()` call - return super().get_table_names(*args, **kwargs) + breakpoint() + TABLE_SCHEM = 2 + with self.get_driver_connection( + connection + )._dbapi_connection.dbapi_connection.cursor() as cur: + data = cur.schemas(catalog_name="%").fetchall() + _schemas = [i[TABLE_SCHEM] for i in data] - def get_columns(self, *args, **kwargs): + return _schemas - # TODO: Implement with native driver `.columns()` call + def get_table_names(self, connection, schema=None, **kwargs): + breakpoint() + TABLE_NAME = 2 + with self.get_driver_connection( + connection + )._dbapi_connection.dbapi_connection.cursor() as cur: + data = cur.tables(schema_name=schema).fetchall() + _tables = [i[TABLE_NAME] for i in data] - return super().get_columns(*args, **kwargs) + return _tables - def do_rollback(self, dbapi_connection): - # Databricks SQL Does not support transaction - pass + # This is needed for SQLAlchemy reflection + def get_columns(self, connection, table_name, schema=None, **kwargs): + # Example row + # Row(TABLE_CAT='hive_metastore', TABLE_SCHEM='george_chow_dbtest', TABLE_NAME='all_types', COLUMN_NAME='f_byte', DATA_TYPE=4, + # TYPE_NAME='INT', COLUMN_SIZE=4, BUFFER_LENGTH=None, DECIMAL_DIGITS=0, NUM_PREC_RADIX=10, + # NULLABLE=1, REMARKS='', COLUMN_DEF=None, SQL_DATA_TYPE=None, SQL_DATETIME_SUB=None, + # CHAR_OCTET_LENGTH=None, ORDINAL_POSITION=0, 
+            # TODO: the following expression is circuitous!
+    # DefaultDialect's default impl delegates to the (PySQL) dbapi_connection, which currently raises a NotSupportedError.
+    # Using a pass here is the laziest implementation; while semantically wrong, it provides bare-bones dialect utility.
+    # TODO: I suspect this is the cause of the failure to drop tables... SA is likely relying on rollback to undo the CREATE TABLEs.
+    def do_rollback(self, dbapi_connection) -> None:
+        # Spark/Delta transactions only support single-table updates... to simplify things, just skip this for now.
+    """Make sure we can handle any constraints that are passed in during table declaration.
+    In the immediate term, the dialect silently ignores them. But once informational constraints are
+    complete, constraints will need to be handled.
""" + mdo = sqlalchemy.MetaData() + this_moment = datetime.datetime.utcnow().strftime("%s") + + tname = f"integration_test_table_{this_moment}" + tname2 = f"integration_test_table_{this_moment}_items" + + t1 = sqlalchemy.Table( + tname, + mdo, + sqlalchemy.Column("f_primary", sqlalchemy.types.Integer, primary_key=True), + sqlalchemy.Column("f_nullable", sqlalchemy.types.Integer, nullable=False), + sqlalchemy.Column("f_unique", sqlalchemy.types.Integer, unique=True), + ) + + t2 = sqlalchemy.Table( + tname2, + mdo, + sqlalchemy.Column( + "f_foreign", + sqlalchemy.types.Integer, + sqlalchemy.ForeignKey(f"{tname}.f_primary"), + nullable=False, + ), + ) + + mdo.create_all(bind=db_engine, checkfirst=True) + + check_it_exists = db_engine.execute(f"DESCRIBE TABLE EXTENDED {tname}") + + mdo.drop_all(db_engine, checkfirst=True) + + +def test_basic_connection(db_engine): + """Make sure we can connect and run basic query""" + curs = db_engine.execute("SELECT id FROM RANGE(100)") result = curs.fetchall() assert len(result) == 100 + def test_create_and_drop_table(db_engine): """Make sure we can automatically create and drop a table defined with SQLAlchemy's MetaData object + while exercising all supported types. 
""" - + mdo = sqlalchemy.MetaData() this_moment = datetime.datetime.utcnow().strftime("%s") - + tname = f"integration_test_table_{this_moment}" t1 = sqlalchemy.Table( tname, mdo, - sqlalchemy.Column('f_short', sqlalchemy.types.SMALLINT), - sqlalchemy.Column('f_int', sqlalchemy.types.Integer), - sqlalchemy.Column('f_long', sqlalchemy.types.BigInteger), - sqlalchemy.Column('f_float', sqlalchemy.types.Float), - sqlalchemy.Column('f_decimal', sqlalchemy.types.DECIMAL), - sqlalchemy.Column('f_boolean', sqlalchemy.types.BOOLEAN) + sqlalchemy.Column("f_short", sqlalchemy.types.SMALLINT), + sqlalchemy.Column("f_int", sqlalchemy.types.Integer), + sqlalchemy.Column("f_long", sqlalchemy.types.BigInteger), + sqlalchemy.Column("f_float", sqlalchemy.types.Float), + sqlalchemy.Column("f_decimal_def", sqlalchemy.types.DECIMAL), + sqlalchemy.Column("f_decimal_spec", sqlalchemy.types.DECIMAL(precision=10, scale=2)), + sqlalchemy.Column("f_boolean", sqlalchemy.types.BOOLEAN), + sqlalchemy.Column("f_str", sqlalchemy.types.String), ) - mdo.create_all(bind=db_engine,checkfirst=True) + mdo.create_all(bind=db_engine, checkfirst=False) check_it_exists = db_engine.execute(f"DESCRIBE TABLE EXTENDED {tname}") - + mdo.drop_all(db_engine, checkfirst=True) diff --git a/tests/test_dialect.py b/tests/test_dialect.py new file mode 100644 index 000000000..a4308d57a --- /dev/null +++ b/tests/test_dialect.py @@ -0,0 +1,210 @@ +import unittest +from unittest.mock import Mock + +import pyarrow as pa + +import databricks.sql.client as client +from databricks.sql.utils import ExecuteResponse, ArrowQueue + + +class FetchTests(unittest.TestCase): + """ + Unit tests for checking the fetch logic. 
+ """ + + @staticmethod + def make_arrow_table(batch): + n_cols = len(batch[0]) if batch else 0 + schema = pa.schema({"col%s" % i: pa.uint32() for i in range(n_cols)}) + cols = [[batch[row][col] for row in range(len(batch))] for col in range(n_cols)] + return schema, pa.Table.from_pydict(dict(zip(schema.names, cols)), schema=schema) + + @staticmethod + def make_arrow_queue(batch): + _, table = FetchTests.make_arrow_table(batch) + queue = ArrowQueue(table, len(batch)) + return queue + + @staticmethod + def make_dummy_result_set_from_initial_results(initial_results): + # If the initial results have been set, then we should never try and fetch more + schema, arrow_table = FetchTests.make_arrow_table(initial_results) + arrow_queue = ArrowQueue(arrow_table, len(initial_results), 0) + rs = client.ResultSet( + connection=Mock(), + thrift_backend=None, + execute_response=ExecuteResponse( + status=None, + has_been_closed_server_side=True, + has_more_rows=False, + description=Mock(), + command_handle=None, + arrow_queue=arrow_queue, + arrow_schema_bytes=schema.serialize().to_pybytes())) + num_cols = len(initial_results[0]) if initial_results else 0 + rs.description = [(f'col{col_id}', 'integer', None, None, None, None, None) + for col_id in range(num_cols)] + return rs + + @staticmethod + def make_dummy_result_set_from_batch_list(batch_list): + batch_index = 0 + + def fetch_results(op_handle, max_rows, max_bytes, expected_row_start_offset, + arrow_schema_bytes, description): + nonlocal batch_index + results = FetchTests.make_arrow_queue(batch_list[batch_index]) + batch_index += 1 + + return results, batch_index < len(batch_list) + + mock_thrift_backend = Mock() + mock_thrift_backend.fetch_results = fetch_results + num_cols = len(batch_list[0][0]) if batch_list and batch_list[0] else 0 + + rs = client.ResultSet( + connection=Mock(), + thrift_backend=mock_thrift_backend, + execute_response=ExecuteResponse( + status=None, + has_been_closed_server_side=False, + 
has_more_rows=True, + description=[(f'col{col_id}', 'integer', None, None, None, None, None) + for col_id in range(num_cols)], + command_handle=None, + arrow_queue=None, + arrow_schema_bytes=None)) + return rs + + def assertEqualRowValues(self, actual, expected): + self.assertEqual(len(actual) if actual else 0, len(expected) if expected else 0) + for act, exp in zip(actual, expected): + self.assertSequenceEqual(act, exp) + + def test_fetchmany_with_initial_results(self): + # Fetch all in one go + initial_results_1 = [[1], [2], [3]] # This is a list of rows, each row with 1 col + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) + + # Fetch in small amounts + initial_results_2 = [[1], [2], [3], [4]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_2) + self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[1]]) + self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[2], [3]]) + self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[4]]) + + # Fetch too many + initial_results_3 = [[2], [3]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_3) + self.assertEqualRowValues(dummy_result_set.fetchmany(5), [[2], [3]]) + + # Empty results + initial_results_4 = [[]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_4) + self.assertEqualRowValues(dummy_result_set.fetchmany(0), []) + + def test_fetch_many_without_initial_results(self): + # Fetch all in one go; single batch + batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, each row with 1 col + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) + self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) + + # Fetch all in one go; multiple batches + batch_list_2 = [[[1], [2]], [[3]]] # This is a list of two batches of rows + 
dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_2) + self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) + + # Fetch in small amounts; single batch + batch_list_3 = [[[1], [2], [3]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_3) + self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[1]]) + self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[2], [3]]) + + # Fetch in small amounts; multiple batches + batch_list_4 = [[[1], [2]], [[3], [4], [5]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_4) + self.assertEqualRowValues(dummy_result_set.fetchmany(1), [[1]]) + self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[2], [3], [4]]) + self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[5]]) + + # Fetch too many; single batch + batch_list_5 = [[[1], [2], [3], [4]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_5) + self.assertEqualRowValues(dummy_result_set.fetchmany(6), [[1], [2], [3], [4]]) + + # Fetch too many; multiple batches + batch_list_6 = [[[1]], [[2], [3], [4]], [[5], [6]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_6) + self.assertEqualRowValues(dummy_result_set.fetchmany(100), [[1], [2], [3], [4], [5], [6]]) + + # Fetch 0; 1 empty batch + batch_list_7 = [[]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_7) + self.assertEqualRowValues(dummy_result_set.fetchmany(0), []) + + # Fetch 0; lots of batches + batch_list_8 = [[[1], [2]], [[3]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_8) + self.assertEqualRowValues(dummy_result_set.fetchmany(0), []) + + def test_fetchall_with_initial_results(self): + initial_results_1 = [[1], [2], [3]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3]]) + + def 
test_fetchall_without_initial_results(self): + # Fetch all, single batch + batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, each row with 1 col + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3]]) + + # Fetch all, multiple batches + batch_list_2 = [[[1], [2]], [[3]], [[4], [5], [6]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_2) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[1], [2], [3], [4], [5], [6]]) + + batch_list_3 = [[]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_3) + self.assertEqualRowValues(dummy_result_set.fetchall(), []) + + def test_fetchmany_fetchall_with_initial_results(self): + initial_results_1 = [[1], [2], [3]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[1], [2]]) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[3]]) + + def test_fetchmany_fetchall_without_initial_results(self): + batch_list_1 = [[[1], [2], [3]]] # This is a list of one batch of rows, each row with 1 col + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) + self.assertEqualRowValues(dummy_result_set.fetchmany(2), [[1], [2]]) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[3]]) + + batch_list_2 = [[[1], [2]], [[3], [4]], [[5], [6], [7]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_2) + self.assertEqualRowValues(dummy_result_set.fetchmany(3), [[1], [2], [3]]) + self.assertEqualRowValues(dummy_result_set.fetchall(), [[4], [5], [6], [7]]) + + def test_fetchone_with_initial_results(self): + initial_results_1 = [[1], [2], [3]] + dummy_result_set = self.make_dummy_result_set_from_initial_results(initial_results_1) + self.assertSequenceEqual(dummy_result_set.fetchone(), [1]) + 
self.assertSequenceEqual(dummy_result_set.fetchone(), [2]) + self.assertSequenceEqual(dummy_result_set.fetchone(), [3]) + self.assertEqual(dummy_result_set.fetchone(), None) + + def test_fetchone_without_initial_results(self): + batch_list_1 = [[[1], [2]], [[3]]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_1) + self.assertSequenceEqual(dummy_result_set.fetchone(), [1]) + self.assertSequenceEqual(dummy_result_set.fetchone(), [2]) + self.assertSequenceEqual(dummy_result_set.fetchone(), [3]) + self.assertEqual(dummy_result_set.fetchone(), None) + + batch_list_2 = [[]] + dummy_result_set = self.make_dummy_result_set_from_batch_list(batch_list_2) + self.assertEqual(dummy_result_set.fetchone(), None) + + +if __name__ == '__main__': + unittest.main()