diff --git a/Pipfile b/Pipfile index 0303ed7..66a00bf 100644 --- a/Pipfile +++ b/Pipfile @@ -15,6 +15,7 @@ requests = "*" [packages] requests = "*" +logzero = "*" [requires] python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock index 1a38e3f..b588d82 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e5b5420547d5a045f2f16859d9a090a6424137b22ccb2e86ae1d98e432bfb86f" + "sha256": "28373346849e32da829fcb191e44e05235e29d3a45c2482b25e22955196801e7" }, "pipfile-spec": 6, "requires": { @@ -15,94 +15,472 @@ } ] }, - "default": {}, + "default": { + "certifi": { + "hashes": [ + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" + ], + "version": "==2020.12.5" + }, + "chardet": { + "hashes": [ + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" + }, + "idna": { + "hashes": [ + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" + }, + "logzero": { + "hashes": [ + "sha256:1435284574e409b8ec8b680f276bca04cab41f93d6eff4dc8348b7630cddf560", + "sha256:1b84ee4c8fdabf7023877ff17cb456d82564097704eb6c4ee37952bd8ce0800f" + ], + "index": "pypi", + "version": "==1.6.3" + }, + "requests": { + "hashes": [ + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" + ], + "index": "pypi", + "version": "==2.25.1" + }, + "urllib3": { + "hashes": [ + "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.3" + } + }, "develop": { - "atomicwrites": { + "appdirs": { "hashes": [ - "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", - "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" ], - "version": "==1.3.0" + "version": "==1.4.4" }, "attrs": { "hashes": [ - "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", - "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" ], - "version": "==19.1.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.3.0" + }, + "bandit": { + "hashes": [ + "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07", + "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608" + ], + "index": "pypi", + "version": "==1.7.0" + }, + "better-exceptions": { + "hashes": [ + "sha256:9c70b1c61d5a179b84cd2c9d62c3324b667d74286207343645ed4306fdaad976", + "sha256:bf111d0c9994ac1123f29c24907362bed2320a86809c85f0d858396000667ce2", + 
"sha256:e4e6bc18444d5f04e6e894b10381e5e921d3d544240418162c7db57e9eb3453b" + ], + "index": "pypi", + "version": "==0.3.3" + }, + "black": { + "hashes": [ + "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea" + ], + "index": "pypi", + "version": "==20.8b1" }, "certifi": { "hashes": [ - "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", - "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" + "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c", + "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830" ], - "version": "==2019.3.9" + "version": "==2020.12.5" + }, + "cfgv": { + "hashes": [ + "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d", + "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1" + ], + "markers": "python_full_version >= '3.6.1'", + "version": "==3.2.0" }, "chardet": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa", + "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==4.0.0" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==7.1.2" + }, + "distlib": { + "hashes": [ + "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", + "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1" + ], + "version": "==0.3.1" + }, + "filelock": { + "hashes": [ + "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", + "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" + ], + "version": "==3.0.12" + }, + "flake8": { + "hashes": [ + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + ], + "index": "pypi", + "version": "==3.8.4" + }, + "gitdb": { + "hashes": [ + "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac", + "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9" + ], + "markers": "python_version >= '3.4'", + "version": "==4.0.5" + }, + "gitpython": { + "hashes": [ + "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac", + "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5" + ], + "markers": "python_version >= '3.4'", + "version": "==3.1.12" + }, + "identify": { + "hashes": [ + "sha256:70b638cf4743f33042bebb3b51e25261a0a10e80f978739f17e7fd4837664a66", + "sha256:9dfb63a2e871b807e3ba62f029813552a24b5289504f5b071dea9b041aee9fe4" ], - "version": "==3.0.4" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.5.13" }, "idna": { "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6", + "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0" + ], + "markers": "python_version >= 
'2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.10" + }, + "iniconfig": { + "hashes": [ + "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", + "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32" + ], + "version": "==1.1.1" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" ], - "version": "==2.8" + "version": "==0.6.1" }, - "more-itertools": { + "mypy-extensions": { "hashes": [ - "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7", - "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a" + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" ], - "markers": "python_version > '2.7'", - "version": "==7.0.0" + "version": "==0.4.3" + }, + "nodeenv": { + "hashes": [ + "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9", + "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c" + ], + "version": "==1.5.0" + }, + "packaging": { + "hashes": [ + "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5", + "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.9" + }, + "pathspec": { + "hashes": [ + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + ], + "version": "==0.8.1" + }, + "pbr": { + "hashes": [ + "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9", + "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00" + ], + "markers": "python_version >= '2.6'", + "version": "==5.5.1" + }, + "pipenv": { + "editable": true, + "extras": [ + "test" + ], + "path": "." 
}, "pluggy": { "hashes": [ - "sha256:25a1bc1d148c9a640211872b4ff859878d422bccb59c9965e04eed468a0aa180", - "sha256:964cedd2b27c492fbf0b7f58b3284a09cf7f99b0f715941fb24a439b3af1bd1a" + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" ], - "version": "==0.11.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.13.1" + }, + "pre-commit": { + "hashes": [ + "sha256:16212d1fde2bed88159287da88ff03796863854b04dc9f838a55979325a3d20e", + "sha256:399baf78f13f4de82a29b649afd74bef2c4e28eb4f021661fc7f29246e8c7a3a" + ], + "index": "pypi", + "version": "==2.10.1" }, "py": { "hashes": [ - "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", - "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" ], - "version": "==1.8.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.6.0" + }, + "pyflakes": { + "hashes": [ + "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", + "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.2.0" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" }, "pytest": { "hashes": [ - "sha256:136632a40451162cdfc18fe4d7ecc5d169b558a3d4bbb1603d4005308a42fd03", - "sha256:62b129bf8368554ca7a942cbdb57ea26aafef46cc65bc317cdac3967e54483a3" + "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9", + "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839" ], "index": "pypi", - "version": "==4.4.2" + "version": "==6.2.2" + }, + "pyyaml": { + "hashes": [ + "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", + "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", + "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", + "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", + "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", + "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", + "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", + "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", + "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", + "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", + "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", + "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", + "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", + 
"sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", + "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", + "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", + "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", + "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", + "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", + "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", + "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==5.4.1" + }, + "regex": { + "hashes": [ + "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538", + "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4", + "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc", + "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa", + "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444", + "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1", + "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af", + "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8", + "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9", + "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88", + "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba", + "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364", + "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e", + "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7", + "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0", + "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31", + "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683", + "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee", + "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b", + "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884", + "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c", + "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e", + "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562", + "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85", + "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c", + "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6", + "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d", + "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b", + "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70", + "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b", + "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b", + "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f", + "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0", + "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5", + "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5", + "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f", + 
"sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e", + "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512", + "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d", + "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917", + "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f" + ], + "version": "==2020.11.13" }, "requests": { "hashes": [ - "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", - "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" + "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804", + "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e" ], "index": "pypi", - "version": "==2.21.0" + "version": "==2.25.1" }, "six": { "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.15.0" + }, + "smmap": { + "hashes": [ + "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714", + "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==3.0.5" + }, + "stevedore": { + "hashes": [ + "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee", + "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a" + ], + "markers": "python_version >= '3.6'", + "version": "==3.3.0" + }, + "sumologic-sdk": { + "editable": true, + "extras": [ + "test" + ], + "path": "." 
+ }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" ], - "version": "==1.12.0" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" + }, + "typed-ast": { + "hashes": [ + "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1", + "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d", + "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6", + "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd", + "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37", + "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151", + "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07", + "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440", + "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70", + "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496", + "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea", + "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400", + "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc", + "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606", + "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc", + "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581", + "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412", + "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a", + "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2", + "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787", + "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f", + "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937", + "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64", + "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487", + "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b", + "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41", + "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a", + "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3", + "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", + "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" + ], + "version": "==1.4.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", + "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", + "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" + ], + "version": "==3.7.4.3" }, "urllib3": { "hashes": [ - "sha256:2393a695cd12afedd0dcb26fe5d50d0cf248e5a66f75dbd89a3d4eb333a61af4", - "sha256:a637e5fae88995b256e3409dc4d52c2e2e0ba32c42a6365fee8bbd2238de3cfb" + "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80", + "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'", + "version": "==1.26.3" + }, + "virtualenv": { + "hashes": [ + 
"sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d", + "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3" ], - "version": "==1.24.3" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.4.2" } } } diff --git a/requirements.txt b/requirements.txt index b7773ae..01f0e4e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ requests>=2.2.1 +logzero>=1.0 diff --git a/sumologic/sumologic.py b/sumologic/sumologic.py index 1a0ff60..9a79c38 100644 --- a/sumologic/sumologic.py +++ b/sumologic/sumologic.py @@ -1,17 +1,57 @@ import json import requests +import urllib +import time import os import sys +import warnings +from logzero import logger +import logzero try: import cookielib except ImportError: import http.cookiejar as cookielib + +# API RATE Limit constants +MAX_TRIES = 10 +NUMBER_OF_CALLS = 4 +# per +PERIOD = 1 # in seconds + + +def backoff(func): + def limited(*args, **kwargs): + delay = PERIOD / NUMBER_OF_CALLS + tries = 0 + lastException = None + while tries < MAX_TRIES: + tries += 1 + try: + return func(*args, **kwargs) + except requests.exceptions.HTTPError as e: + if e.response.status_code == 429: # rate limited + logger.debug("Rate limited, sleeping for {0}s".format(delay)) + time.sleep(delay) + delay = delay * 2 + lastException = e + continue + else: + raise + logger.debug("Rate limited function still failed after {0} retries.".format(MAX_TRIES)) + raise lastException + + return limited + + class SumoLogic(object): - def __init__(self, accessId, accessKey, endpoint=None, caBundle=None, cookieFile='cookies.txt'): + def __init__(self, accessId, accessKey, endpoint=None, log_level='info', log_file=None, caBundle=None, cookieFile='cookies.txt'): self.session = requests.Session() + self.log_level = log_level + self.set_log_level(self.log_level) + if log_file: + logzero.logfile(str(log_file)) self.session.auth = (accessId, accessKey) - self.DEFAULT_VERSION = 'v1' self.session.headers = {'content-type': 'application/json', 'accept': 'application/json'} if caBundle is not None: self.session.verify = caBundle @@ -21,8 +61,32 @@ def __init__(self, accessId, accessKey, endpoint=None, caBundle=None, cookieFile self.endpoint = self._get_endpoint() else: self.endpoint = endpoint - if self.endpoint[-1:] == "/": - raise Exception("Endpoint should not end with a slash character") + if self.endpoint[-4:] == "/v1": + self.endpoint = self.endpoint[:-4] + warnings.warn('Endpoint should no longer end in "/v1/", it has been removed from your endpoint string.', + DeprecationWarning) + if endpoint[-1:] == "/": + self.endpoint = self.endpoint[:-1] + warnings.warn( + "Endpoint should not end with a slash character, it has been removed from your endpoint string.") + + def set_log_level(self, log_level): + if log_level == 'info': + self.log_level = log_level + logzero.loglevel(level=20) + return True + elif log_level == 'debug': + self.log_level = log_level + logzero.loglevel(level=10) + logger.debug("[Sumologic SDK] Setting logging level to 'debug'") + return True + else: + raise Exception("Bad Logging Level") + logger.info("[Sumologic SDK] Attempt to set undefined logging level.") + return False + + def get_log_level(self): + return self.log_level def _get_endpoint(self): """ @@ -41,40 +105,87 @@ def _get_endpoint(self): self.endpoint = 'https://api.sumologic.com/api' self.response = self.session.get('https://api.sumologic.com/api/v1/collectors') # Dummy call to get endpoint endpoint = 
 
     def _get_endpoint(self):
         """
@@ -41,40 +105,87 @@ def _get_endpoint(self):
         self.endpoint = 'https://api.sumologic.com/api'
         self.response = self.session.get('https://api.sumologic.com/api/v1/collectors')  # Dummy call to get endpoint
         endpoint = self.response.url.replace('/v1/collectors', '')  # dirty hack to sanitise URI and retain domain
-        print("SDK Endpoint", endpoint, file=sys.stderr)
+        logger.info("SDK Endpoint {}".format(str(endpoint)))
         return endpoint
 
     def get_versioned_endpoint(self, version):
         return self.endpoint+'/%s' % version
 
-    def delete(self, method, params=None, version=None):
-        version = version or self.DEFAULT_VERSION
-        endpoint = self.get_versioned_endpoint(version)
-        r = self.session.delete(endpoint + method, params=params)
-        if 400 <= r.status_code < 600:
+    @backoff
+    def delete(self, method, params=None, headers=None, data=None):
+        logger.debug("DELETE: " + self.endpoint + method)
+        logger.debug("Headers:")
+        logger.debug(headers)
+        logger.debug("Params:")
+        logger.debug(params)
+        logger.debug("Body:")
+        logger.debug(data)
+        r = self.session.delete(self.endpoint + method, params=params, headers=headers, data=data)
+        logger.debug("Response:")
+        logger.debug(r)
+        logger.debug("Response Body:")
+        logger.debug(r.text)
+        if r.status_code != 200:
             r.reason = r.text
         r.raise_for_status()
         return r
 
-    def get(self, method, params=None, version=None):
-        version = version or self.DEFAULT_VERSION
-        endpoint = self.get_versioned_endpoint(version)
-        r = self.session.get(endpoint + method, params=params)
-        if 400 <= r.status_code < 600:
+    @backoff
+    def get(self, method, params=None, headers=None):
+        logger.debug("GET: " + self.endpoint + method)
+        logger.debug("Headers:")
+        logger.debug(headers)
+        logger.debug("Params:")
+        logger.debug(params)
+        r = self.session.get(self.endpoint + method, params=params, headers=headers)
+        logger.debug("Response:")
+        logger.debug(r)
+        logger.debug("Response Body:")
+        logger.debug(r.text)
+        if r.status_code != 200:
             r.reason = r.text
         r.raise_for_status()
         return r
 
-    def post(self, method, params, headers=None, version=None):
-        version = version or self.DEFAULT_VERSION
-        endpoint = self.get_versioned_endpoint(version)
-        r = self.session.post(endpoint + method, data=json.dumps(params), headers=headers)
-        if 400 <= r.status_code < 600:
+    @backoff
+    def post(self, method, data, headers=None, params=None):
+        logger.debug("POST: " + self.endpoint + method)
+        logger.debug("Headers:")
+        logger.debug(headers)
+        logger.debug("Params:")
+        logger.debug(params)
+        logger.debug("Body:")
+        logger.debug(data)
+        r = self.session.post(self.endpoint + method, data=json.dumps(data), headers=headers, params=params)
+        logger.debug("Response:")
+        logger.debug(r)
+        logger.debug("Response Body:")
+        logger.debug(r.text)
+        if r.status_code != 200:
             r.reason = r.text
         r.raise_for_status()
         return r
 
-    def post_file(self, method, params, headers=None, version=None):
+    @backoff
+    def put(self, method, data, headers=None, params=None):
+        logger.debug("PUT: " + self.endpoint + method)
+        logger.debug("Headers:")
+        logger.debug(headers)
+        logger.debug("Params:")
+        logger.debug(params)
+        logger.debug("Body:")
+        logger.debug(data)
+        r = self.session.put(self.endpoint + method, data=json.dumps(data), headers=headers, params=params)
+        logger.debug("Response:")
+        logger.debug(r)
+        logger.debug("Response Body:")
+        logger.debug(r.text)
+        if r.status_code != 200:
+            r.reason = r.text
+        r.raise_for_status()
+        return r
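+
+    # Editor's note (hedged): with DEFAULT_VERSION removed, callers now pass the
+    # fully versioned path to these verbs, e.g.
+    #   sumo.get('/v1/collectors', params={'limit': 10})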
+
+    def post_file(self, method, params, headers=None):
         """
         Handle file uploads via a separate post request to avoid having to clear the content-type header in the session.
@@ -87,161 +198,337 @@ def post_file(self, method, params, headers=None, version=None):
         try to clear the content-type from the session.  Thus we don't re-use the session for the upload, rather
         we create a new one off session.
         """
-        version = version or self.DEFAULT_VERSION
-        endpoint = self.get_versioned_endpoint(version)
+
         post_params = {'merge': params['merge']}
         file_data = open(params['full_file_path'], 'rb').read()
         files = {'file': (params['file_name'], file_data)}
-        r = requests.post(endpoint + method, files=files, params=post_params,
+        r = requests.post(self.endpoint + method, files=files, params=post_params,
                           auth=(self.session.auth[0], self.session.auth[1]), headers=headers)
         if 400 <= r.status_code < 600:
             r.reason = r.text
         r.raise_for_status()
         return r
 
-    def put(self, method, params, headers=None, version=None):
-        version = version or self.DEFAULT_VERSION
-        endpoint = self.get_versioned_endpoint(version)
-        r = self.session.put(endpoint + method, data=json.dumps(params), headers=headers)
-        if 400 <= r.status_code < 600:
-            r.reason = r.text
-            r.raise_for_status()
-        return r
+    # Search API
 
-    def search(self, query, fromTime=None, toTime=None, timeZone='UTC'):
-        params = {'q': query, 'from': fromTime, 'to': toTime, 'tz': timeZone}
-        r = self.get('/logs/search', params)
-        return json.loads(r.text)
-
-    def search_job(self, query, fromTime=None, toTime=None, timeZone='UTC', byReceiptTime=None):
-        params = {'query': query, 'from': fromTime, 'to': toTime, 'timeZone': timeZone, 'byReceiptTime': byReceiptTime}
-        r = self.post('/search/jobs', params)
-        return json.loads(r.text)
+    def search_job(self, query, fromTime=None, toTime=None, timeZone='UTC', byReceiptTime=False):
+        data = {'query': str(query), 'from': str(fromTime), 'to': str(toTime), 'timeZone': str(timeZone), 'byReceiptTime': str(byReceiptTime)}
+        r = self.post('/v1/search/jobs', data)
+        return r.json()
 
     def search_job_status(self, search_job):
-        r = self.get('/search/jobs/' + str(search_job['id']))
-        return json.loads(r.text)
+        r = self.get('/v1/search/jobs/' + str(search_job['id']))
+        return r.json()
+
+    def search_job_records_sync(self, query, fromTime=None, toTime=None, timeZone=None, byReceiptTime=None):
+        searchjob = self.search_job(query, fromTime=fromTime, toTime=toTime, timeZone=timeZone, byReceiptTime=byReceiptTime)
+        status = self.search_job_status(searchjob)
+        numrecords = status['recordCount']
+        while status['state'] != 'DONE GATHERING RESULTS':
+            if status['state'] == 'CANCELLED':
+                break
+            status = self.search_job_status(searchjob)
+            numrecords = status['recordCount']
+        if status['state'] == 'DONE GATHERING RESULTS':
+            jobrecords = []
+            iterations = numrecords // 10000 + 1
+
+            for iteration in range(1, iterations + 1):
+                records = self.search_job_records(searchjob, limit=10000,
+                                                  offset=((iteration - 1) * 10000))
+                for record in records['records']:
+                    jobrecords.append(record)
+            return jobrecords  # returns a list
+        else:
+            return status
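+
+    # Hedged example of the synchronous search helper above (query and time
+    # values are illustrative placeholders):
+    #   records = sumo.search_job_records_sync('error | count by _sourceHost',
+    #                                          fromTime='2021-01-01T00:00:00',
+    #                                          toTime='2021-01-02T00:00:00',
+    #                                          timeZone='UTC', byReceiptTime=False)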
+
+    def search_job_messages_sync(self, query, fromTime=None, toTime=None, timeZone=None, byReceiptTime=None):
+        searchjob = self.search_job(query, fromTime=fromTime, toTime=toTime, timeZone=timeZone, byReceiptTime=byReceiptTime)
+        status = self.search_job_status(searchjob)
+        nummessages = status['messageCount']
+        while status['state'] != 'DONE GATHERING RESULTS':
+            if status['state'] == 'CANCELLED':
+                break
+            status = self.search_job_status(searchjob)
+            nummessages = status['messageCount']
+        if status['state'] == 'DONE GATHERING RESULTS':
+            jobmessages = []
+            iterations = nummessages // 10000 + 1
+
+            for iteration in range(1, iterations + 1):
+                messages = self.search_job_messages(searchjob, limit=10000,
+                                                    offset=((iteration - 1) * 10000))
+                for message in messages['messages']:
+                    jobmessages.append(message)
+            return jobmessages  # returns a list
+        else:
+            return status
 
     def search_job_messages(self, search_job, limit=None, offset=0):
         params = {'limit': limit, 'offset': offset}
-        r = self.get('/search/jobs/' + str(search_job['id']) + '/messages', params)
-        return json.loads(r.text)
+        r = self.get('/v1/search/jobs/' + str(search_job['id']) + '/messages', params)
+        return r.json()
 
     def search_job_records(self, search_job, limit=None, offset=0):
         params = {'limit': limit, 'offset': offset}
-        r = self.get('/search/jobs/' + str(search_job['id']) + '/records', params)
-        return json.loads(r.text)
+        r = self.get('/v1/search/jobs/' + str(search_job['id']) + '/records', params)
+        return r.json()
 
     def delete_search_job(self, search_job):
-        return self.delete('/search/jobs/' + str(search_job['id']))
+        r = self.delete('/v1/search/jobs/' + str(search_job['id']))
+        return r.json()
+
+    # Collectors API
 
+    # included for backwards compatibility with older community SDK
     def collectors(self, limit=None, offset=None, filter_type=None):
+        return self.get_collectors(limit=limit, offset=offset, filter_type=filter_type)
+
+    def get_collectors(self, limit=1000, offset=None, filter_type=None):
         params = {'limit': limit, 'offset': offset}
         if filter_type:
             params['filter'] = filter_type
-        r = self.get('/collectors', params)
-        return json.loads(r.text)['collectors']
+        r = self.get('/v1/collectors', params)
+        return r.json()['collectors']
+
+    def get_collectors_sync(self, limit=1000, filter_type=None):
+        offset = 0
+        results = []
+        r = self.get_collectors(limit=limit, offset=offset, filter_type=filter_type)
+        offset = offset + limit
+        results = results + r
+        while not (len(r) < limit):
+            r = self.get_collectors(limit=limit, offset=offset, filter_type=filter_type)
+            offset = offset + limit
+            results = results + r
+        return results
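+
+    # Offset-paging sketch (editor's illustration): get_collectors_sync() keeps
+    # fetching `limit`-sized pages until a short page signals the last one:
+    #   all_collectors = sumo.get_collectors_sync(limit=1000)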
+
+    def get_collector_by_id(self, collector_id):
+        r = self.get('/v1/collectors/' + str(collector_id))
+        return r.json()['collector'], r.headers['etag']
+
+    # The following calls the Sumo "get collector by name" method which does not support special characters like ; / % \
+    def get_collector_by_name(self, name):
+        encoded_name = urllib.parse.quote(str(name))
+        r = self.get('/v1/collectors/name/' + encoded_name)
+        return r.json()['collector'], r.headers['etag']
+
+    # this version makes multiple calls but should work with special characters in the collector name
+    def get_collector_by_name_alternate(self, name):
+        sumocollectors = self.get_collectors_sync()
+        for sumocollector in sumocollectors:
+            if sumocollector['name'] == str(name):
+                collector, _ = self.get_collector_by_id(sumocollector['id'])
+                return collector
 
+    # for backward compatibility with old community API
     def collector(self, collector_id):
-        r = self.get('/collectors/' + str(collector_id))
-        return json.loads(r.text), r.headers['etag']
+        r = self.get('/v1/collectors/' + str(collector_id))
+        return r.json(), r.headers['etag']
 
     def create_collector(self, collector, headers=None):
-        return self.post('/collectors', collector, headers)
+        r = self.post('/v1/collectors', collector, headers)
+        return r.json()
 
     def update_collector(self, collector, etag):
         headers = {'If-Match': etag}
-        return self.put('/collectors/' + str(collector['collector']['id']), collector, headers)
+        r = self.put('/v1/collectors/' + str(collector['collector']['id']), collector, headers)
+        return r.json()
 
-    def delete_collector(self, collector):
-        return self.delete('/collectors/' + str(collector['collector']['id']))
+    def delete_collector(self, collector_id):
+        r = self.delete('/v1/collectors/' + str(collector_id))
+        return r.json()
 
-    def sources(self, collector_id, limit=None, offset=None):
+    def get_sources(self, collector_id, limit=None, offset=None):
         params = {'limit': limit, 'offset': offset}
-        r = self.get('/collectors/' + str(collector_id) + '/sources', params)
+        r = self.get('/v1/collectors/' + str(collector_id) + '/sources', params)
         return json.loads(r.text)['sources']
 
+    def get_sources_sync(self, collector_id, limit=1000):
+        offset = 0
+        results = []
+        r = self.get_sources(collector_id, limit=limit, offset=offset)
+        offset = offset + limit
+        results = results + r
+        while not (len(r) < limit):
+            r = self.get_sources(collector_id, limit=limit, offset=offset)
+            offset = offset + limit
+            results = results + r
+        return results
+
+    # for backward compatibility with old community API
+    def sources(self, collector_id, limit=None, offset=None):
+        return self.get_sources(collector_id, limit=limit, offset=offset)
+
+    def get_source(self, collector_id, source_id):
+        r = self.get('/v1/collectors/' + str(collector_id) + '/sources/' + str(source_id))
+        return r.json()
+
+    def get_source_with_etag(self, collector_id, source_id):
+        r = self.get('/v1/collectors/' + str(collector_id) + '/sources/' + str(source_id))
+        return r.headers.get('etag'), r.json()
+
+    # for backward compatibility with old community API
     def source(self, collector_id, source_id):
-        r = self.get('/collectors/' + str(collector_id) + '/sources/' + str(source_id))
-        return json.loads(r.text), r.headers['etag']
+        return self.get_source(collector_id, source_id)
 
     def create_source(self, collector_id, source):
-        return self.post('/collectors/' + str(collector_id) + '/sources', source)
+        r = self.post('/v1/collectors/' + str(collector_id) + '/sources', source)
+        return r.json()
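+
+    # Hedged optimistic-locking sketch (IDs are placeholders): fetch a source
+    # with its etag, modify the returned document, then update with that etag:
+    #   etag, source = sumo.get_source_with_etag(collector_id, source_id)
+    #   source['source']['name'] = 'renamed-source'
+    #   sumo.update_source(collector_id, source, etag)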
 
     def update_source(self, collector_id, source, etag):
         headers = {'If-Match': etag}
-        return self.put('/collectors/' + str(collector_id) + '/sources/' + str(source['source']['id']), source, headers)
-
-    def delete_source(self, collector_id, source):
-        return self.delete('/collectors/' + str(collector_id) + '/sources/' + str(source['source']['id']))
-
-    def create_content(self, path, data):
-        r = self.post('/content/' + path, data)
-        return r.text
-
-    def dashboards(self, monitors=False):
-        params = {'monitors': monitors}
-        r = self.get('/dashboards', params)
-        return json.loads(r.text)['dashboards']
-
-    def dashboard(self, dashboard_id):
-        r = self.get('/dashboards/' + str(dashboard_id))
-        return json.loads(r.text)['dashboard']
-
-    def dashboard_data(self, dashboard_id):
-        r = self.get('/dashboards/' + str(dashboard_id) + '/data')
-        return json.loads(r.text)['dashboardMonitorDatas']
-
-    def search_metrics(self, query, fromTime=None, toTime=None, requestedDataPoints=600, maxDataPoints=800):
-        '''Perform a single Sumo metrics query'''
-        def millisectimestamp(ts):
-            '''Convert UNIX timestamp to milliseconds'''
-            if ts > 10**12:
-                ts = ts/(10**(len(str(ts))-13))
-            else:
-                ts = ts*10**(12-len(str(ts)))
-            return int(ts)
-
-        params = {'query': [{"query": query, "rowId": "A"}],
-                  'startTime': millisectimestamp(fromTime),
-                  'endTime': millisectimestamp(toTime),
-                  'requestedDataPoints': requestedDataPoints,
-                  'maxDataPoints': maxDataPoints}
-        r = self.post('/metrics/results', params)
-        return json.loads(r.text)
+        r = self.put('/v1/collectors/' + str(collector_id) + '/sources/' + str(source['source']['id']), source, headers)
+        return r.json()
+
+    def delete_source(self, collector_id, source_id):
+        r = self.delete('/v1/collectors/' + str(collector_id) + '/sources/' + str(source_id))
+        return r
+
+    ############################################
+
+    ###############################################
+
+    # Unverified API calls. These are disabled as they are not documented by Sumo Logic or have been replaced
+    # def dashboards(self, monitors=False):
+    #     params = {'monitors': monitors}
+    #     r = self.get('/v1/dashboards', params)
+    #     return json.loads(r.text)['dashboards']
+    #
+    # def dashboard(self, dashboard_id):
+    #     r = self.get('/v1/dashboards/' + str(dashboard_id))
+    #     return json.loads(r.text)['dashboard']
+    #
+    # def dashboard_data(self, dashboard_id):
+    #     r = self.get('/v1/dashboards/' + str(dashboard_id) + '/data')
+    #     return json.loads(r.text)['dashboardMonitorDatas']
+
+    # def search_metrics(self, query, fromTime=None, toTime=None, requestedDataPoints=600, maxDataPoints=800):
+    #     '''Perform a single Sumo metrics query'''
+    #     def millisectimestamp(ts):
+    #         '''Convert UNIX timestamp to milliseconds'''
+    #         if ts > 10**12:
+    #             ts = ts/(10**(len(str(ts))-13))
+    #         else:
+    #             ts = ts*10**(12-len(str(ts)))
+    #         return int(ts)
+    #
+    #     params = {'query': [{"query": query, "rowId": "A"}],
+    #               'startTime': millisectimestamp(fromTime),
+    #               'endTime': millisectimestamp(toTime),
+    #               'requestedDataPoints': requestedDataPoints,
+    #               'maxDataPoints': maxDataPoints}
+    #     r = self.post('/v1/metrics/results', params)
+    #     return r.json()
+
+    # def create_content(self, path, data):
+    #     r = self.post('/content/' + path, data)
+    #     return r.text
 
     def get_available_builds(self):
-        r = self.get('/collectors/upgrades/targets')
-        return json.loads(r.text)['targets']
+        r = self.get('/v1/collectors/upgrades/targets')
+        return r.json()['targets']
+
+    # def sync_folder(self, folder_id, content):
+    #     return self.post('/content/folders/%s/synchronize' % folder_id, params=content, version='v2')
+    #
+    # def check_sync_folder(self, folder_id, job_id):
+    #     return self.get('/content/folders/%s/synchronize/%s/status' % (folder_id, job_id), version='v2')
+
+    # Permissions API
+
+    def get_permissions(self, id, explicit_only=False, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        params = {'explicitOnly': str(explicit_only).lower()}
+        r = self.get('/v2/content/' + str(id) + '/permissions', headers=headers, params=params)
+        return r.json()
 
-    def sync_folder(self, folder_id, content):
-        return self.post('/content/folders/%s/synchronize' % folder_id, params=content, version='v2')
+    def add_permissions(self, id, body, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.put('/v2/content/' + str(id) + '/permissions/add', body, headers=headers)
+        return r.json()
 
-    def check_sync_folder(self, folder_id, job_id):
-        return self.get('/content/folders/%s/synchronize/%s/status' % (folder_id, job_id), version='v2')
+    def remove_permissions(self, id, body, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.put('/v2/content/' + str(id) + '/permissions/remove', body, headers=headers)
+        return r.json()
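+
+    # Hedged example (the body shape below is assumed from the v2 permissions
+    # API; IDs are placeholders):
+    #   body = {'contentPermissionAssignments': [{'permissionName': 'View',
+    #                                             'sourceType': 'role',
+    #                                             'sourceId': '<roleId>',
+    #                                             'contentId': '<itemId>'}],
+    #           'notifyRecipients': False, 'notificationMessage': ''}
+    #   sumo.add_permissions('<itemId>', body, adminmode=True)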
 
-    def delete_folder(self, folder_id):
-        return self.delete('/content/%s/delete' % folder_id, version='v2')
+    # Folder API
 
-    def create_folder(self, name, description, parent_folder_id):
-        content = {
-            "name": name,
-            "description": description,
-            "parentId": parent_folder_id
-        }
-        return self.post('/content/folders', params=content, version='v2')
+    def create_folder(self, folder_name, parent_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        data = {'name': str(folder_name), 'parentId': str(parent_id)}
+        r = self.post('/v2/content/folders', data, headers=headers)
+        return r.json()
+
+    def get_folder(self, folder_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/folders/' + str(folder_id), headers=headers)
+        return r.json()
+
+    def update_folder(self, id, name, description='', adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        data = {'name': str(name), 'description': str(description)}
+        r = self.put('/v2/content/folders/' + str(id), data, headers=headers)
+        return r.json()
 
     def get_personal_folder(self):
-        return self.get('/content/folders/personal', version='v2')
+        r = self.get('/v2/content/folders/personal')
+        return r.json()
+
+    def get_global_folder_job_status(self, job_id):
+        r = self.get('/v2/content/folders/global/' + str(job_id) + '/status')
+        return r.json()
+
+    def get_global_folder(self, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/folders/global', headers=headers)
+        return r.json()
+
+    def get_global_folder_job_result(self, job_id):
+        r = self.get('/v2/content/folders/global/' + str(job_id) + '/result')
+        return r.json()
 
-    def import_content(self, folder_id, content, is_overwrite="false"):
-        return self.post('/content/folders/%s/import?overwrite=%s' % (folder_id, is_overwrite), params=content, version='v2')
+    def get_global_folder_sync(self, adminmode=False):
+        r = self.get_global_folder(adminmode=adminmode)
+        job_id = str(r['id'])
+        status = self.get_global_folder_job_status(job_id)
+        while status['status'] == 'InProgress':
+            status = self.get_global_folder_job_status(job_id)
+        if status['status'] == 'Success':
+            r = self.get_global_folder_job_result(job_id)
+            return r
+        else:
+            return status
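+
+    # The *_sync helpers here follow Sumo's async job pattern: start a job,
+    # poll its /status endpoint until it leaves 'InProgress', then fetch
+    # /result on 'Success'. Hedged sketch:
+    #   global_view = sumo.get_global_folder_sync(adminmode=True)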
+
+    def get_admin_folder_job_status(self, job_id):
+        r = self.get('/v2/content/folders/adminRecommended/' + str(job_id) + '/status')
+        return r.json()
 
-    def check_import_status(self, folder_id, job_id):
-        return self.get('/content/folders/%s/import/%s/status' % (folder_id, job_id), version='v2')
+    def get_admin_folder(self, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/folders/adminRecommended', headers=headers)
+        return r.json()
+
+    def get_admin_folder_job_result(self, job_id):
+        r = self.get('/v2/content/folders/adminRecommended/' + str(job_id) + '/result')
+        return r.json()
+
+    def get_admin_folder_sync(self, adminmode=False):
+        r = self.get_admin_folder(adminmode=adminmode)
+        job_id = str(r['id'])
+        status = self.get_admin_folder_job_status(job_id)
+        while status['status'] == 'InProgress':
+            status = self.get_admin_folder_job_status(job_id)
+        if status['status'] == 'Success':
+            r = self.get_admin_folder_job_result(job_id)
+            return r
+        else:
+            return status
 
-    def get_folder(self, folder_id):
-        return self.get('/content/folders/%s' % folder_id, version='v2')
+    # Application API
 
     def install_app(self, app_id, content):
-        return self.post('/apps/%s/install' % (app_id), params=content)
+        return self.post('/v1/apps/%s/install' % (app_id), content)
@@ -249,57 +536,575 @@ def install_app(self, app_id, content):
     def check_app_install_status(self, job_id):
-        return self.get('/apps/install/%s/status' % job_id)
+        return self.get('/v1/apps/install/%s/status' % job_id)
 
-    def export_content(self, content_id):
-        return self.post('/content/%s/export' % content_id, params="", version='v2')
+    # Content API
 
-    def check_export_status(self, content_id, job_id):
-        return self.get('/content/%s/export/%s/status' % (content_id, job_id), version='v2')
+    # for backward compatibility with old community API
 
-    def get_export_content_result(self, content_id, job_id):
-        return self.get('/content/%s/export/%s/result' % (content_id, job_id), version='v2')
+    def get_content(self, path):
+        return self.get_content_by_path(path)
 
+    def get_content_by_path(self, item_path):
+        # item_path should start with /Library and use the user's email address if referencing a user home dir
+        # firstname + :space: + lastname will not work here, even though that's how it's displayed in the UI
+        # YES: "/Library/Users/user@demo.com/someItemOrFolder" could be a valid path
+        # NO: "/Library/Users/Demo User/someItemOrFolder" is not a valid path because user first/last names are not
+        #     unique identifiers
+        params = {'path': str(item_path)}
+        r = self.get('/v2/content/path', params=params)
+        return r.json()
 
-    def delete_content(self, content_id):
-        return self.delete('/content/%s/delete' % content_id, version='v2')
+    def get_item_path(self, item_id):
+        r = self.get('/v2/content/' + str(item_id) + '/path')
+        return r.json()
 
-    def check_delete_status(self, content_id, job_id):
-        return self.get('/content/%s/delete/%s/status' % (content_id, job_id), version='v2')
+    def delete_content_job(self, item_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.delete('/v2/content/' + str(item_id) + '/delete', headers=headers)
+        return r.json()
 
-    def get_content(self, path):
-        return self.get('/content/%s' % path, version='v2')
+    # for backward compatibility with old community API
+
+    def check_delete_status(self, item_id, job_id, adminmode=False):
+        return self.get_delete_content_job_status(item_id, job_id, adminmode=adminmode)
+
+    def get_delete_content_job_status(self, item_id, job_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/' + str(item_id) + '/delete/' + str(job_id) + '/status', headers=headers)
+        return r.json()
 
-    def copy_content(self, content_id, destination_folder):
-        return self.post('/content/%s/copy?destinationFolder=%s' % (content_id, destination_folder), params=None, version='v2')
+    def delete_content_job_sync(self, item_id, adminmode=False):
+        r = self.delete_content_job(str(item_id), adminmode=adminmode)
+        job_id = str(r['id'])
+        status = self.get_delete_content_job_status(str(item_id), str(job_id), adminmode=adminmode)
+        while status['status'] == 'InProgress':
+            status = self.get_delete_content_job_status(str(item_id), str(job_id), adminmode=adminmode)
+        return status
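+
+    # Hedged example: resolve an item by its Library path (user home folders
+    # are addressed by email address, per get_content_by_path above), then delete it:
+    #   item = sumo.get_content_by_path('/Library/Users/user@demo.com/Old Folder')
+    #   sumo.delete_content_job_sync(item['id'], adminmode=False)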
 
+    # for backward compatibility with old community API
+
+    def export_content(self, item_id, adminmode=False):
+        return self.export_content_job(item_id, adminmode=adminmode)
+
+    def export_content_job(self, item_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        data = {}
+        r = self.post('/v2/content/' + str(item_id) + '/export', data, headers=headers)
+        return r.json()
+
+    # for backward compatibility with old community API
+
+    def check_export_status(self, item_id, job_id, adminmode=False):
+        return self.get_export_content_job_status(item_id, job_id, adminmode=adminmode)
+
+    def get_export_content_job_status(self, item_id, job_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/' + str(item_id) + '/export/' + str(job_id) + '/status', headers=headers)
+        return r.json()
+
+    # for backward compatibility with old community API
+
+    def get_export_content_result(self, item_id, job_id, adminmode=False):
+        return self.get_export_content_job_result(item_id, job_id, adminmode=adminmode)
+
+    def get_export_content_job_result(self, item_id, job_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/' + str(item_id) + '/export/' + str(job_id) + '/result', headers=headers)
+        return r.json()
+
+    def export_content_job_sync(self, item_id, adminmode=False):
+        r = self.export_content_job(str(item_id), adminmode=adminmode)
+        job_id = str(r['id'])
+        status = self.get_export_content_job_status(item_id, job_id, adminmode=adminmode)
+        while status['status'] == 'InProgress':
+            status = self.get_export_content_job_status(item_id, job_id, adminmode=adminmode)
+        if status['status'] == 'Success':
+            r = self.get_export_content_job_result(item_id, job_id, adminmode=adminmode)
+            return r
+        else:
+            return status
+
+    def import_content_job(self, folder_id, content, adminmode=False, overwrite=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        params = {'overwrite': str(overwrite).lower()}
+        r = self.post('/v2/content/folders/' + str(folder_id) + '/import', content, headers=headers, params=params)
+        return r.json()
+
+    def get_import_content_job_status(self, folder_id, job_id, adminmode=False):
+        headers = {'isAdminMode': str(adminmode).lower()}
+        r = self.get('/v2/content/folders/' + str(folder_id) + '/import/' + str(job_id) + '/status', headers=headers)
+        return r.json()
+
+    def import_content_job_sync(self, folder_id, content, adminmode=False, overwrite=False):
+        r = self.import_content_job(str(folder_id), content, adminmode=adminmode, overwrite=overwrite)
+        job_id = str(r['id'])
+        status = self.get_import_content_job_status(str(folder_id), str(job_id), adminmode=adminmode)
+        while status['status'] == 'InProgress':
+            time.sleep(1)
+            status = self.get_import_content_job_status(str(folder_id), str(job_id), adminmode=adminmode)
+        return status
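+
+    # Hedged round-trip sketch (IDs are placeholders): export an item, then
+    # import the result into another folder:
+    #   exported = sumo.export_content_job_sync('<itemId>')
+    #   sumo.import_content_job_sync('<folderId>', exported, overwrite=True)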
+
+    # Role API
+
+    def get_roles(self, limit=1000, token='', sort_by='name', name=''):
+        if name != '':
+            params = {'limit': int(limit), 'token': str(token), 'sortBy': str(sort_by), 'name': str(name)}
+        else:
+            params = {'limit': int(limit), 'token': str(token), 'sortBy': str(sort_by)}
+        r = self.get('/v1/roles', params=params)
+        return r.json()
+
+    def get_roles_sync(self, limit=1000, sort_by='name', name=''):
+        token = ''
+        results = []
+        while True:
+            r = self.get_roles(limit=limit, token=token, sort_by=sort_by, name=name)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
+
+    def create_role(self, body):
+        r = self.post('/v1/roles', body)
+        return r.json()
+
+    def get_role(self, id):
+        r = self.get('/v1/roles/' + str(id))
+        return r.json()
+
+    def update_role(self, id, body):
+        r = self.put('/v1/roles/' + str(id), body)
+        return r.json()
+
+    def delete_role(self, id):
+        r = self.delete('/v1/roles/' + str(id))
+        return r
+
+    def assign_role_to_user(self, role_id, user_id):
+        r = self.put('/v1/roles/' + str(role_id) + '/users/' + str(user_id), {})
+        return r.json()
+
+    def remove_role_from_user(self, role_id, user_id):
+        r = self.delete('/v1/roles/' + str(role_id) + '/users/' + str(user_id))
+        return r.json()
+
+    # User API
+
+    def get_users(self, limit=1000, token='', sort_by='lastName', email=''):
+        if email != '':
+            params = {'limit': int(limit), 'token': str(token), 'sortBy': str(sort_by), 'email': str(email)}
+        else:
+            params = {'limit': int(limit), 'token': str(token), 'sortBy': str(sort_by)}
+        r = self.get('/v1/users', params=params)
+        return r.json()
+
+    def get_users_sync(self, limit=1000, sort_by='lastName', email=''):
+        token = ''
+        results = []
+        while True:
+            r = self.get_users(limit=limit, token=token, sort_by=sort_by, email=email)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
+
+    def get_user(self, user_id):
+        r = self.get('/v1/users/' + str(user_id))
+        return r.json()  # ['data']
+
+    # This call gets the user and then all roles the user belongs to. This is useful for exporting or copying a user
+    # to a new org.
+    def get_user_and_roles(self, user_id):
+        user = self.get_user(str(user_id))
+        user['roles'] = []
+        for role_id in user['roleIds']:
+            role = self.get_role(str(role_id))
+            user['roles'].append(role)
+        return user
+
+    def create_user(self, first_name, last_name, email, roleIDs):
+        data = {'firstName': str(first_name), 'lastName': str(last_name), 'email': str(email), 'roleIds': roleIDs}
+        r = self.post('/v1/users', data)
+        return r.json()
+
+    def update_user(self, id, first_name, last_name, email, roleIDs):
+        data = {'firstName': str(first_name), 'lastName': str(last_name), 'email': str(email), 'roleIds': roleIDs}
+        r = self.put('/v1/users/' + str(id), data)
+        return r.json()
+
+    def delete_user(self, id, transferTo=None):
+        if transferTo:
+            params = {'transferTo': str(transferTo)}
+        else:
+            params = None
+        r = self.delete('/v1/users/' + str(id), params=params)
+        return r
+
+    def change_user_email(self, id, email):
+        data = {'email': str(email)}
+        r = self.post('/v1/users/' + str(id) + '/email/requestChange', data)
+        return r.json()
+
+    def reset_user_password(self, id):
+        r = self.post('/v1/users/' + str(id) + '/password/reset', {})
+        return r.json()
+
+    def unlock_user(self, id):
+        r = self.post('/v1/users/' + str(id) + '/unlock', {})
+        return r.json()
+
+    def disable_user_MFA(self, id, email, password):
+        data = {'email': str(email), 'password': str(password)}
+        r = self.put('/v1/users/' + str(id) + '/mfa/disable', data)
+        return r.json()
+
+    # Connections API
+
+    def get_connections(self, limit=1000, token=''):
+        params = {'limit': limit, 'token': token}
+        r = self.get('/v1/connections', params=params)
+        return r.json()
+
+    def get_connections_sync(self, limit=1000):
+        token = None
+        results = []
+        while True:
+            r = self.get_connections(limit=limit, token=token)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
+
+    def create_connection(self, connection):
+        r = self.post('/v1/connections', connection)
+        return r.json()
+
+    def test_connection(self, connection):
+        r = self.post('/v1/connections/test', connection)
+        return r.json()
+
+    def get_connection(self, item_id, type):
+        params = {'type': str(type)}
+        r = self.get('/v1/connections/' + str(item_id), params=params)
+        return r.json()
+
+    def update_connection(self, item_id, connection):
+        r = self.put('/v1/connections/' + str(item_id), connection)
+        return r.json()
+
+    def delete_connection(self, item_id, item_type):
+        params = {'type': str(item_type)}
+        r = self.delete('/v1/connections/' + str(item_id), params=params)
+        return r
+
+    # Field Extraction Rules API
+
+    def get_fers(self, limit=1000, token=''):
+        params = {'limit': limit, 'token': token}
+        r = self.get('/v1/extractionRules', params=params)
+        return r.json()
+
+    def get_fers_sync(self, limit=1000):
+        token = None
+        results = []
+        while True:
+            r = self.get_fers(limit=limit, token=token)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
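+
+    # Token-paging sketch (editor's illustration): the v1 list endpoints return
+    # {'data': [...], 'next': <token>}; the *_sync helpers loop until 'next'
+    # comes back as None:
+    #   fers = sumo.get_fers_sync(limit=1000)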
+    def create_fer(self, name, scope, parse_expression, enabled=False):
+        data = {'name': name, 'scope': scope, 'parseExpression': parse_expression, 'enabled': str(enabled).lower()}
+        r = self.post('/v1/extractionRules', data)
+        return r.json()
+
+    def get_fer(self, item_id):
+        r = self.get('/v1/extractionRules/' + str(item_id))
+        return r.json()
+
+    def update_fer(self, item_id, name, scope, parse_expression, enabled=False):
+        data = {'name': name, 'scope': scope, 'parseExpression': parse_expression, 'enabled': str(enabled).lower()}
+        r = self.put('/v1/extractionRules/' + str(item_id), data)
+        return r.json()
+
+    def delete_fer(self, item_id):
+        r = self.delete('/v1/extractionRules/' + str(item_id))
+        return r
+
+    # Scheduled View API
+
+    def get_scheduled_views(self, limit=1000, token=''):
+        params = {'limit': limit, 'token': token}
+        r = self.get('/v1/scheduledViews', params=params)
+        return r.json()
+
+    def get_scheduled_views_sync(self, limit=1000):
+        token = None
+        results = []
+        while True:
+            r = self.get_scheduled_views(limit=limit, token=token)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
+
+    # start_time must be in RFC3339 format:
+    # https://tools.ietf.org/html/rfc3339
+    # https://medium.com/easyread/understanding-about-rfc-3339-for-datetime-formatting-in-software-engineering-940aa5d5f68a
+    def create_scheduled_view(self, index_name, query, start_time, retention_period=-1, data_forwarding_id=None):
+        data = {'indexName': str(index_name), 'query': str(query), 'startTime': str(start_time), 'retentionPeriod': int(retention_period)}
+        if data_forwarding_id is not None:
+            # only send dataForwardingId when one is given; str(None) would send the literal 'None'
+            data['dataForwardingId'] = str(data_forwarding_id)
+        r = self.post('/v1/scheduledViews', data)
+        return r.json()
+
+    def get_scheduled_view(self, item_id):
+        r = self.get('/v1/scheduledViews/' + str(item_id))
+        return r.json()
+
+    def update_scheduled_view(self, item_id, data_forwarding_id=None, retention_period=-1, reduce_retention_period_immediately=False):
+        data = {'retentionPeriod': retention_period,
+                'dataForwardingId': data_forwarding_id,
+                'reduceRetentionPeriodImmediately': str(reduce_retention_period_immediately).lower()}
+        r = self.put('/v1/scheduledViews/' + str(item_id), data)
+        return r.json()
+
+    def disable_scheduled_view(self, item_id):
+        r = self.delete('/v1/scheduledViews/' + str(item_id) + '/disable')
+        return r
+
+    # Partitions API
+
+    def get_partitions(self, limit=1000, token=''):
+        params = {'limit': limit, 'token': token}
+        r = self.get('/v1/partitions', params=params)
+        return r.json()
+
+    def get_partitions_sync(self, limit=1000):
+        token = None
+        results = []
+        while True:
+            r = self.get_partitions(limit=limit, token=token)
+            token = r['next']
+            results = results + r['data']
+            if token is None:
+                break
+        return results
+
+    def create_partition(self, name, routing_expression, analytics_tier="enhanced", retention_period=-1, data_forwarding_id=None, is_compliant=False):
+        data = {'name': str(name),
+                'routingExpression': str(routing_expression),
+                'analyticsTier': str(analytics_tier),
+                'retentionPeriod': int(retention_period),
+                'isCompliant': str(is_compliant).lower()}
+        if data_forwarding_id is not None:
+            data['dataForwardingId'] = str(data_forwarding_id)
+
+        r = self.post('/v1/partitions', data)
+        return r.json()
+
+    def get_partition(self, item_id):
+        r = self.get('/v1/partitions/' + str(item_id))
+        return r.json()
+
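+    # Hypothetical usage sketch (the index name, query, and RFC3339 start time
+    # are illustrative):
+    #   view = sumo.create_scheduled_view('prod_errors',
+    #                                     '_sourceCategory=prod | where status_code >= 500',
+    #                                     '2021-02-01T00:00:00Z')
+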
"isCompliant": str(is_compliant).lower()} + r = self.put('/v1/partitions/' + str(item_id),data) + return r.json() + + def decommission_partition(self, item_id): + data ={} + r = self.post('/v1/partitions/' + str(item_id) + '/decommission', data) + return r + + # Monitors API + + def get_usage_info(self): + r = self.get('/v1/monitors/usageInfo') + return r.json() + + def bulk_get_monitors(self, item_ids): + item_ids_string = '' + for item_id in item_ids: + item_ids_string = item_ids_string + str(item_id) + ',' + item_ids_string = item_ids_string[:-1] + params = {'ids': item_ids_string} + r = self.get('/v1/monitors', params=params) + return r.json() + + def create_monitor(self, parent_id, monitor): + params = { 'parentId': str(parent_id)} + r = self.post('/v1/monitors', monitor, params=params) + return r.json() + + def create_monitor_folder(self, parent_id, name, description=''): + data = {'name': str(name), + 'description': str(description), + 'type': 'MonitorsLibraryFolder'} + r = self.create_monitor(parent_id, data) + return r + + def bulk_delete_monitors(self, item_ids): + item_ids_string = '' + for item_id in item_ids: + item_ids_string = item_ids_string + str(item_id) + ',' + item_ids_string = item_ids_string[:-1] + params = {'ids': item_ids_string} + r = self.delete('/v1/monitors', params=params) + return r + + def get_monitor_folder_root(self): + r = self.get('/v1/monitors/root') + return r.json() + + def get_monitor_by_path(self, path): + params = {'path': str(path)} + r = self.get('/v1/monitors/path', params=params) + return r.json() + + def search_monitors(self, query, limit=100, offset=0): + params = {'query': str(query), + 'limit': int(limit), + 'offset': int(offset)} + r = self.get('/v1/monitors/search', params=params) + return r.json() + + def search_monitors_sync(self, query, limit=100): + offset = 0 + results = [] + r = self.search_monitors(query, limit=limit, offset=offset) + offset = offset + limit + results = results + r + while not (len(r) < limit): + r = self.search_monitors(query, limit=limit, offset=offset) + offset = offset + limit + results = results + r + return results + + def get_monitor(self, item_id): + r = self.get('/v1/monitors/' + str(item_id)) + return r.json() + + def update_monitor(self, item_id, name, version, type, description=''): + data = {'name': str(name), + 'description': str(description), + 'version': int(version), + 'type': str(type)} + r = self.put('/v1/monitors/' + str(item_id), data) + return r.json() + + def delete_monitor(self, item_id): + r = self.delete('/v1/monitors/' + str(item_id)) + return r + + def get_monitor_path(self, item_id): + r = self.get('/v1/monitors/' + str(item_id) + '/path') + return r.json() + + def move_monitor(self, item_id, parent_id): + params = { 'parentId': str(parent_id)} + r = self.post('/v1/monitors/' + str(item_id) + '/move', params=params) + return r.json() + + def copy_monitor(self, item_id, parent_id, name=None, description=''): + data = {'parentId': str(parent_id), + 'description': str(description)} + if name: + data['name'] = str(name) + r = self.post('/v1/monitors/' + str(item_id) + '/copy') + return r.json() + + def export_monitor(self, item_id): + r = self.get('/v1/monitors/' + str(item_id) + '/export') + return r.json() + + def import_monitor(self, parent_id, monitor): + r = self.post('/v1/monitors/' + str(parent_id) + '/import', monitor) + return r.json() + + # SAML Config API + + def get_saml_configs(self): + r = self.get('/v1/saml/identityProviders') + return r.json() + + def 
+    def get_saml_config_by_name(self, name):
+        configs = self.get_saml_configs()
+        for config in configs:
+            if config['name'] == str(name):
+                return config
+        return False
+
+    def get_saml_config_by_id(self, item_id):
+        configs = self.get_saml_configs()
+        for config in configs:
+            if config['id'] == str(item_id):
+                return config
+        return False
+
+    def create_saml_config(self, saml_config):
+        r = self.post('/v1/saml/identityProviders', saml_config)
+        return r.json()
+
+    def update_saml_config(self, item_id, saml_config):
+        r = self.put('/v1/saml/identityProviders/' + str(item_id), saml_config)
+        return r.json()
+
+    def delete_saml_config(self, item_id):
+        r = self.delete('/v1/saml/identityProviders/' + str(item_id))
+        return r
+
+    def get_whitelisted_users(self):
+        r = self.get('/v1/saml/whitelistedUsers')
+        return r.json()
+
+    def set_whitelisted_user(self, user_id):
+        r = self.post('/v1/saml/whitelistedUsers/' + str(user_id))
+        return r.json()
+
+    def remove_whitelisted_user(self, user_id):
+        r = self.delete('/v1/saml/whitelistedUsers/' + str(user_id))
+        return r.json()
+
+    def enable_saml_lockdown(self):
+        r = self.post('/v1/saml/lockdown/enable')
+        return r.json()
+
+    def disable_saml_lockdown(self):
+        r = self.post('/v1/saml/lockdown/disable')
+        return r.json()
 
-    def check_copy_status(self, content_id, job_id):
-        return self.get('/content/%s/copy/%s/status' % (content_id, job_id), version='v2')
-
-    def move_content(self, content_id, destination_folder):
-        return self.post('/content/%s/move?destinationFolderId=%s' % (content_id, destination_folder), params=None, version='v2')
+    # Lookup table API
 
     def create_lookup_table(self, content):
-        return self.post('/lookupTables', params=content, version='v1')
+        return self.post('/v1/lookupTables', params=content)
 
     def get_lookup_table(self, id):
-        return self.get('/lookupTables/%s' % id, version='v1')
+        return self.get('/v1/lookupTables/%s' % id)
 
     def edit_lookup_table(self, id, content):
-        return self.put('/lookupTables/%s' % id, params=content, version='v1')
+        return self.put('/v1/lookupTables/%s' % id, params=content)
 
     def delete_lookup_table(self, id):
-        return self.delete('/lookupTables/%s' % id, version='v1')
+        return self.delete('/v1/lookupTables/%s' % id)
 
     def upload_csv_lookup_table(self, id, file_path, file_name, merge='false'):
         params={'file_name': file_name, 'full_file_path': os.path.join(file_path, file_name), 'merge': merge }
-        return self.post_file('/lookupTables/%s/upload' % id, params, version='v1')
+        return self.post_file('/v1/lookupTables/%s/upload' % id, params)
 
     def check_lookup_status(self, id):
-        return self.get('/lookupTables/jobs/%s/status' % id, version='v1')
+        return self.get('/v1/lookupTables/jobs/%s/status' % id)
 
     def empty_lookup_table(self, id):
-        return self.post('/lookupTables/%s/truncate'% id, params=None, version='v1')
+        return self.post('/v1/lookupTables/%s/truncate' % id, params=None)
 
     def update_lookup_table(self, id, content):
-        return self.put('/lookupTables/%s/row' % id, params=content, version='v1')
+        return self.put('/v1/lookupTables/%s/row' % id, params=content)
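+
+    # Hypothetical usage sketch (the table id, path, and file name are illustrative):
+    #   sumo.upload_csv_lookup_table('0000000000123ABC', '/tmp', 'hosts.csv')
+    #   sumo.check_lookup_status('<job id from the upload response>')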