From 93b41de6b4b0ba57eccc4024842b54ba7827374e Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Mon, 24 Oct 2022 14:20:20 -0400
Subject: [PATCH 01/13] Add S3 SSE-C support

https://docs.aws.amazon.com/AmazonS3/latest/userguide/ServerSideEncryptionCustomerKeys.html
---
 README.md               |  3 +++
 s3_storage_provider.py  | 35 ++++++++++++++++++++++++++++-------
 scripts/s3_media_upload | 39 ++++++++++++++++++++++++++++++++++-----
 3 files changed, 65 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index fc6444f..23146a2 100644
--- a/README.md
+++ b/README.md
@@ -27,6 +27,9 @@ media_storage_providers:
     endpoint_url:
     access_key_id:
     secret_access_key:
+    sse_customer_key:
+    # Your SSE-C algorithm is very likely AES256
+    sse_customer_algo:

     # The object storage class used when uploading files to the bucket.
     # Default is STANDARD.
diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index 68137ac..c74fa5c 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -62,7 +62,7 @@ class S3StorageProviderBackend(StorageProvider):
     def __init__(self, hs, config):
         self.cache_directory = hs.config.media.media_store_path
         self.bucket = config["bucket"]
-        self.storage_class = config["storage_class"]
+        self.eargs = config["eargs"]

         self.api_kwargs = {}
         if "region_name" in config:
@@ -118,11 +118,12 @@ class S3StorageProviderBackend(StorageProvider):

         def _store_file():
             with LoggingContext(parent_context=parent_logcontext):
+
                 self._get_s3_client().upload_file(
                     Filename=os.path.join(self.cache_directory, path),
                     Bucket=self.bucket,
                     Key=path,
-                    ExtraArgs={"StorageClass": self.storage_class},
+                    ExtraArgs=self.eargs,
                 )

         return make_deferred_yieldable(
@@ -136,7 +137,9 @@ class S3StorageProviderBackend(StorageProvider):
         d = defer.Deferred()

         def _get_file():
-            s3_download_task(self._get_s3_client(), self.bucket, path, d, logcontext)
+            s3_download_task(
+                self._get_s3_client(), self.bucket, self.eargs, path, d, logcontext
+            )

         self._s3_pool.callInThread(_get_file)
         return make_deferred_yieldable(d)
@@ -158,7 +161,7 @@ class S3StorageProviderBackend(StorageProvider):

         result = {
             "bucket": bucket,
-            "storage_class": storage_class,
+            "eargs": {"StorageClass": storage_class},
         }

         if "region_name" in config:
@@ -173,10 +176,16 @@ class S3StorageProviderBackend(StorageProvider):
         if "secret_access_key" in config:
             result["secret_access_key"] = config["secret_access_key"]

+        if "sse_customer_key" in config:
+            result["eargs"]["SSECustomerKey"] = config["sse_customer_key"]
+            result["eargs"]["SSECustomerAlgorithm"] = config.get(
+                "sse_customer_algo", "AES256"
+            )
+
         return result


-def s3_download_task(s3_client, bucket, key, deferred, parent_logcontext):
+def s3_download_task(s3_client, bucket, key, eargs, deferred, parent_logcontext):
     """Attempts to download a file from S3.

     Args:
@@ -193,9 +202,21 @@ def s3_download_task(s3_client, bucket, key, deferred, parent_logcontext):
     logger.info("Fetching %s from S3", key)

     try:
-        resp = s3_client.get_object(Bucket=bucket, Key=key)
+        if eargs["SSECustomerKey"] and eargs["SSECustomerAlgorithm"]:
+            resp = s3_client.get_object(
+                Bucket=bucket,
+                Key=key,
+                SSECustomerKey=eargs["SSECustomerKey"],
+                SSECustomerAlgorithm=eargs["SSECustomerAlgorithm"],
+            )
+        else:
+            resp = s3_client.get_object(Bucket=bucket, Key=key)
+
     except botocore.exceptions.ClientError as e:
-        if e.response["Error"]["Code"] in ("404", "NoSuchKey",):
+        if e.response["Error"]["Code"] in (
+            "404",
+            "NoSuchKey",
+        ):
             logger.info("Media %s not found in S3", key)
             reactor.callFromThread(deferred.callback, None)
             return
diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index 6be77e7..108d39e 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -167,11 +167,19 @@ def get_local_files(base_path, origin, filesystem_id, m_type):
     return local_files


-def check_file_in_s3(s3, bucket, key):
+def check_file_in_s3(s3, bucket, key, eargs):
     """Check the file exists in S3 (though it could be different)
     """
     try:
-        s3.head_object(Bucket=bucket, Key=key)
+        if eargs["SSECustomerKey"] and eargs["SSECustomerAlgorithm"]:
+            s3.head_object(
+                Bucket=bucket,
+                Key=key,
+                SSECustomerKey=eargs["SSECustomerKey"],
+                SSECustomerAlgorithm=eargs["SSECustomerAlgorithm"],
+            )
+        else:
+            s3.head_object(Bucket=bucket, Key=key)
     except botocore.exceptions.ClientError as e:
         if int(e.response["Error"]["Code"]) == 404:
             return False
@@ -327,13 +335,13 @@ def run_upload(s3, bucket, sqlite_conn, base_path, should_delete, storage_class)
     for rel_file_path in local_files:
         local_path = os.path.join(base_path, rel_file_path)

-        if not check_file_in_s3(s3, bucket, rel_file_path):
+        if not check_file_in_s3(s3, bucket, rel_file_path, eargs):
             try:
                 s3.upload_file(
                     local_path,
                     bucket,
                     rel_file_path,
-                    ExtraArgs={"StorageClass": storage_class},
+                    ExtraArgs=self.eargs,
                 )
             except Exception as e:
                 print("Failed to upload file %s: %s", local_path, e)
@@ -481,6 +489,7 @@ def main():
         "base_path", help="Base path of the media store directory"
     )
     upload_parser.add_argument("bucket", help="S3 bucket to upload to")
+
     upload_parser.add_argument(
         "--storage-class",
         help="S3 storage class to use",
@@ -495,6 +504,17 @@ def main():
         default="STANDARD",
     )

+    upload_parser.add_argument(
+        "--sse-customer-key", help="SSE-C key to use",
+    )
+
+    upload_parser.add_argument(
+        "--sse-customer-algo",
+        help="Algorithm for SSE-C, only used if sse-customer-key is also specified",
+        nargs="?",
+        default="AES256",
+    )
+
     upload_parser.add_argument(
         "--delete",
         action="store_const",
@@ -537,13 +557,22 @@ def main():
     if args.cmd == "upload":
         sqlite_conn = get_sqlite_conn(parser)
         s3 = boto3.client("s3", endpoint_url=args.endpoint_url)
+
+        eargs = {"StorageClass": args.storage_class}
+        if args.sse_customer_key:
+            eargs["SSECustomerKey"] = args.sse_customer_key
+            if args.sse_customer_algo:
+                eargs["SSECustomerAlgorithm"] = args.sse_customer_algo
+            else:
+                eargs["SSECustomerAlgorithm"] = "AES256"
+
         run_upload(
             s3,
             args.bucket,
             sqlite_conn,
             args.base_path,
             should_delete=args.delete,
-            storage_class=args.storage_class,
+            eargs=eargs
         )
         return

From 3e3f059f6c0bb4e953ad71110e58504f7d09e8c5 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Mon, 24 Oct 2022 14:58:26 -0400
Subject: [PATCH 02/13] Match SSE-C docs closer to other optional configs

---
 README.md | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 23146a2..f639ad0 100644
--- a/README.md
+++ b/README.md
@@ -27,9 +27,11 @@ media_storage_providers:
     endpoint_url:
     access_key_id:
     secret_access_key:
-    sse_customer_key:
+
+    # Server Side Encryption for Customer-provided keys
+    #sse_customer_key:
     # Your SSE-C algorithm is very likely AES256
-    sse_customer_algo:
+    #sse_customer_algo:

     # The object storage class used when uploading files to the bucket.
     # Default is STANDARD.

From 4ab0624023b94156769e01239ee5069f07757177 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Mon, 24 Oct 2022 20:00:23 -0400
Subject: [PATCH 03/13] Fix run_upload eargs

---
 scripts/s3_media_upload | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index 108d39e..e3bfb18 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -302,7 +302,7 @@ def run_check_delete(sqlite_conn, base_path):
     print("Updated", len(deleted), "as deleted")


-def run_upload(s3, bucket, sqlite_conn, base_path, should_delete, storage_class):
+def run_upload(s3, bucket, sqlite_conn, base_path, eargs, should_delete):
     """Entry point for upload command
     """
     total = get_not_deleted_count(sqlite_conn)
@@ -571,8 +571,8 @@ def main():
             args.bucket,
             sqlite_conn,
             args.base_path,
-            should_delete=args.delete,
-            eargs=eargs
+            eargs,
+            should_delete=args.delete
         )
         return

From c118826d14a04a2b8a6a14fd621c9a5c3843f531 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Mon, 24 Oct 2022 20:28:11 -0400
Subject: [PATCH 04/13] Fix remaining eargs run_upload reference

---
 scripts/s3_media_upload | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index e3bfb18..2042e32 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -341,7 +341,7 @@ def run_upload(s3, bucket, sqlite_conn, base_path, eargs, should_delete):
                     local_path,
                     bucket,
                     rel_file_path,
-                    ExtraArgs=self.eargs,
+                    ExtraArgs=eargs,
                 )
             except Exception as e:
                 print("Failed to upload file %s: %s", local_path, e)

From 1e2cec3e5656c35f473b92d2e0c1474714ee52d1 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Mon, 24 Oct 2022 21:53:28 -0400
Subject: [PATCH 05/13] Fix s3_download_task args order

---
 s3_storage_provider.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index c74fa5c..d5030e4 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -138,7 +138,7 @@ class S3StorageProviderBackend(StorageProvider):

         def _get_file():
             s3_download_task(
-                self._get_s3_client(), self.bucket, self.eargs, path, d, logcontext
+                self._get_s3_client(), self.bucket, path, self.eargs, d, logcontext
             )

         self._s3_pool.callInThread(_get_file)

From d08c2dcf224f0077ab1d34112dff2437bc3b0fcd Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Sun, 30 Oct 2022 16:52:01 -0400
Subject: [PATCH 06/13] Resolve linting errors

---
 s3_storage_provider.py  | 5 +----
 scripts/s3_media_upload | 7 ++-----
 2 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index d5030e4..8b5a880 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -213,10 +213,7 @@ def s3_download_task(s3_client, bucket, key, eargs, deferred, parent_logcontext)
             resp = s3_client.get_object(Bucket=bucket, Key=key)

     except botocore.exceptions.ClientError as e:
-        if e.response["Error"]["Code"] in (
-            "404",
-            "NoSuchKey",
-        ):
+        if e.response["Error"]["Code"] in ("404","NoSuchKey",):
             logger.info("Media %s not found in S3", key)
             reactor.callFromThread(deferred.callback, None)
             return
diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index 2042e32..2cacbc7 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -338,10 +338,7 @@ def run_upload(s3, bucket, sqlite_conn, base_path, eargs, should_delete):
         if not check_file_in_s3(s3, bucket, rel_file_path, eargs):
             try:
                 s3.upload_file(
-                    local_path,
-                    bucket,
-                    rel_file_path,
-                    ExtraArgs=eargs,
+                    local_path, bucket, rel_file_path, ExtraArgs=eargs,
                 )
             except Exception as e:
                 print("Failed to upload file %s: %s", local_path, e)
@@ -572,7 +569,7 @@ def main():
             sqlite_conn,
             args.base_path,
             eargs,
-            should_delete=args.delete
+            should_delete=args.delete,
         )
         return

From f39ce16a97f1092c389f1f20ea9eef4d9e2c2b67 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Sun, 30 Oct 2022 16:58:08 -0400
Subject: [PATCH 07/13] Fix set whitespace

---
 s3_storage_provider.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index 8b5a880..e5272b8 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -213,7 +213,7 @@ def s3_download_task(s3_client, bucket, key, eargs, deferred, parent_logcontext)
             resp = s3_client.get_object(Bucket=bucket, Key=key)

     except botocore.exceptions.ClientError as e:
-        if e.response["Error"]["Code"] in ("404","NoSuchKey",):
+        if e.response["Error"]["Code"] in ("404", "NoSuchKey",):
             logger.info("Media %s not found in S3", key)
             reactor.callFromThread(deferred.callback, None)
             return

From 095f8ebe86bae569b55f503cff0a55b84798cb56 Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Fri, 4 Nov 2022 13:09:01 -0400
Subject: [PATCH 08/13] Add sse_customer_algo default note

---
 README.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/README.md b/README.md
index f639ad0..50a833a 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,7 @@ media_storage_providers:
     # Server Side Encryption for Customer-provided keys
     #sse_customer_key:
     # Your SSE-C algorithm is very likely AES256
+    # Default is AES256.
    #sse_customer_algo:

     # The object storage class used when uploading files to the bucket.
From ff28d0a02f7c3c666cc61e3be3736f829b1705be Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Fri, 4 Nov 2022 13:10:40 -0400
Subject: [PATCH 09/13] Remove nargs=? from --sse-customer-algo arg

---
 scripts/s3_media_upload | 1 -
 1 file changed, 1 deletion(-)

diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index 2cacbc7..02155af 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -508,7 +508,6 @@ def main():
     upload_parser.add_argument(
         "--sse-customer-algo",
         help="Algorithm for SSE-C, only used if sse-customer-key is also specified",
-        nargs="?",
         default="AES256",
     )


From 5957773cf7494d6054cf3b7b8f62e371a9e2c11b Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Fri, 4 Nov 2022 13:17:00 -0400
Subject: [PATCH 10/13] Refactor eargs to extra_args

---
 s3_storage_provider.py  | 20 ++++++++++----------
 scripts/s3_media_upload | 24 ++++++++++++------------
 2 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index e5272b8..f4d700a 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -62,7 +62,7 @@ class S3StorageProviderBackend(StorageProvider):
     def __init__(self, hs, config):
         self.cache_directory = hs.config.media.media_store_path
         self.bucket = config["bucket"]
-        self.eargs = config["eargs"]
+        self.extra_args = config["extra_args"]

         self.api_kwargs = {}
         if "region_name" in config:
@@ -123,7 +123,7 @@ class S3StorageProviderBackend(StorageProvider):
                     Filename=os.path.join(self.cache_directory, path),
                     Bucket=self.bucket,
                     Key=path,
-                    ExtraArgs=self.eargs,
+                    ExtraArgs=self.extra_args,
                 )

         return make_deferred_yieldable(
@@ -138,7 +138,7 @@ class S3StorageProviderBackend(StorageProvider):

         def _get_file():
             s3_download_task(
-                self._get_s3_client(), self.bucket, path, self.eargs, d, logcontext
+                self._get_s3_client(), self.bucket, path, self.extra_args, d, logcontext
             )

         self._s3_pool.callInThread(_get_file)
@@ -161,7 +161,7 @@ class S3StorageProviderBackend(StorageProvider):

         result = {
             "bucket": bucket,
-            "eargs": {"StorageClass": storage_class},
+            "extra_args": {"StorageClass": storage_class},
         }

         if "region_name" in config:
@@ -177,15 +177,15 @@ class S3StorageProviderBackend(StorageProvider):
             result["secret_access_key"] = config["secret_access_key"]

         if "sse_customer_key" in config:
-            result["eargs"]["SSECustomerKey"] = config["sse_customer_key"]
-            result["eargs"]["SSECustomerAlgorithm"] = config.get(
+            result["extra_args"]["SSECustomerKey"] = config["sse_customer_key"]
+            result["extra_args"]["SSECustomerAlgorithm"] = config.get(
                 "sse_customer_algo", "AES256"
             )

         return result


-def s3_download_task(s3_client, bucket, key, eargs, deferred, parent_logcontext):
+def s3_download_task(s3_client, bucket, key, extra_args, deferred, parent_logcontext):
     """Attempts to download a file from S3.

     Args:
@@ -202,12 +202,12 @@ def s3_download_task(s3_client, bucket, key, eargs, deferred, parent_logcontext)
     logger.info("Fetching %s from S3", key)

     try:
-        if eargs["SSECustomerKey"] and eargs["SSECustomerAlgorithm"]:
+        if extra_args["SSECustomerKey"] and extra_args["SSECustomerAlgorithm"]:
             resp = s3_client.get_object(
                 Bucket=bucket,
                 Key=key,
-                SSECustomerKey=eargs["SSECustomerKey"],
-                SSECustomerAlgorithm=eargs["SSECustomerAlgorithm"],
+                SSECustomerKey=extra_args["SSECustomerKey"],
+                SSECustomerAlgorithm=extra_args["SSECustomerAlgorithm"],
             )
         else:
             resp = s3_client.get_object(Bucket=bucket, Key=key)
diff --git a/scripts/s3_media_upload b/scripts/s3_media_upload
index 02155af..07e83be 100755
--- a/scripts/s3_media_upload
+++ b/scripts/s3_media_upload
@@ -167,16 +167,16 @@ def get_local_files(base_path, origin, filesystem_id, m_type):
     return local_files


-def check_file_in_s3(s3, bucket, key, eargs):
+def check_file_in_s3(s3, bucket, key, extra_args):
     """Check the file exists in S3 (though it could be different)
     """
     try:
-        if eargs["SSECustomerKey"] and eargs["SSECustomerAlgorithm"]:
+        if extra_args["SSECustomerKey"] and extra_args["SSECustomerAlgorithm"]:
             s3.head_object(
                 Bucket=bucket,
                 Key=key,
-                SSECustomerKey=eargs["SSECustomerKey"],
-                SSECustomerAlgorithm=eargs["SSECustomerAlgorithm"],
+                SSECustomerKey=extra_args["SSECustomerKey"],
+                SSECustomerAlgorithm=extra_args["SSECustomerAlgorithm"],
             )
         else:
             s3.head_object(Bucket=bucket, Key=key)
@@ -302,7 +302,7 @@ def run_check_delete(sqlite_conn, base_path):
     print("Updated", len(deleted), "as deleted")


-def run_upload(s3, bucket, sqlite_conn, base_path, eargs, should_delete):
+def run_upload(s3, bucket, sqlite_conn, base_path, extra_args, should_delete):
     """Entry point for upload command
     """
     total = get_not_deleted_count(sqlite_conn)
@@ -335,10 +335,10 @@ def run_upload(s3, bucket, sqlite_conn, base_path, eargs, should_delete):
     for rel_file_path in local_files:
         local_path = os.path.join(base_path, rel_file_path)

-        if not check_file_in_s3(s3, bucket, rel_file_path, eargs):
+        if not check_file_in_s3(s3, bucket, rel_file_path, extra_args):
             try:
                 s3.upload_file(
-                    local_path, bucket, rel_file_path, ExtraArgs=eargs,
+                    local_path, bucket, rel_file_path, ExtraArgs=extra_args,
                 )
             except Exception as e:
                 print("Failed to upload file %s: %s", local_path, e)
@@ -554,20 +554,20 @@ def main():
         sqlite_conn = get_sqlite_conn(parser)
         s3 = boto3.client("s3", endpoint_url=args.endpoint_url)

-        eargs = {"StorageClass": args.storage_class}
+        extra_args = {"StorageClass": args.storage_class}
         if args.sse_customer_key:
-            eargs["SSECustomerKey"] = args.sse_customer_key
+            extra_args["SSECustomerKey"] = args.sse_customer_key
             if args.sse_customer_algo:
-                eargs["SSECustomerAlgorithm"] = args.sse_customer_algo
+                extra_args["SSECustomerAlgorithm"] = args.sse_customer_algo
             else:
-                eargs["SSECustomerAlgorithm"] = "AES256"
+                extra_args["SSECustomerAlgorithm"] = "AES256"

         run_upload(
             s3,
             args.bucket,
             sqlite_conn,
             args.base_path,
-            eargs,
+            extra_args,
             should_delete=args.delete,
         )
         return

From 139e44673cd336301f0764934bca67fec92108fb Mon Sep 17 00:00:00 2001
From: Cody Wyatt Neiman
Date: Fri, 4 Nov 2022 13:18:53 -0400
Subject: [PATCH 11/13] Add extra_args comment and link to docs

---
 s3_storage_provider.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/s3_storage_provider.py b/s3_storage_provider.py
index f4d700a..547f29b 100644
--- a/s3_storage_provider.py
+++ b/s3_storage_provider.py
@@ -62,6 +62,9 @@ class S3StorageProviderBackend(StorageProvider):
     def __init__(self, hs, config):
         self.cache_directory = hs.config.media.media_store_path
         self.bucket = config["bucket"]
+        # A dictionary of extra arguments for uploading files.
+        # See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/customizations/s3.html#boto3.s3.transfer.S3Transfer.ALLOWED_UPLOAD_ARGS
+        # for a list of possible keys.
         self.extra_args = config["extra_args"]

From 966a0cf28af4b017a5259a47c7951722d4825207 Mon Sep 17 00:00:00 2001
From: "Olivier Wilkinson (reivilibre)"
Date: Tue, 8 Nov 2022 12:44:50 +0000
Subject: [PATCH 12/13] (Copy flake8 config from Synapse to dismiss spurious
 error)

---
 .flake8 | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100644 .flake8

diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..b266d8a
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,19 @@
+# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
+# See https://github.com/PyCQA/flake8/issues/234
+[flake8]
+# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
+# for error codes. The ones we ignore are:
+# W503: line break before binary operator
+# W504: line break after binary operator
+# E203: whitespace before ':' (which is contrary to pep8?)
+# E731: do not assign a lambda expression, use a def
+# E501: Line too long (black enforces this for us)
+#
+# flake8-bugbear runs extra checks. Its error codes are described at
+# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
+# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
+# B023: Functions defined inside a loop must not use variables redefined in the loop
+# B024: Abstract base class with no abstract method.
+
+ignore=W503,W504,E203,E731,E501,B019,B023,B024
+

From 0f162da9ed71d07bb79947cf306c5564e8e1b460 Mon Sep 17 00:00:00 2001
From: "Olivier Wilkinson (reivilibre)"
Date: Tue, 8 Nov 2022 12:52:40 +0000
Subject: [PATCH 13/13] fixup! (Copy flake8 config from Synapse to dismiss
 spurious error)

---
 .flake8   | 19 -------------------
 setup.cfg |  3 ++-
 2 files changed, 2 insertions(+), 20 deletions(-)
 delete mode 100644 .flake8

diff --git a/.flake8 b/.flake8
deleted file mode 100644
index b266d8a..0000000
--- a/.flake8
+++ /dev/null
@@ -1,19 +0,0 @@
-# TODO: incorporate this into pyproject.toml if flake8 supports it in the future.
-# See https://github.com/PyCQA/flake8/issues/234
-[flake8]
-# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
-# for error codes. The ones we ignore are:
-# W503: line break before binary operator
-# W504: line break after binary operator
-# E203: whitespace before ':' (which is contrary to pep8?)
-# E731: do not assign a lambda expression, use a def
-# E501: Line too long (black enforces this for us)
-#
-# flake8-bugbear runs extra checks. Its error codes are described at
-# https://github.com/PyCQA/flake8-bugbear#list-of-warnings
-# B019: Use of functools.lru_cache or functools.cache on methods can lead to memory leaks
-# B023: Functions defined inside a loop must not use variables redefined in the loop
-# B024: Abstract base class with no abstract method.
-
-ignore=W503,W504,E203,E731,E501,B019,B023,B024
-
diff --git a/setup.cfg b/setup.cfg
index e590400..fa32a06 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -13,7 +13,8 @@ ignore = W503,E203,E731
 # note that flake8 inherits the "ignore" settings from "pep8" (because it uses
 # pep8 to do those checks), but not the "max-line-length" setting
 max-line-length = 90
-ignore=W503,E203,E731
+# E501: line length is enforced by black; no need to duplicate in flake8
+ignore=W503,E203,E731,E501

 [isort]
 line_length = 89
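
Taken together, the series threads a single dict of boto3 keyword arguments (StorageClass, plus SSECustomerKey/SSECustomerAlgorithm when SSE-C is in use) through every S3 call. A minimal standalone sketch of those calls follows; the bucket, object key, and secret below are hypothetical placeholders, not values from the patches:

    import boto3

    # Hypothetical placeholders; an SSE-C key is any 32-byte secret for AES256.
    BUCKET = "synapse-media"
    MEDIA_KEY = "local_content/ab/cd/efgh"
    SSE_KEY = "0123456789abcdef0123456789abcdef"

    extra_args = {
        "StorageClass": "STANDARD",
        "SSECustomerKey": SSE_KEY,
        "SSECustomerAlgorithm": "AES256",
    }

    s3 = boto3.client("s3")

    # Upload path (_store_file / run_upload): the SSE-C fields ride along in
    # ExtraArgs; boto3 derives the key-MD5 header itself.
    s3.upload_file("/tmp/example-file", BUCKET, MEDIA_KEY, ExtraArgs=extra_args)

    # Download path (s3_download_task): get_object() accepts no ExtraArgs, so
    # the SSE-C fields are passed individually -- the reason the provider
    # unpacks the dict rather than forwarding it wholesale.
    resp = s3.get_object(
        Bucket=BUCKET,
        Key=MEDIA_KEY,
        SSECustomerKey=extra_args["SSECustomerKey"],
        SSECustomerAlgorithm=extra_args["SSECustomerAlgorithm"],
    )
    print(len(resp["Body"].read()), "bytes")

And a matching hypothetical invocation of the upload script with the flags added in patches 01 and 09 (paths and bucket are placeholders; --sse-customer-algo defaults to AES256 and can be omitted):

    ./scripts/s3_media_upload upload /var/lib/synapse/media_store synapse-media \
        --storage-class STANDARD --sse-customer-key "$SSE_KEY"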