From eba7b8f1a3a8ff715986511e211ccca2f58936aa Mon Sep 17 00:00:00 2001
From: Peter Badida
Date: Fri, 28 Jun 2019 00:14:02 +0200
Subject: [PATCH] [FIX] Fix bad values and ensure stopping the multiprocessing.

---
 bear/__main__.py | 2 +-
 bear/hashing.py  | 1 -
 bear/output.py   | 2 +-
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/bear/__main__.py b/bear/__main__.py
index 7a10dc5..462fcc0 100644
--- a/bear/__main__.py
+++ b/bear/__main__.py
@@ -140,7 +140,7 @@ def run():
         help='set how many processes will be spawn for hashing, 0=max'
     )
     parser.add_argument(
-        '-o', '--output', action='store', type=str,
+        '-o', '--output', action='store', type=str, default='',
         help='output file for the list of duplicates'
     )
     parser.add_argument(
diff --git a/bear/hashing.py b/bear/hashing.py
index 82c9747..569e3ab 100644
--- a/bear/hashing.py
+++ b/bear/hashing.py
@@ -44,7 +44,6 @@ def hash_file(path: str) -> str:
     return result
 
 
-@ensure_annotations
 def hash_files(files: list) -> dict:
     """
     Hash each of the file in the list.
diff --git a/bear/output.py b/bear/output.py
index 595b37e..6530354 100644
--- a/bear/output.py
+++ b/bear/output.py
@@ -82,7 +82,7 @@ def find_duplicates(folders: list, processes: int = 1) -> dict:
 
 
 @ensure_annotations
-def output_duplicates(hashes: dict, out: str = None):
+def output_duplicates(hashes: dict, out: str = ''):
     """
     Output a simple structure for the duplicates: