Commit

[FIX] Fix bad values and ensure stopping the multiprocessing.
KeyWeeUsr committed Jun 27, 2019
1 parent 31caad1 commit eba7b8f
Showing 3 changed files with 2 additions and 3 deletions.
2 changes: 1 addition & 1 deletion bear/__main__.py
@@ -140,7 +140,7 @@ def run():
         help='set how many processes will be spawn for hashing, 0=max'
     )
     parser.add_argument(
-        '-o', '--output', action='store', type=str,
+        '-o', '--output', action='store', type=str, default='',
         help='output file for the list of duplicates'
     )
     parser.add_argument(
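
The net effect of this hunk is that args.output is always a string: without an explicit default, argparse stores None when -o/--output is omitted. A minimal, self-contained sketch of the difference, reproducing only this one option (the rest of run() is assumed):

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument(
    '-o', '--output', action='store', type=str, default='',
    help='output file for the list of duplicates'
)

# Before the fix (no default) an omitted -o left args.output as None;
# with default='' the value is an empty string instead.
args = parser.parse_args([])
print(repr(args.output))  # ''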
1 change: 0 additions & 1 deletion bear/hashing.py
@@ -44,7 +44,6 @@ def hash_file(path: str) -> str:
     return result


-@ensure_annotations
 def hash_files(files: list) -> dict:
     """
     Hash each of the file in the list.
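
Dropping @ensure_annotations from hash_files is the only visible change here; a plausible reading, given the commit title, is that hash_files is the unit of work handed to worker processes and that those workers must be shut down reliably. The sketch below only illustrates that pattern: the Pool wiring, the chunking helper, and the md5-based hash_file stand-in are assumptions, not bear's actual code.

from hashlib import md5
from multiprocessing import Pool


def hash_file(path: str) -> str:
    # stand-in for bear.hashing.hash_file; the real implementation may differ
    with open(path, 'rb') as handle:
        return md5(handle.read()).hexdigest()


def hash_files(files: list) -> dict:
    # undecorated, matching the hunk above
    return {path: hash_file(path) for path in files}


def hash_in_parallel(chunks: list, processes: int) -> list:
    # hypothetical helper: close() and join() make sure every worker
    # process has stopped before the results are returned
    pool = Pool(processes=processes)
    try:
        results = pool.map(hash_files, chunks)
    finally:
        pool.close()
        pool.join()
    return results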
2 changes: 1 addition & 1 deletion bear/output.py
@@ -82,7 +82,7 @@ def find_duplicates(folders: list, processes: int = 1) -> dict:


 @ensure_annotations
-def output_duplicates(hashes: dict, out: str = None):
+def output_duplicates(hashes: dict, out: str = ''):
     """
     Output a simple structure for the duplicates:
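
output_duplicates keeps its @ensure_annotations decorator (presumably from the third-party ensure package), which validates arguments against their annotations at call time, so a default of None for a parameter annotated str appears to be the kind of "bad value" the commit title refers to. A minimal sketch of that interaction, with the function body stubbed out:

from ensure import ensure_annotations


@ensure_annotations
def output_duplicates(hashes: dict, out: str = ''):
    # body stubbed for the sketch; bear's real function writes the
    # duplicate listing instead
    return bool(out)


output_duplicates({}, out='duplicates.txt')  # passes: out is a str
output_duplicates({})                        # passes: '' is still a str
# output_duplicates({}, out=None)            # raises: None violates 'out: str'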
