diff --git a/CHANGELOG.md b/CHANGELOG.md
index a19331d..5cc984f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,8 +6,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
-### Added
-- Upcoming changes...
+### Fixed
+- Fixed terminal cursor disappearing after aborting scan with Ctrl+C
 
 ## [1.40.1] - 2025-10-29
 ### Changed
diff --git a/src/scanoss/filecount.py b/src/scanoss/filecount.py
index a2f43b1..87b8df7 100644
--- a/src/scanoss/filecount.py
+++ b/src/scanoss/filecount.py
@@ -26,6 +26,7 @@
 import os
 import pathlib
 import sys
+from contextlib import nullcontext
 
 from progress.spinner import Spinner
 
@@ -105,48 +106,46 @@ def count_files(self, scan_dir: str) -> bool:
         """
         success = True
         if not scan_dir:
-            raise Exception(f'ERROR: Please specify a folder to scan')
+            raise Exception('ERROR: Please specify a folder to scan')
         if not os.path.exists(scan_dir) or not os.path.isdir(scan_dir):
             raise Exception(f'ERROR: Specified folder does not exist or is not a folder: {scan_dir}')
         self.print_msg(f'Searching {scan_dir} for files to count...')
-        spinner = None
-        if not self.quiet and self.isatty:
-            spinner = Spinner('Searching ')
-        file_types = {}
-        file_count = 0
-        file_size = 0
-        for root, dirs, files in os.walk(scan_dir):
-            self.print_trace(f'U Root: {root}, Dirs: {dirs}, Files {files}')
-            dirs[:] = self.__filter_dirs(dirs)  # Strip out unwanted directories
-            filtered_files = self.__filter_files(files)  # Strip out unwanted files
-            self.print_trace(f'F Root: {root}, Dirs: {dirs}, Files {filtered_files}')
-            for file in filtered_files:  # Cycle through each filtered file
-                path = os.path.join(root, file)
-                f_size = 0
-                try:
-                    f_size = os.stat(path).st_size
-                except Exception as e:
-                    self.print_trace(f'Ignoring missing symlink file: {file} ({e})')  # broken symlink
-                if f_size > 0:  # Ignore broken links and empty files
-                    file_count = file_count + 1
-                    file_size = file_size + f_size
-                    f_suffix = pathlib.Path(file).suffix
-                    if not f_suffix or f_suffix == '':
-                        f_suffix = 'no_suffix'
-                    self.print_trace(f'Counting {path} ({f_suffix} - {f_size})..')
-                    fc = file_types.get(f_suffix)
-                    if not fc:
-                        fc = [1, f_size]
-                    else:
-                        fc[0] = fc[0] + 1
-                        fc[1] = fc[1] + f_size
-                    file_types[f_suffix] = fc
-                if spinner:
-                    spinner.next()
-        # End for loop
-        if spinner:
-            spinner.finish()
+        spinner_ctx = Spinner('Searching ') if (not self.quiet and self.isatty) else nullcontext()
+
+        with spinner_ctx as spinner:
+            file_types = {}
+            file_count = 0
+            file_size = 0
+            for root, dirs, files in os.walk(scan_dir):
+                self.print_trace(f'U Root: {root}, Dirs: {dirs}, Files {files}')
+                dirs[:] = self.__filter_dirs(dirs)  # Strip out unwanted directories
+                filtered_files = self.__filter_files(files)  # Strip out unwanted files
+                self.print_trace(f'F Root: {root}, Dirs: {dirs}, Files {filtered_files}')
+                for file in filtered_files:  # Cycle through each filtered file
+                    path = os.path.join(root, file)
+                    f_size = 0
+                    try:
+                        f_size = os.stat(path).st_size
+                    except Exception as e:
+                        self.print_trace(f'Ignoring missing symlink file: {file} ({e})')  # broken symlink
+                    if f_size > 0:  # Ignore broken links and empty files
+                        file_count = file_count + 1
+                        file_size = file_size + f_size
+                        f_suffix = pathlib.Path(file).suffix
+                        if not f_suffix or f_suffix == '':
+                            f_suffix = 'no_suffix'
+                        self.print_trace(f'Counting {path} ({f_suffix} - {f_size})..')
+                        fc = file_types.get(f_suffix)
+                        if not fc:
+                            fc = [1, f_size]
+                        else:
+                            fc[0] = fc[0] + 1
+                            fc[1] = fc[1] + f_size
+                        file_types[f_suffix] = fc
+                    if spinner:
+                        spinner.next()
+            # End for loop
         self.print_stderr(f'Found {file_count:,.0f} files with a total size of {file_size / (1 << 20):,.2f} MB.')
         if file_types:
             csv_dict = []
diff --git a/src/scanoss/scanner.py b/src/scanoss/scanner.py
index 63803e5..6e5d147 100644
--- a/src/scanoss/scanner.py
+++ b/src/scanoss/scanner.py
@@ -26,6 +26,7 @@
 import json
 import os
 import sys
+from contextlib import nullcontext
 from pathlib import Path
 from typing import Any, Dict, List, Optional
 
@@ -363,62 +364,60 @@ def scan_folder(self, scan_dir: str) -> bool:  # noqa: PLR0912, PLR0915
             operation_type='scanning',
         )
         self.print_msg(f'Searching {scan_dir} for files to fingerprint...')
-        spinner = None
-        if not self.quiet and self.isatty:
-            spinner = Spinner('Fingerprinting ')
-        save_wfps_for_print = not self.no_wfp_file or not self.threaded_scan
-        wfp_list = []
-        scan_block = ''
-        scan_size = 0
-        queue_size = 0
-        file_count = 0  # count all files fingerprinted
-        wfp_file_count = 0  # count number of files in each queue post
-        scan_started = False
-
-        to_scan_files = file_filters.get_filtered_files_from_folder(scan_dir)
-        for to_scan_file in to_scan_files:
-            if self.threaded_scan and self.threaded_scan.stop_scanning():
-                self.print_stderr('Warning: Aborting fingerprinting as the scanning service is not available.')
-                break
-            self.print_debug(f'Fingerprinting {to_scan_file}...')
-            if spinner:
-                spinner.next()
-            abs_path = Path(scan_dir, to_scan_file).resolve()
-            wfp = self.winnowing.wfp_for_file(str(abs_path), to_scan_file)
-            if wfp is None or wfp == '':
-                self.print_debug(f'No WFP returned for {to_scan_file}. Skipping.')
-                continue
-            if save_wfps_for_print:
-                wfp_list.append(wfp)
-            file_count += 1
-            if self.threaded_scan:
-                wfp_size = len(wfp.encode('utf-8'))
-                # If the WFP is bigger than the max post size and we already have something stored in the scan block,
-                # add it to the queue
-                if scan_block != '' and (wfp_size + scan_size) >= self.max_post_size:
-                    self.threaded_scan.queue_add(scan_block)
-                    queue_size += 1
-                    scan_block = ''
-                    wfp_file_count = 0
-                scan_block += wfp
-                scan_size = len(scan_block.encode('utf-8'))
-                wfp_file_count += 1
-                # If the scan request block (group of WFPs) or larger than the POST size or we have reached the file limit, add it to the queue  # noqa: E501
-                if wfp_file_count > self.post_file_count or scan_size >= self.max_post_size:
-                    self.threaded_scan.queue_add(scan_block)
-                    queue_size += 1
-                    scan_block = ''
-                    wfp_file_count = 0
-                if not scan_started and queue_size > self.nb_threads:  # Start scanning if we have something to do
-                    scan_started = True
-                    if not self.threaded_scan.run(wait=False):
-                        self.print_stderr('Warning: Some errors encounted while scanning. Results might be incomplete.')
-                        success = False
-        # End for loop
-        if self.threaded_scan and scan_block != '':
-            self.threaded_scan.queue_add(scan_block)  # Make sure all files have been submitted
-        if spinner:
-            spinner.finish()
+        spinner_ctx = Spinner('Fingerprinting ') if (not self.quiet and self.isatty) else nullcontext()
+
+        with spinner_ctx as spinner:
+            save_wfps_for_print = not self.no_wfp_file or not self.threaded_scan
+            wfp_list = []
+            scan_block = ''
+            scan_size = 0
+            queue_size = 0
+            file_count = 0  # count all files fingerprinted
+            wfp_file_count = 0  # count number of files in each queue post
+            scan_started = False
+
+            to_scan_files = file_filters.get_filtered_files_from_folder(scan_dir)
+            for to_scan_file in to_scan_files:
+                if self.threaded_scan and self.threaded_scan.stop_scanning():
+                    self.print_stderr('Warning: Aborting fingerprinting as the scanning service is not available.')
+                    break
+                self.print_debug(f'Fingerprinting {to_scan_file}...')
+                if spinner:
+                    spinner.next()
+                abs_path = Path(scan_dir, to_scan_file).resolve()
+                wfp = self.winnowing.wfp_for_file(str(abs_path), to_scan_file)
+                if wfp is None or wfp == '':
+                    self.print_debug(f'No WFP returned for {to_scan_file}. Skipping.')
+                    continue
+                if save_wfps_for_print:
+                    wfp_list.append(wfp)
+                file_count += 1
+                if self.threaded_scan:
+                    wfp_size = len(wfp.encode('utf-8'))
+                    # If the WFP is bigger than the max post size and we already have something stored in the scan block,
+                    # add it to the queue
+                    if scan_block != '' and (wfp_size + scan_size) >= self.max_post_size:
+                        self.threaded_scan.queue_add(scan_block)
+                        queue_size += 1
+                        scan_block = ''
+                        wfp_file_count = 0
+                    scan_block += wfp
+                    scan_size = len(scan_block.encode('utf-8'))
+                    wfp_file_count += 1
+                    # If the scan request block (group of WFPs) is larger than the POST size or we have reached the file limit, add it to the queue  # noqa: E501
+                    if wfp_file_count > self.post_file_count or scan_size >= self.max_post_size:
+                        self.threaded_scan.queue_add(scan_block)
+                        queue_size += 1
+                        scan_block = ''
+                        wfp_file_count = 0
+                    if not scan_started and queue_size > self.nb_threads:  # Start scanning if we have something to do
+                        scan_started = True
+                        if not self.threaded_scan.run(wait=False):
+                            self.print_stderr('Warning: Some errors encountered while scanning. Results might be incomplete.')
+                            success = False
+            # End for loop
+            if self.threaded_scan and scan_block != '':
+                self.threaded_scan.queue_add(scan_block)  # Make sure all files have been submitted
         if file_count > 0:
             if save_wfps_for_print:
                 # Write a WFP file if no threading is requested
@@ -631,63 +630,61 @@ def scan_files(self, files: []) -> bool:  # noqa: PLR0912, PLR0915
             skip_extensions=self.skip_extensions,
             operation_type='scanning',
         )
-        spinner = None
-        if not self.quiet and self.isatty:
-            spinner = Spinner('Fingerprinting ')
-        save_wfps_for_print = not self.no_wfp_file or not self.threaded_scan
-        wfp_list = []
-        scan_block = ''
-        scan_size = 0
-        queue_size = 0
-        file_count = 0  # count all files fingerprinted
-        wfp_file_count = 0  # count number of files in each queue post
-        scan_started = False
-
-        to_scan_files = file_filters.get_filtered_files_from_files(files)
-        for file in to_scan_files:
-            if self.threaded_scan and self.threaded_scan.stop_scanning():
-                self.print_stderr('Warning: Aborting fingerprinting as the scanning service is not available.')
-                break
-            self.print_debug(f'Fingerprinting {file}...')
-            if spinner:
-                spinner.next()
-            wfp = self.winnowing.wfp_for_file(file, file)
-            if wfp is None or wfp == '':
-                self.print_debug(f'No WFP returned for {file}. Skipping.')
-                continue
-            if save_wfps_for_print:
-                wfp_list.append(wfp)
-            file_count += 1
-            if self.threaded_scan:
-                wfp_size = len(wfp.encode('utf-8'))
-                # If the WFP is bigger than the max post size and we already have something stored in the scan block, add it to the queue  # noqa: E501
-                if scan_block != '' and (wfp_size + scan_size) >= self.max_post_size:
-                    self.threaded_scan.queue_add(scan_block)
-                    queue_size += 1
-                    scan_block = ''
-                    wfp_file_count = 0
-                scan_block += wfp
-                scan_size = len(scan_block.encode('utf-8'))
-                wfp_file_count += 1
-                # If the scan request block (group of WFPs) or larger than the POST size or we have reached the file limit, add it to the queue  # noqa: E501
-                if wfp_file_count > self.post_file_count or scan_size >= self.max_post_size:
-                    self.threaded_scan.queue_add(scan_block)
-                    queue_size += 1
-                    scan_block = ''
-                    wfp_file_count = 0
-                if not scan_started and queue_size > self.nb_threads:  # Start scanning if we have something to do
-                    scan_started = True
-                    if not self.threaded_scan.run(wait=False):
-                        self.print_stderr(
-                            'Warning: Some errors encounted while scanning. Results might be incomplete.'
-                        )
-                        success = False
+        spinner_ctx = Spinner('Fingerprinting ') if (not self.quiet and self.isatty) else nullcontext()
+
+        with spinner_ctx as spinner:
+            save_wfps_for_print = not self.no_wfp_file or not self.threaded_scan
+            wfp_list = []
+            scan_block = ''
+            scan_size = 0
+            queue_size = 0
+            file_count = 0  # count all files fingerprinted
+            wfp_file_count = 0  # count number of files in each queue post
+            scan_started = False
+
+            to_scan_files = file_filters.get_filtered_files_from_files(files)
+            for file in to_scan_files:
+                if self.threaded_scan and self.threaded_scan.stop_scanning():
+                    self.print_stderr('Warning: Aborting fingerprinting as the scanning service is not available.')
+                    break
+                self.print_debug(f'Fingerprinting {file}...')
+                if spinner:
+                    spinner.next()
+                wfp = self.winnowing.wfp_for_file(file, file)
+                if wfp is None or wfp == '':
+                    self.print_debug(f'No WFP returned for {file}. Skipping.')
+                    continue
+                if save_wfps_for_print:
+                    wfp_list.append(wfp)
+                file_count += 1
+                if self.threaded_scan:
+                    wfp_size = len(wfp.encode('utf-8'))
+                    # If the WFP is bigger than the max post size and we already have something stored in the scan block, add it to the queue  # noqa: E501
+                    if scan_block != '' and (wfp_size + scan_size) >= self.max_post_size:
+                        self.threaded_scan.queue_add(scan_block)
+                        queue_size += 1
+                        scan_block = ''
+                        wfp_file_count = 0
+                    scan_block += wfp
+                    scan_size = len(scan_block.encode('utf-8'))
+                    wfp_file_count += 1
+                    # If the scan request block (group of WFPs) is larger than the POST size or we have reached the file limit, add it to the queue  # noqa: E501
+                    if wfp_file_count > self.post_file_count or scan_size >= self.max_post_size:
+                        self.threaded_scan.queue_add(scan_block)
+                        queue_size += 1
+                        scan_block = ''
+                        wfp_file_count = 0
+                    if not scan_started and queue_size > self.nb_threads:  # Start scanning if we have something to do
+                        scan_started = True
+                        if not self.threaded_scan.run(wait=False):
+                            self.print_stderr(
+                                'Warning: Some errors encountered while scanning. Results might be incomplete.'
+                            )
+                            success = False
 
-        # End for loop
-        if self.threaded_scan and scan_block != '':
-            self.threaded_scan.queue_add(scan_block)  # Make sure all files have been submitted
-        if spinner:
-            spinner.finish()
+            # End for loop
+            if self.threaded_scan and scan_block != '':
+                self.threaded_scan.queue_add(scan_block)  # Make sure all files have been submitted
         if file_count > 0:
             if save_wfps_for_print:
                 # Write a WFP file if no threading is requested
@@ -778,73 +775,72 @@ def scan_wfp_file(self, file: str = None) -> bool:  # noqa: PLR0912, PLR0915
         self.print_debug(f'Found {file_count} files to process.')
         raw_output = '{\n'
         file_print = ''
-        bar = None
-        if not self.quiet and self.isatty:
-            bar = Bar('Scanning', max=file_count)
-            bar.next(0)
-        with open(wfp_file) as f:
-            for line in f:
-                if line.startswith(WFP_FILE_START):
-                    if file_print:
-                        wfp += file_print  # Store the WFP for the current file
-                        cur_size = len(wfp.encode('utf-8'))
-                    file_print = line  # Start storing the next file
-                    cur_files += 1
-                    batch_files += 1
-                else:
-                    file_print += line  # Store the rest of the WFP for this file
-                l_size = cur_size + len(file_print.encode('utf-8'))
-                # Hit the max post size, so sending the current batch and continue processing
-                if l_size >= self.max_post_size and wfp:
-                    self.print_debug(
-                        f'Sending {batch_files} ({cur_files}) of'
-                        f' {file_count} ({len(wfp.encode("utf-8"))} bytes) files to the ScanOSS API.'
-                    )
-                    if self.debug and cur_size > self.max_post_size:
-                        Scanner.print_stderr(f'Warning: Post size {cur_size} greater than limit {self.max_post_size}')
-                    scan_resp = self.scanoss_api.scan(wfp, max_component['name'])  # Scan current WFP and store
-                    if bar:
-                        bar.next(batch_files)
-                    if scan_resp is not None:
-                        for key, value in scan_resp.items():
-                            raw_output += '  "%s":%s,' % (key, json.dumps(value, indent=2))
-                            for v in value:
-                                if hasattr(v, 'get'):
-                                    if v.get('id') != 'none':
-                                        vcv = '%s:%s:%s' % (v.get('vendor'), v.get('component'), v.get('version'))
-                                        components[vcv] = components[vcv] + 1 if vcv in components else 1
-                                        if max_component['hits'] < components[vcv]:
-                                            max_component['name'] = v.get('component')
-                                            max_component['hits'] = components[vcv]
-                                else:
-                                    Scanner.print_stderr(f'Warning: Unknown value: {v}')
-                    else:
-                        success = False
-                    batch_files = 0
-                    wfp = ''
-        if file_print:
-            wfp += file_print  # Store the WFP for the current file
-        if wfp:
-            self.print_debug(
-                f'Sending {batch_files} ({cur_files}) of'
-                f' {file_count} ({len(wfp.encode("utf-8"))} bytes) files to the ScanOSS API.'
-            )
-            scan_resp = self.scanoss_api.scan(wfp, max_component['name'])  # Scan current WFP and store
+        bar_ctx = Bar('Scanning', max=file_count) if (not self.quiet and self.isatty) else nullcontext()
+
+        with bar_ctx as bar:
             if bar:
-                bar.next(batch_files)
-            first = True
-            if scan_resp is not None:
-                for key, value in scan_resp.items():
-                    if first:
-                        raw_output += '  "%s":%s' % (key, json.dumps(value, indent=2))
-                        first = False
+                bar.next(0)
+            with open(wfp_file) as f:
+                for line in f:
+                    if line.startswith(WFP_FILE_START):
+                        if file_print:
+                            wfp += file_print  # Store the WFP for the current file
+                            cur_size = len(wfp.encode('utf-8'))
+                        file_print = line  # Start storing the next file
+                        cur_files += 1
+                        batch_files += 1
                     else:
-                        raw_output += ',\n  "%s":%s' % (key, json.dumps(value, indent=2))
-            else:
-                success = False
+                        file_print += line  # Store the rest of the WFP for this file
+                    l_size = cur_size + len(file_print.encode('utf-8'))
+                    # Hit the max post size, so send the current batch and continue processing
+                    if l_size >= self.max_post_size and wfp:
+                        self.print_debug(
+                            f'Sending {batch_files} ({cur_files}) of'
+                            f' {file_count} ({len(wfp.encode("utf-8"))} bytes) files to the ScanOSS API.'
+                        )
+                        if self.debug and cur_size > self.max_post_size:
+                            Scanner.print_stderr(f'Warning: Post size {cur_size} greater than limit {self.max_post_size}')
+                        scan_resp = self.scanoss_api.scan(wfp, max_component['name'])  # Scan current WFP and store
+                        if bar:
+                            bar.next(batch_files)
+                        if scan_resp is not None:
+                            for key, value in scan_resp.items():
+                                raw_output += '  "%s":%s,' % (key, json.dumps(value, indent=2))
+                                for v in value:
+                                    if hasattr(v, 'get'):
+                                        if v.get('id') != 'none':
+                                            vcv = '%s:%s:%s' % (v.get('vendor'), v.get('component'), v.get('version'))
+                                            components[vcv] = components[vcv] + 1 if vcv in components else 1
+                                            if max_component['hits'] < components[vcv]:
+                                                max_component['name'] = v.get('component')
+                                                max_component['hits'] = components[vcv]
+                                    else:
+                                        Scanner.print_stderr(f'Warning: Unknown value: {v}')
+                        else:
+                            success = False
+                        batch_files = 0
+                        wfp = ''
+            if file_print:
+                wfp += file_print  # Store the WFP for the current file
+            if wfp:
+                self.print_debug(
+                    f'Sending {batch_files} ({cur_files}) of'
+                    f' {file_count} ({len(wfp.encode("utf-8"))} bytes) files to the ScanOSS API.'
+                )
+                scan_resp = self.scanoss_api.scan(wfp, max_component['name'])  # Scan current WFP and store
+                if bar:
+                    bar.next(batch_files)
+                first = True
+                if scan_resp is not None:
+                    for key, value in scan_resp.items():
+                        if first:
+                            raw_output += '  "%s":%s' % (key, json.dumps(value, indent=2))
+                            first = False
+                        else:
+                            raw_output += ',\n  "%s":%s' % (key, json.dumps(value, indent=2))
+                else:
+                    success = False
         raw_output += '\n}'
-        if bar:
-            bar.finish()
         if self.output_format == 'plain':
             self.__log_result(raw_output)
         elif self.output_format == 'cyclonedx':
@@ -1052,19 +1048,16 @@ def wfp_folder(self, scan_dir: str, wfp_file: str = None):
         )
         wfps = ''
         self.print_msg(f'Searching {scan_dir} for files to fingerprint...')
-        spinner = None
-        if not self.quiet and self.isatty:
-            spinner = Spinner('Fingerprinting ')
-
-        to_fingerprint_files = file_filters.get_filtered_files_from_folder(scan_dir)
-        for file in to_fingerprint_files:
-            if spinner:
-                spinner.next()
-            abs_path = Path(scan_dir, file).resolve()
-            self.print_debug(f'Fingerprinting {file}...')
-            wfps += self.winnowing.wfp_for_file(str(abs_path), file)
-        if spinner:
-            spinner.finish()
+        spinner_ctx = Spinner('Fingerprinting ') if (not self.quiet and self.isatty) else nullcontext()
+
+        with spinner_ctx as spinner:
+            to_fingerprint_files = file_filters.get_filtered_files_from_folder(scan_dir)
+            for file in to_fingerprint_files:
+                if spinner:
+                    spinner.next()
+                abs_path = Path(scan_dir, file).resolve()
+                self.print_debug(f'Fingerprinting {file}...')
+                wfps += self.winnowing.wfp_for_file(str(abs_path), file)
         if wfps:
             if wfp_file:
                 self.print_stderr(f'Writing fingerprints to {wfp_file}')
diff --git a/src/scanoss/scanners/folder_hasher.py b/src/scanoss/scanners/folder_hasher.py
index eb4bd72..549a7c1 100644
--- a/src/scanoss/scanners/folder_hasher.py
+++ b/src/scanoss/scanners/folder_hasher.py
@@ -157,38 +157,38 @@ def _build_root_node(
         # Sort the files by name to ensure the hash is the same for the same folder
         filtered_files.sort()
 
-        bar = Bar('Hashing files...', max=len(filtered_files))
-        full_file_path = ''
-        for file_path in filtered_files:
-            try:
-                file_path_obj = Path(file_path) if isinstance(file_path, str) else file_path
-                full_file_path = file_path_obj if file_path_obj.is_absolute() else root / file_path_obj
+        bar_ctx = Bar('Hashing files...', max=len(filtered_files))
 
-                self.base.print_debug(f'\nHashing file {str(full_file_path)}')
+        with bar_ctx as bar:
+            full_file_path = ''
+            for file_path in filtered_files:
+                try:
+                    file_path_obj = Path(file_path) if isinstance(file_path, str) else file_path
+                    full_file_path = file_path_obj if file_path_obj.is_absolute() else root / file_path_obj
 
-                file_bytes = full_file_path.read_bytes()
-                key = CRC64.get_hash_buff(file_bytes)
-                key_str = ''.join(f'{b:02x}' for b in key)
-                rel_path = str(full_file_path.relative_to(root))
+                    self.base.print_debug(f'\nHashing file {str(full_file_path)}')
 
-                file_item = DirectoryFile(rel_path, key, key_str)
+                    file_bytes = full_file_path.read_bytes()
+                    key = CRC64.get_hash_buff(file_bytes)
+                    key_str = ''.join(f'{b:02x}' for b in key)
+                    rel_path = str(full_file_path.relative_to(root))
 
-                current_node = root_node
-                for part in Path(rel_path).parent.parts:
-                    child_path = str(Path(current_node.path) / part)
-                    if child_path not in current_node.children:
-                        current_node.children[child_path] = DirectoryNode(child_path)
-                    current_node = current_node.children[child_path]
-                current_node.files.append(file_item)
+                    file_item = DirectoryFile(rel_path, key, key_str)
 
-                root_node.files.append(file_item)
+                    current_node = root_node
+                    for part in Path(rel_path).parent.parts:
+                        child_path = str(Path(current_node.path) / part)
+                        if child_path not in current_node.children:
+                            current_node.children[child_path] = DirectoryNode(child_path)
+                        current_node = current_node.children[child_path]
+                    current_node.files.append(file_item)
 
-            except Exception as e:
-                self.base.print_debug(f'Skipping file {full_file_path}: {str(e)}')
+                    root_node.files.append(file_item)
 
-            bar.next()
+                except Exception as e:
+                    self.base.print_debug(f'Skipping file {full_file_path}: {str(e)}')
 
-        bar.finish()
+                bar.next()
 
         return root_node
 
     def _hash_calc_from_node(self, node: DirectoryNode, current_depth: int = 1) -> dict:
diff --git a/src/scanoss/scanners/scanner_hfh.py b/src/scanoss/scanners/scanner_hfh.py
index 7ac6463..8d4a284 100644
--- a/src/scanoss/scanners/scanner_hfh.py
+++ b/src/scanoss/scanners/scanner_hfh.py
@@ -110,6 +110,19 @@ def __init__(  # noqa: PLR0913
         self.min_accepted_score = min_accepted_score
         self.use_grpc = use_grpc
 
+    def _execute_grpc_scan(self, hfh_request: Dict) -> None:
+        """
+        Execute the folder hash scan request (run in a worker thread).
+
+        Args:
+            hfh_request: Request dictionary for the gRPC call
+        """
+        try:
+            self.scan_results = self.client.folder_hash_scan(hfh_request, self.use_grpc)
+        except Exception as e:
+            self.base.print_stderr(f'Error during folder hash scan: {e}')
+            self.scan_results = None
+
     def scan(self) -> Optional[Dict]:
         """
         Scan the provided directory using the folder hashing algorithm.
@@ -124,25 +137,17 @@ def scan(self) -> Optional[Dict]:
             'min_accepted_score': self.min_accepted_score,
         }
 
-        spinner = Spinner('Scanning folder...')
-        stop_spinner = False
+        spinner_ctx = Spinner('Scanning folder...')
+
+        with spinner_ctx as spinner:
+            grpc_thread = threading.Thread(target=self._execute_grpc_scan, args=(hfh_request,))
+            grpc_thread.start()
 
-        def spin():
-            while not stop_spinner:
+            while grpc_thread.is_alive():
                 spinner.next()
                 time.sleep(0.1)
 
-        spinner_thread = threading.Thread(target=spin)
-        spinner_thread.start()
-
-        try:
-            response = self.client.folder_hash_scan(hfh_request, self.use_grpc)
-            if response:
-                self.scan_results = response
-        finally:
-            stop_spinner = True
-            spinner_thread.join()
-            spinner.finish()
+            grpc_thread.join()
 
         return self.scan_results
diff --git a/src/scanoss/threadedscanning.py b/src/scanoss/threadedscanning.py
index e9784e3..d0a5cad 100644
--- a/src/scanoss/threadedscanning.py
+++ b/src/scanoss/threadedscanning.py
@@ -22,6 +22,7 @@
     THE SOFTWARE.
 """
 
+import atexit
 import os
 import queue
 import sys
@@ -77,6 +78,8 @@ def __init__(
         if nb_threads > MAX_ALLOWED_THREADS:
             self.print_msg(f'Warning: Requested threads too large: {nb_threads}. Reducing to {MAX_ALLOWED_THREADS}')
             self.nb_threads = MAX_ALLOWED_THREADS
+        # Register cleanup to ensure progress bar is finished on exit
+        atexit.register(self.complete_bar)
 
     @staticmethod
     def __count_files_in_wfp(wfp: str):
@@ -101,6 +104,13 @@ def complete_bar(self):
         if self.bar:
             self.bar.finish()
 
+    def __del__(self):
+        """Ensure progress bar is cleaned up when object is destroyed"""
+        try:
+            self.complete_bar()
+        except Exception:
+            pass  # Ignore errors during cleanup
+
     def set_bar(self, bar: Bar) -> None:
         """
         Set the Progress Bar to display progress while scanning
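
Note on the recurring pattern above: the 'progress' package's Spinner and Bar inherit from progress.Infinite, whose __exit__ calls finish(), and finish() is what prints the show-cursor escape sequence. Wrapping the optional spinner in a with-block therefore guarantees the cursor is restored even when Ctrl+C (KeyboardInterrupt) aborts the loop, which the old trailing spinner.finish() calls could not. A minimal standalone sketch of the idea (process() and its item loop are illustrative, not code from this repo):

    import sys
    from contextlib import nullcontext

    from progress.spinner import Spinner

    def process(items, quiet=False):
        # Build a real Spinner only for interactive, non-quiet runs.
        # nullcontext() yields None, so the 'if spinner:' guard below still works.
        spinner_ctx = Spinner('Working ') if (not quiet and sys.stdout.isatty()) else nullcontext()
        with spinner_ctx as spinner:
            # Spinner.__exit__ runs finish() on normal exit, on exceptions,
            # and on KeyboardInterrupt, restoring the cursor in every case.
            for item in items:
                if spinner:
                    spinner.next()
                # ... per-item work goes here ...

Using a conditional context manager rather than an 'if spinner: spinner.finish()' epilogue keeps the quiet/non-tty path on exactly the same code shape as the interactive path.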
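Note on scanner_hfh.py: the roles are inverted rather than just re-wrapped. Previously the gRPC call ran on the main thread while a helper thread animated the spinner; now the worker thread makes the call and the main thread animates. Since CPython delivers KeyboardInterrupt to the main thread, Ctrl+C now lands inside the with-block, whose __exit__ still calls spinner.finish(). The shape, reduced to a runnable sketch (slow_call() is a hypothetical stand-in for client.folder_hash_scan(), which is not reproduced here):

    import threading
    import time

    from progress.spinner import Spinner

    result = {}

    def slow_call():
        time.sleep(2)  # pretend this is the blocking gRPC request
        result['value'] = 42

    with Spinner('Scanning folder...') as spinner:
        worker = threading.Thread(target=slow_call)
        worker.start()
        while worker.is_alive():  # animate while the worker runs
            spinner.next()
            time.sleep(0.1)
        worker.join()  # reap the finished worker

The atexit.register(self.complete_bar) call and the __del__ hook in threadedscanning.py serve as a further safety net: if the process exits with the threaded progress bar still active, complete_bar() runs at interpreter shutdown and finishes the bar.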