import os
import time
import glob
import typing as T

from multiprocessing.pool import ThreadPool


def search_file(file_location: str, search_string: str) -> bool:
    # Return True if the search string appears anywhere in the file.
    with open(file_location, "r", encoding="utf8") as file:
        return search_string in file.read()


def search_files_concurrently(file_locations: T.List[str], search_string: str) -> None:
    # Fan the per-file searches out over a thread pool (one worker per CPU by default).
    with ThreadPool() as pool:
        results = pool.starmap(
            search_file,
            ((file_location, search_string) for file_location in file_locations),
        )
        for result, file_name in zip(results, file_locations):
            if result:
                print(f"Found string in file: `{file_name}`")


def search_files_sequentially(file_locations: T.List[str], search_string: str) -> None:
    # Check each file one after another on the main thread.
    for file_name in file_locations:
        result = search_file(file_name, search_string)
        if result:
            print(f"Found word in file: `{file_name}`")


if __name__ == "__main__":
    file_locations = list(glob.glob(f"{os.path.abspath(os.getcwd())}/*.py"))
    search_string = input("what word are you trying to find?: ")

    start_time = time.perf_counter()
    search_files_concurrently(file_locations, search_string)
    process_time = time.perf_counter() - start_time
    print(f"PROCESS TIME: {process_time}")
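The first console run below comes from a sibling script, find_files_sequential.py, whose source is not shown here; a minimal sketch, assuming it reuses the same helpers and simply times search_files_sequentially instead, might look like this:

# find_files_sequential.py -- hypothetical reconstruction, not the original source.
import os
import time
import glob
import typing as T


def search_file(file_location: str, search_string: str) -> bool:
    # Same helper as in the concurrent listing above.
    with open(file_location, "r", encoding="utf8") as file:
        return search_string in file.read()


def search_files_sequentially(file_locations: T.List[str], search_string: str) -> None:
    # Check each file one after another on the main thread.
    for file_name in file_locations:
        if search_file(file_name, search_string):
            print(f"Found word in file: `{file_name}`")


if __name__ == "__main__":
    file_locations = list(glob.glob(f"{os.path.abspath(os.getcwd())}/*.py"))
    search_string = input("what word are you trying to find?: ")

    start_time = time.perf_counter()
    search_files_sequentially(file_locations, search_string)
    process_time = time.perf_counter() - start_time
    print(f"PROCESS TIME: {process_time}")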
vagrant@vagrant:~/dev/rust/parallel/python$ python3 find_files_sequential.py
what word are you trying to find?: queue
Found word in file: `/home/vagrant/dev/rust/parallel/python/thread_pool.py`
Found word in file: `/home/vagrant/dev/rust/parallel/python/pipeline.py`
Found word in file: `/home/vagrant/dev/rust/parallel/python/message_queue.py`
PROCESS TIME: 0.004676494048908353
vagrant@vagrant:~/dev/rust/parallel/python$ python3 find_files_concurrent.py
what word are you trying to find?: queue
Found string in file: `/home/vagrant/dev/rust/parallel/python/thread_pool.py`
Found string in file: `/home/vagrant/dev/rust/parallel/python/pipeline.py`
Found string in file: `/home/vagrant/dev/rust/parallel/python/message_queue.py`
PROCESS TIME: 0.011579621117562056
Speed-wise, the threaded version actually seems to be slower. I wonder why that is...
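One plausible explanation (my own assumption, not something measured in the runs above) is that the workload is simply too small for the pool to pay off: the whole sequential search finishes in under 5 ms, so the fixed cost of creating and tearing down the thread pool, plus the GIL serializing the substring check on each file's contents, can easily outweigh any gain from overlapping the file I/O. A quick micro-benchmark of the pool startup cost alone could look like this:

import time
from multiprocessing.pool import ThreadPool

# Measure only the creation and teardown of a default-sized ThreadPool
# (one worker thread per CPU), with no work submitted to it.
start = time.perf_counter()
with ThreadPool() as pool:
    pass
print(f"POOL STARTUP/TEARDOWN TIME: {time.perf_counter() - start}")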