diff --git a/concurrency-overview/README.md b/concurrency-overview/README.md
deleted file mode 100644
index 3e3864aa13..0000000000
--- a/concurrency-overview/README.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# Speed Up Your Python Program With Concurrency: Code Examples
-
-Corresponding code to the Real Python tutorial, "[Speed up your Python Program with Concurrency](https://realpython.com/python-concurrency/)."
-
-To run the code here, use:
-
-    pip install -r requirements.txt
-
-This will ensure you have the required packages.
diff --git a/concurrency-overview/cpu_mp.py b/concurrency-overview/cpu_mp.py
deleted file mode 100755
index fedc305cdf..0000000000
--- a/concurrency-overview/cpu_mp.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python3
-import multiprocessing
-import time
-
-
-def cpu_bound(number):
-    return sum(i * i for i in range(number))
-
-
-def find_sums(numbers):
-    with multiprocessing.Pool() as pool:
-        pool.map(cpu_bound, numbers)
-
-
-if __name__ == "__main__":
-    numbers = [5_000_000 + x for x in range(20)]
-
-    start_time = time.time()
-    find_sums(numbers)
-    duration = time.time() - start_time
-    print(f"Duration {duration} seconds")
diff --git a/concurrency-overview/cpu_non_concurrent.py b/concurrency-overview/cpu_non_concurrent.py
deleted file mode 100755
index ec98f080bd..0000000000
--- a/concurrency-overview/cpu_non_concurrent.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python3
-import time
-
-
-def cpu_bound(number):
-    return sum(i * i for i in range(number))
-
-
-def find_sums(numbers):
-    for number in numbers:
-        cpu_bound(number)
-
-
-if __name__ == "__main__":
-    numbers = [5_000_000 + x for x in range(20)]
-
-    start_time = time.time()
-    find_sums(numbers)
-    duration = time.time() - start_time
-    print(f"Duration {duration} seconds")
diff --git a/concurrency-overview/cpu_threading.py b/concurrency-overview/cpu_threading.py
deleted file mode 100755
index c15ef2006c..0000000000
--- a/concurrency-overview/cpu_threading.py
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env python3
-import concurrent.futures
-import time
-
-
-def cpu_bound(number):
-    return sum(i * i for i in range(number))
-
-
-def find_sums(numbers):
-    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-        executor.map(cpu_bound, numbers)
-
-
-if __name__ == "__main__":
-    numbers = [5_000_000 + x for x in range(20)]
-
-    start_time = time.time()
-    find_sums(numbers)
-    duration = time.time() - start_time
-    print(f"Duration {duration} seconds")
diff --git a/concurrency-overview/io_mp.py b/concurrency-overview/io_mp.py
deleted file mode 100755
index 174d99fceb..0000000000
--- a/concurrency-overview/io_mp.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python3
-import multiprocessing
-import time
-
-import requests
-
-session = None
-
-
-def set_global_session():
-    global session
-    if not session:
-        session = requests.Session()
-
-
-def download_site(url):
-    with session.get(url) as response:
-        name = multiprocessing.current_process().name
-        print(f"{name}:Read {len(response.content)} from {url}")
-
-
-def download_all_sites(sites):
-    with multiprocessing.Pool(initializer=set_global_session) as pool:
-        pool.map(download_site, sites)
-
-
-if __name__ == "__main__":
-    sites = [
-        "https://www.jython.org",
-        "http://olympus.realpython.org/dice",
-    ] * 80
-    start_time = time.time()
-    download_all_sites(sites)
-    duration = time.time() - start_time
-    print(f"Downloaded {len(sites)} in {duration} seconds")
diff --git a/concurrency-overview/race_condition.py b/concurrency-overview/race_condition.py
deleted file mode 100755
index 7f75c9faf8..0000000000
--- a/concurrency-overview/race_condition.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python3
-import concurrent.futures
-
-counter = 0
-
-
-def increment_counter(fake_value):
-    global counter
-    for _ in range(100):
-        counter += 1
-
-
-if __name__ == "__main__":
-    fake_data = [x for x in range(5000)]
-    counter = 0
-    with concurrent.futures.ThreadPoolExecutor(max_workers=5000) as executor:
-        executor.map(increment_counter, fake_data)
diff --git a/concurrency-overview/requirements.txt b/concurrency-overview/requirements.txt
deleted file mode 100644
index 245c03137f..0000000000
--- a/concurrency-overview/requirements.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-aiohttp==3.4.4
-asks==2.0.0
-astroid==2.0.4
-async-generator==1.10
-async-timeout==3.0.1
-atomicwrites==1.2.0
-attrs==18.1.0
-certifi==2018.8.13
-chardet==3.0.4
-contextvars==2.3
-h11==0.8.1
-idna==2.7
-immutables==0.6
-isort==4.3.4
-lazy-object-proxy==1.3.1
-mccabe==0.6.1
-more-itertools==4.3.0
-multidict==4.4.2
-multio==0.2.3
-outcome==0.1.0
-pathlib2==2.3.2
-pluggy==0.7.1
-py==1.6.0
-pycodestyle==2.3.1
-pytest==3.7.3
-requests==2.19.1
-six==1.11.0
-sniffio==1.0.0
-sortedcontainers==2.0.4
-tqdm==4.25.0
-trio==0.6.0
-typed-ast==1.1.0
-urllib3==1.23
-wrapt==1.10.11
-yarl==1.2.6
diff --git a/python-concurrency/README.md b/python-concurrency/README.md
new file mode 100644
index 0000000000..79b90e8f13
--- /dev/null
+++ b/python-concurrency/README.md
@@ -0,0 +1,13 @@
+# Speed Up Your Python Program With Concurrency
+
+This folder contains the sample code for the [Speed Up Your Python Program With Concurrency](https://realpython.com/python-concurrency/) tutorial.
+
+To use this code, first create and activate a [virtual environment](https://realpython.com/python-virtual-environments-a-primer/). Then, install the required libraries into it:
+
+```sh
+$ python -m venv venv/
+$ source venv/bin/activate
+(venv) $ python -m pip install -r requirements.txt
+```
+
+This will ensure you have the required Python packages.
diff --git a/python-concurrency/cpu_asyncio.py b/python-concurrency/cpu_asyncio.py
new file mode 100644
index 0000000000..34efe1f72a
--- /dev/null
+++ b/python-concurrency/cpu_asyncio.py
@@ -0,0 +1,18 @@
+import asyncio
+import time
+
+
+async def main():
+    start_time = time.perf_counter()
+    tasks = [fib(35) for _ in range(20)]
+    await asyncio.gather(*tasks, return_exceptions=True)
+    duration = time.perf_counter() - start_time
+    print(f"Computed in {duration} seconds")
+
+
+async def fib(n):
+    return n if n < 2 else await fib(n - 2) + await fib(n - 1)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/python-concurrency/cpu_non_concurrent.py b/python-concurrency/cpu_non_concurrent.py
new file mode 100644
index 0000000000..9fd492f399
--- /dev/null
+++ b/python-concurrency/cpu_non_concurrent.py
@@ -0,0 +1,17 @@
+import time
+
+
+def main():
+    start_time = time.perf_counter()
+    for _ in range(20):
+        fib(35)
+    duration = time.perf_counter() - start_time
+    print(f"Computed in {duration} seconds")
+
+
+def fib(n):
+    return n if n < 2 else fib(n - 2) + fib(n - 1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python-concurrency/cpu_processes.py b/python-concurrency/cpu_processes.py
new file mode 100644
index 0000000000..e87c26c241
--- /dev/null
+++ b/python-concurrency/cpu_processes.py
@@ -0,0 +1,18 @@
+import concurrent.futures
+import time
+
+
+def main():
+    start_time = time.perf_counter()
+    with concurrent.futures.ProcessPoolExecutor() as executor:
+        executor.map(fib, [35] * 20)
+    duration = time.perf_counter() - start_time
+    print(f"Computed in {duration} seconds")
+
+
+def fib(n):
+    return n if n < 2 else fib(n - 2) + fib(n - 1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/python-concurrency/cpu_threads.py b/python-concurrency/cpu_threads.py
new file mode 100644
index 0000000000..8b46852f35
--- /dev/null
+++ b/python-concurrency/cpu_threads.py
@@ -0,0 +1,18 @@
+import concurrent.futures
+import time
+
+
+def main():
+    start_time = time.perf_counter()
+    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+        executor.map(fib, [35] * 20)
+    duration = time.perf_counter() - start_time
+    print(f"Computed in {duration} seconds")
+
+
+def fib(n):
+    return n if n < 2 else fib(n - 2) + fib(n - 1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/concurrency-overview/io_asyncio.py b/python-concurrency/io_asyncio.py
old mode 100755
new mode 100644
similarity index 50%
rename from concurrency-overview/io_asyncio.py
rename to python-concurrency/io_asyncio.py
index 7a062c4977..c022668e67
--- a/concurrency-overview/io_asyncio.py
+++ b/python-concurrency/io_asyncio.py
@@ -1,30 +1,30 @@
-#!/usr/bin/env python3
 import asyncio
 import time
 
 import aiohttp
 
 
-async def download_site(session, url):
-    async with session.get(url) as response:
-        print("Read {0} from {1}".format(response.content_length, url))
+async def main():
+    sites = [
+        "https://www.jython.org",
+        "http://olympus.realpython.org/dice",
+    ] * 80
+    start_time = time.perf_counter()
+    await download_all_sites(sites)
+    duration = time.perf_counter() - start_time
+    print(f"Downloaded {len(sites)} sites in {duration} seconds")
 
 
 async def download_all_sites(sites):
     async with aiohttp.ClientSession() as session:
-        tasks = []
-        for url in sites:
-            task = asyncio.ensure_future(download_site(session, url))
-            tasks.append(task)
+        tasks = [download_site(url, session) for url in sites]
         await asyncio.gather(*tasks, return_exceptions=True)
 
 
+async def download_site(url, session):
+    async with session.get(url) as response:
+        print(f"Read {len(await response.read())} bytes from {url}")
+
+
 if __name__ == "__main__":
-    sites = [
-        "https://www.jython.org",
-        "http://olympus.realpython.org/dice",
-    ] * 80
-    start_time = time.time()
-    asyncio.get_event_loop().run_until_complete(download_all_sites(sites))
-    duration = time.time() - start_time
-    print(f"Downloaded {len(sites)} sites in {duration} seconds")
+    asyncio.run(main())
diff --git a/concurrency-overview/io_non_concurrent.py b/python-concurrency/io_non_concurrent.py
old mode 100755
new mode 100644
similarity index 62%
rename from concurrency-overview/io_non_concurrent.py
rename to python-concurrency/io_non_concurrent.py
index 2fef578b0f..b48f9d799e
--- a/concurrency-overview/io_non_concurrent.py
+++ b/python-concurrency/io_non_concurrent.py
@@ -1,12 +1,17 @@
-#!/usr/bin/env python3
 import time
 
 import requests
 
 
-def download_site(url, session):
-    with session.get(url) as response:
-        print(f"Read {len(response.content)} from {url}")
+def main():
+    sites = [
+        "https://www.jython.org",
+        "http://olympus.realpython.org/dice",
+    ] * 80
+    start_time = time.perf_counter()
+    download_all_sites(sites)
+    duration = time.perf_counter() - start_time
+    print(f"Downloaded {len(sites)} sites in {duration} seconds")
 
 
 def download_all_sites(sites):
@@ -15,12 +20,10 @@ def download_all_sites(sites):
             download_site(url, session)
 
 
+def download_site(url, session):
+    with session.get(url) as response:
+        print(f"Read {len(response.content)} bytes from {url}")
+
+
 if __name__ == "__main__":
-    sites = [
-        "https://www.jython.org",
-        "http://olympus.realpython.org/dice",
-    ] * 80
-    start_time = time.time()
-    download_all_sites(sites)
-    duration = time.time() - start_time
-    print(f"Downloaded {len(sites)} in {duration} seconds")
+    main()
diff --git a/python-concurrency/io_processes.py b/python-concurrency/io_processes.py
new file mode 100644
index 0000000000..21baa4c55a
--- /dev/null
+++ b/python-concurrency/io_processes.py
@@ -0,0 +1,40 @@
+import atexit
+import multiprocessing
+import time
+from concurrent.futures import ProcessPoolExecutor
+
+import requests
+
+session: requests.Session
+
+
+def main():
+    sites = [
+        "https://www.jython.org",
+        "http://olympus.realpython.org/dice",
+    ] * 80
+    start_time = time.perf_counter()
+    download_all_sites(sites)
+    duration = time.perf_counter() - start_time
+    print(f"Downloaded {len(sites)} sites in {duration} seconds")
+
+
+def download_all_sites(sites):
+    with ProcessPoolExecutor(initializer=init_process) as executor:
+        executor.map(download_site, sites)
+
+
+def download_site(url):
+    with session.get(url) as response:
+        name = multiprocessing.current_process().name
+        print(f"{name}:Read {len(response.content)} bytes from {url}")
+
+
+def init_process():
+    global session
+    session = requests.Session()
+    atexit.register(session.close)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/concurrency-overview/io_threading.py b/python-concurrency/io_threads.py
old mode 100755
new mode 100644
similarity index 66%
rename from concurrency-overview/io_threading.py
rename to python-concurrency/io_threads.py
index 0b7458ed90..663d975ccc
--- a/concurrency-overview/io_threading.py
+++ b/python-concurrency/io_threads.py
@@ -1,4 +1,3 @@
-#!/usr/bin/env python3
 import concurrent.futures
 import threading
 import time
@@ -8,29 +7,33 @@
 thread_local = threading.local()
 
 
-def get_session():
-    if not hasattr(thread_local, "session"):
-        thread_local.session = requests.Session()
-    return thread_local.session
+def main():
+    sites = [
+        "https://www.jython.org",
+        "http://olympus.realpython.org/dice",
+    ] * 80
+    start_time = time.perf_counter()
+    download_all_sites(sites)
+    duration = time.perf_counter() - start_time
+    print(f"Downloaded {len(sites)} sites in {duration} seconds")
+
+
+def download_all_sites(sites):
+    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+        executor.map(download_site, sites)
 
 
 def download_site(url):
-    session = get_session()
+    session = get_session_for_thread()
     with session.get(url) as response:
-        print(f"Read {len(response.content)} from {url}")
+        print(f"Read {len(response.content)} bytes from {url}")
 
 
-def download_all_sites(sites):
-    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-        executor.map(download_site, sites)
+def get_session_for_thread():
+    if not hasattr(thread_local, "session"):
+        thread_local.session = requests.Session()
+    return thread_local.session
 
 
 if __name__ == "__main__":
-    sites = [
-        "https://www.jython.org",
-        "http://olympus.realpython.org/dice",
-    ] * 80
-    start_time = time.time()
-    download_all_sites(sites)
-    duration = time.time() - start_time
-    print(f"Downloaded {len(sites)} in {duration} seconds")
+    main()
diff --git a/python-concurrency/requirements.txt b/python-concurrency/requirements.txt
new file mode 100644
index 0000000000..3cb9747ebf
--- /dev/null
+++ b/python-concurrency/requirements.txt
@@ -0,0 +1,13 @@
+aiohappyeyeballs==2.4.3
+aiohttp==3.10.10
+aiosignal==1.3.1
+attrs==24.2.0
+certifi==2024.8.30
+charset-normalizer==3.4.0
+frozenlist==1.5.0
+idna==3.10
+multidict==6.1.0
+propcache==0.2.0
+requests==2.32.3
+urllib3==2.2.3
+yarl==1.17.1