add some definitions to readme

This commit is contained in:
bt3gl 2022-03-28 14:13:34 +04:00
parent 09ea5f12cf
commit 0a0d2707b2
12 changed files with 0 additions and 0 deletions

View file

@ -0,0 +1,35 @@
## Concurrency and Parallelism in Python
### Threading
* Threading is a feature usually provided by the operating system.
* Threads are lighter than processes, and share the same memory space.
* With threading, concurrency is achieved using multiple threads, but due to the GIL only one thread can be running at a time.
* If your code is IO-heavy (like HTTP requests), then multithreading will still probably speed up your code.
### Multiprocessing
* In multiprocessing, the original process is forked into multiple child processes, bypassing the GIL.
* Each child process will have a copy of the entire program's memory.
* If your code is performing a CPU bound task, such as decompressing gzip files, using the threading module will result in a slower execution time. For CPU bound tasks and truly parallel execution, use the multiprocessing module.
* Higher memory overhead than threading.
### RQ: queueing jobs
* [RQ](https://python-rq.org/) is a simple but powerful library.
* You first enqueue a function and its arguments using the library. This pickles the function call representation, which is then appended to a Redis list.
### Celery: queueing jobs
* Celery is one of the most popular background job managers in the Python world.
* Compatible with several message brokers like RabbitMQ or Redis and can act as both producer and consumer.
* Asynchronous task queue/job queue based on distributed message passing. It is focused on real-time operations but supports scheduling as well.
### concurrent.futures
* Using a concurrent.futures.ThreadPoolExecutor makes the Python threading example code almost identical to the multiprocessing module.

View file

@ -0,0 +1,14 @@
#!/usr/bin/env python3
import asyncio
async def delayed_hello():
    """Print 'Hello ', pause one second without blocking the loop, print 'World!'."""
    print('Hello ')
    await asyncio.sleep(1)
    print('World!')


if __name__ == '__main__':
    # asyncio.run() (Python 3.7+) creates, runs and closes the event loop in
    # one call; the old get_event_loop()/run_until_complete()/close() dance
    # is deprecated and leaks the loop if an exception escapes.
    asyncio.run(delayed_hello())

View file

@ -0,0 +1,19 @@
#!/usr/bin/env python3
from time import sleep
from concurrent.futures import ThreadPoolExecutor
def return_after_5_secs(message, delay=5):
    """Sleep for *delay* seconds (default 5), then return *message* unchanged.

    The delay parameter is backward-compatible: existing callers that pass
    only a message still get the original 5-second behavior.
    """
    sleep(delay)
    return message


if __name__ == '__main__':
    # Context manager guarantees the executor's worker threads are shut down;
    # the original leaked the pool. The guard keeps the demo from running on import.
    with ThreadPoolExecutor(3) as pool:
        # NOTE: the original passed ('Future message') — parentheses around a
        # single string are not a tuple, so this is the same single argument.
        future = pool.submit(return_after_5_secs, 'Future message')
        print(future.done())    # almost certainly False: the task is still sleeping
        sleep(5)
        print(future.done())    # True by now
        print(future.result())  # 'Future message'

View file

@ -0,0 +1,39 @@
#!/usr/bin/env python3
import time
import sys
import multiprocessing
def daemon():
    """Announce start, idle for one second, then announce exit.

    Intended to run as a daemonic child process, so the parent may exit
    before the trailing 'Exiting' line is ever printed.
    """
    proc = multiprocessing.current_process()
    name, pid = proc.name, proc.pid
    print('Starting: {}, {}'.format(name, pid))
    sys.stdout.flush()
    time.sleep(1)
    print('Exiting : {}, {}'.format(name, pid))
    sys.stdout.flush()
def non_daemon():
    """Announce start and exit immediately; the parent waits for this process."""
    proc = multiprocessing.current_process()
    for label in ('Starting: {}, {}', 'Exiting : {}, {}'):
        print(label.format(proc.name, proc.pid))
        sys.stdout.flush()
if __name__ == '__main__':
    # The daemon flag is passed to the constructor instead of assigned
    # afterwards; behavior is identical. The one-second pause lets the
    # daemonic child print before the non-daemonic one starts.
    d = multiprocessing.Process(name='daemon', target=daemon, daemon=True)
    n = multiprocessing.Process(name='non-daemon', target=non_daemon, daemon=False)
    d.start()
    time.sleep(1)
    n.start()

View file

@ -0,0 +1,12 @@
#!/usr/bin/env python3
import threading
# Deliberate self-deadlock demo: threading.Lock is NOT reentrant.
l = threading.Lock()
print("Before first lock acquire.")
l.acquire()
print("Before second lock acquire.")
# Blocks forever: this thread already holds the lock, and a plain Lock
# cannot be acquired twice by the same thread (threading.RLock can).
l.acquire()
# Never reached.
print("Lock was acquired twice")

View file

@ -0,0 +1,16 @@
#!/usr/bin/env python3
import sys
import logging
import multiprocessing
def worker():
    """Print a status line and flush so output is visible from a child process."""
    print('Doing some work...')
    sys.stdout.flush()


if __name__ == '__main__':
    # The guard is required for multiprocessing: under the 'spawn' start
    # method (default on Windows and macOS) the child re-imports this module,
    # and an unguarded Process(...) at module level spawns recursively.
    multiprocessing.log_to_stderr(logging.DEBUG)
    p = multiprocessing.Process(target=worker)
    p.start()
    p.join()

View file

@ -0,0 +1,14 @@
import time
import random
import multiprocessing
def worker(n):
    """Sleep a random 1-9 seconds, then report which worker slept how long."""
    sleep = random.randrange(1, 10)  # upper bound 10 is exclusive
    time.sleep(sleep)
    print("Worker {}: sleeping for {} seconds.".format(n, sleep))


if __name__ == '__main__':
    # Guard is mandatory for multiprocessing under the 'spawn' start method;
    # without it the children re-import this module and spawn recursively.
    processes = [multiprocessing.Process(target=worker, args=(i,)) for i in range(5)]
    for p in processes:
        p.start()
    for p in processes:
        # Join so the parent waits for every worker instead of exiting first.
        p.join()

View file

@ -0,0 +1,11 @@
#!/usr/bin/env python3
from multiprocessing import Pool
def f(x):
    """Return x squared."""
    return x * x


if __name__ == '__main__':
    # Without this guard, creating the Pool at import time raises
    # RuntimeError under the 'spawn' start method (Windows/macOS default).
    # The with-block closes and joins the pool's worker processes.
    with Pool(5) as p:
        print(p.map(f, [1, 2, 3]))

View file

@ -0,0 +1,27 @@
#!/usr/bin/env python3
import threading
# Shared counter mutated by two threads to show that `x += 1` is not an
# atomic operation, even under the GIL: read-modify-write steps from the
# two threads can interleave, so the final value is typically not 0.
x = 0
COUNT = 10000000


def foo():
    """Increment the shared counter COUNT times."""
    global x
    for _ in range(COUNT):
        x += 1


def bar():
    """Decrement the shared counter COUNT times."""
    global x
    for _ in range(COUNT):
        x -= 1
# Run the increment and decrement loops concurrently. Because of the data
# race on the shared global `x`, the printed result is nondeterministic
# and is usually not 0.
t1 = threading.Thread(target=foo)
t2 = threading.Thread(target=bar)
t1.start()
t2.start()
# Wait for both loops to finish before reading the counter.
t1.join()
t2.join()
print(x)

View file

@ -0,0 +1,16 @@
#!/usr/bin/env python3
import time
import random
import threading
def worker(n):
    """Sleep a random 1-9 seconds, then report this worker's id and thread."""
    duration = random.randrange(1, 10)
    time.sleep(duration)
    print("Worker {} from {}: sleeping for {} seconds.".format(n, threading.get_ident(), duration))
# Launch five worker threads; each prints after its own random sleep.
for worker_id in range(5):
    threading.Thread(target=worker, args=(worker_id,)).start()

View file

@ -0,0 +1,18 @@
#!/usr/bin/env python3
from time import sleep
from concurrent.futures import ThreadPoolExecutor
def return_after_5_secs(message, delay=5):
    """Sleep for *delay* seconds (default 5), then return *message* unchanged.

    The delay parameter is backward-compatible: existing callers that pass
    only a message still get the original 5-second behavior.
    """
    sleep(delay)
    return message


if __name__ == '__main__':
    # The with-block shuts the executor down deterministically; the original
    # leaked the pool. The guard keeps the 10-second demo from running on import.
    with ThreadPoolExecutor(3) as pool:
        future = pool.submit(return_after_5_secs, "hello")
        print(future.done())    # False: the task is still sleeping
        sleep(5)
        print(future.done())    # True by now
        print(future.result())  # "hello"

View file

@ -0,0 +1,19 @@
from queue import Queue
from threading import Thread
NUM_WORKERS = 4
task_queue = Queue()


def worker():
    # Pull items off the shared queue forever; task_done() is what lets
    # task_queue.join() detect that every enqueued item has been processed.
    # NOTE(review): run_function is not defined in the visible part of this
    # file — presumably supplied elsewhere; confirm before running.
    while True:
        address = task_queue.get()
        run_function(address)
        task_queue.task_done()


threads = [Thread(target=worker) for _ in range(NUM_WORKERS)]
# NOTE(review): this enqueues the Thread objects themselves as work items —
# almost certainly a bug; the queue should receive addresses/tasks instead.
[task_queue.put(item) for item in threads]
# Comprehensions used purely for side effects; plain for-loops would be clearer.
[thread.start() for thread in threads]
# Blocks until task_done() has been called once per put(). NOTE(review):
# the workers are non-daemon and loop forever, so even after join() returns
# the program cannot exit cleanly — consider Thread(daemon=True).
task_queue.join()