add concurrence examples

This commit is contained in:
Mia von Steinkirch 2020-03-21 14:24:55 -07:00
parent 614452d462
commit 516c922e02
15 changed files with 64 additions and 248 deletions

View file

@@ -1,3 +1,3 @@
### Concurrence in Python
* This directory contains the examples for my Medium article: [Python + Concurrence: Here is What You Need to Know](https://medium.com/python-for-the-utopian/python-concurrence-here-is-what-you-need-to-know-c771d86eda95).
Examples for my Medium article: [Python + Concurrence: A Mnemonic Guide🚦](https://medium.com/python-for-the-utopian/python-concurrence-a-mnemonic-guide-7304867cbfb7).

View file

@@ -2,14 +2,13 @@
import asyncio

async def delayed_hello():
    print('Hello ')
    await asyncio.sleep(1)
    print('World!')

if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(delayed_hello())
    loop.close()
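
On Python 3.7+, the same example no longer needs the hand-managed event-loop boilerplate; a minimal equivalent sketch using asyncio.run():

#!/usr/bin/env python3
import asyncio

async def delayed_hello():
    print('Hello ')
    await asyncio.sleep(1)  # suspends this coroutine; the event loop is free meanwhile
    print('World!')

if __name__ == "__main__":
    # asyncio.run() creates the event loop, runs the coroutine, and closes the loop.
    asyncio.run(delayed_hello())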

View file

@@ -1,25 +0,0 @@
#!/usr/bin/env python3
import time  # required by time.sleep() in process_job()
import random
import logging
import concurrent.futures

WORKER_COUNT = 10
JOB_COUNT = 10

class Job:
    def __init__(self, number):
        self.number = number

def process_job(job):
    # Wait between 0 and 0.01 seconds.
    time.sleep(random.random() / 100.0)
    logging.info("Job number {:d}".format(job.number))

def main():
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=WORKER_COUNT) as executor:
        futures = [executor.submit(process_job, Job(i))
                   for i in range(JOB_COUNT)]
        for future in concurrent.futures.as_completed(futures):
            pass

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)  # without this, logging.info() output is hidden
    main()

View file

@@ -0,0 +1,19 @@
#!/usr/bin/env python3
from time import sleep
from concurrent.futures import ThreadPoolExecutor

def return_after_5_secs(message):
    sleep(5)
    return message

pool = ThreadPoolExecutor(3)
future = pool.submit(return_after_5_secs, 'Future message')
print(future.done())    # False: the worker thread is still sleeping
sleep(5)
print(future.done())    # True once the worker has returned
print(future.result())  # 'Future message'
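
Instead of polling future.done(), a completion callback can be attached to the future; a minimal sketch (the function names here are illustrative):

#!/usr/bin/env python3
from time import sleep
from concurrent.futures import ThreadPoolExecutor

def return_after_2_secs(message):
    sleep(2)
    return message

def on_done(future):
    # Invoked automatically once the future finishes.
    print('Callback got:', future.result())

with ThreadPoolExecutor(max_workers=3) as pool:
    future = pool.submit(return_after_2_secs, 'Future message')
    future.add_done_callback(on_done)
    # Leaving the with-block waits for pending work to finish.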

View file

@@ -1,51 +0,0 @@
#!/usr/bin/env python3
import os
import time
import threading
import multiprocessing

NUM_WORKERS = 4

def run_sleep():
    print("PID: %s, Process Name: %s, Thread Name: %s" % (
        os.getpid(),
        multiprocessing.current_process().name,
        threading.current_thread().name)
    )
    time.sleep(1)

if __name__ == "__main__":
    # Run tasks serially: roughly NUM_WORKERS seconds.
    start_time = time.time()
    for _ in range(NUM_WORKERS):
        run_sleep()
    end_time = time.time()
    print("Serial time=", end_time - start_time)

    # Run tasks using threads: roughly 1 second, since sleeping releases the GIL.
    start_time = time.time()
    threads = [threading.Thread(target=run_sleep) for _ in range(NUM_WORKERS)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    end_time = time.time()
    print("Threads time=", end_time - start_time)

    # Run tasks using processes. Pass the function itself (target=run_sleep):
    # writing target=run_sleep() calls it immediately and runs everything serially.
    start_time = time.time()
    processes = [multiprocessing.Process(target=run_sleep) for _ in range(NUM_WORKERS)]
    for process in processes:
        process.start()
    for process in processes:
        process.join()
    end_time = time.time()
    print("Parallel time=", end_time - start_time)

View file

@@ -1,49 +0,0 @@
#!/usr/bin/env python3
import os
import time
import threading
import multiprocessing

NUM_WORKERS = 4

def run_numbers():
    print("PID: %s, Process Name: %s, Thread Name: %s" % (
        os.getpid(),
        multiprocessing.current_process().name,
        threading.current_thread().name)
    )
    x = 0
    while x < 10000000:
        x += 1

if __name__ == "__main__":
    # Run tasks serially.
    start_time = time.time()
    for _ in range(NUM_WORKERS):
        run_numbers()
    end_time = time.time()
    print("Serial time=", end_time - start_time)

    # Run tasks using threads: no faster than serial, because the GIL
    # allows only one thread to execute Python bytecode at a time.
    start_time = time.time()
    threads = [threading.Thread(target=run_numbers) for _ in range(NUM_WORKERS)]
    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()
    end_time = time.time()
    print("Threads time=", end_time - start_time)

    # Run tasks using processes: each gets its own interpreter and its own GIL.
    start_time = time.time()
    processes = [multiprocessing.Process(target=run_numbers) for _ in range(NUM_WORKERS)]
    for process in processes:
        process.start()
    for process in processes:
        process.join()
    end_time = time.time()
    print("Parallel time=", end_time - start_time)

View file

@@ -7,10 +7,10 @@ import multiprocessing
def daemon():
    p = multiprocessing.current_process()
    print('Starting: {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()
    time.sleep(1)
    print('Exiting : {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()
@@ -18,8 +18,8 @@ def daemon():
def non_daemon():
    p = multiprocessing.current_process()
    print('Starting: {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()
    print('Exiting : {}, {}'.format(p.name, p.pid))

View file

@@ -0,0 +1,12 @@
#!/usr/bin/env python3
import threading

l = threading.Lock()

print("Before first lock acquire.")
l.acquire()
print("Before second lock acquire.")
l.acquire()  # deadlock: a plain Lock cannot be re-acquired by the thread that holds it
print("Lock was acquired twice")  # never reached

View file

@@ -1,24 +0,0 @@
#!/usr/bin/env python3
import time
from gevent import monkey

# Monkey-patch the socket module first, so blocking HTTP requests become
# cooperative greenlet switches.
monkey.patch_socket()

import urllib.request
from gevent.pool import Pool

# Note that you can spawn many workers with gevent, since the cost of
# creating and switching greenlets is very low.
NUM_WORKERS = 4

# Placeholder stubs so the example is self-contained: any URL list and any
# fetch function would do here.
WEBSITE_LIST = ['http://example.com'] * 8

def check_website(address):
    urllib.request.urlopen(address).read()

start_time = time.time()
pool = Pool(NUM_WORKERS)
for address in WEBSITE_LIST:
    pool.spawn(check_website, address)
pool.join()  # wait for everything to finish
end_time = time.time()
print("Time for GreenSquirrel: %ssecs" % (end_time - start_time))

View file

@@ -1,15 +1,16 @@
#!/usr/bin/env python3
import sys
import logging
import multiprocessing

def worker():
    print('Doing some work...')
    sys.stdout.flush()

if __name__ == '__main__':
    # Send multiprocessing's internal log messages to stderr.
    multiprocessing.log_to_stderr(logging.DEBUG)
    p = multiprocessing.Process(target=worker)
    p.start()
    p.join()

View file

@@ -1,5 +1,3 @@
#!/usr/bin/env python3
import time
import random
import multiprocessing
@@ -12,5 +10,5 @@ def worker(n):
for i in range(5):
    p = multiprocessing.Process(target=worker, args=(i,))
    p.start()

View file

@@ -2,9 +2,10 @@
from multiprocessing import Pool

def f(x):
    return x*x

if __name__ == '__main__':
    p = Pool(5)
    print(p.map(f, [1, 2, 3]))
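
Since Python 3.3, Pool also works as a context manager, which cleans up the worker processes automatically; a minimal sketch:

#!/usr/bin/env python3
from multiprocessing import Pool

def f(x):
    return x * x

if __name__ == '__main__':
    with Pool(5) as p:  # the pool is terminated on exiting the block
        print(p.map(f, range(10)))  # [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]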

View file

@@ -1,26 +0,0 @@
#!/usr/bin/env python3
import threading

counter = 0
threads = []
lock = threading.Lock()

def count_with_lock():
    global counter
    for _ in range(100):
        with lock:  # only one thread at a time may increment
            counter += 1

for _ in range(100):
    thread = threading.Thread(target=count_with_lock)
    thread.start()
    threads.append(thread)

# Join every thread before reading the counter, or the print may run too early.
for thread in threads:
    thread.join()

print(counter)  # always 10000

View file

@@ -1,36 +1,19 @@
#!/usr/bin/env python3
import time
from queue import Queue
from threading import Thread

NUM_WORKERS = 4
task_queue = Queue()

# Placeholder stubs so the example is self-contained: any list of work
# items and any per-item function would do here.
SOME_LIST = range(20)

def run_function(item):
    time.sleep(0.1)  # stand-in for real work

def worker():
    # Constantly check the queue for new items
    while True:
        item = task_queue.get()
        run_function(item)
        # Mark the processed task as done
        task_queue.task_done()

start_time = time.time()

# Create the worker threads; daemon=True lets the program exit even
# though the workers loop forever.
threads = [Thread(target=worker, daemon=True) for _ in range(NUM_WORKERS)]

# Add the items to the task queue
for item in SOME_LIST:
    task_queue.put(item)

# Start all the workers
for thread in threads:
    thread.start()

# Wait for all the tasks in the queue to be processed
task_queue.join()

end_time = time.time()
print('Time: {} secs'.format(end_time - start_time))

View file

@@ -1,22 +0,0 @@
#!/usr/bin/env python3
import time
import threading

counter = 0
threads = []

def count():
    global counter
    for _ in range(100):
        counter += 1  # read-modify-write, not atomic: updates can be lost

for _ in range(100):
    thread = threading.Thread(target=count)
    thread.start()
    threads.append(thread)

for thread in threads:
    thread.join()

print(f"Count: {counter}")  # usually 10000, but a data race can make it smaller