mirror of https://github.com/autistic-symposium/web3-starter-py.git (synced 2025-05-17 22:20:22 -04:00)

add concurrence examples

This commit is contained in:
  parent 614452d462
  commit 516c922e02

15 changed files with 64 additions and 248 deletions
@@ -1,3 +1,3 @@
 ### Concurrence in Python

-* This directory contains the examples for my Medium article: [Python + Concurrence: Here is What You Need to Know](https://medium.com/python-for-the-utopian/python-concurrence-here-is-what-you-need-to-know-c771d86eda95).
+Examples for my Medium article: [Python + Concurrence: A Mnemonic Guide🚦](https://medium.com/python-for-the-utopian/python-concurrence-a-mnemonic-guide-7304867cbfb7).
@@ -2,14 +2,13 @@

 import asyncio


 async def delayed_hello():
-    print("Hello ")
+    print('Hello ')
     await asyncio.sleep(1)
-    print("World!")
+    print('World!')


-if __name__ == "__main__":
-    loop = asyncio.get_event_loop()
-    loop.run_until_complete(delayed_hello())
-    loop.close()
+loop = asyncio.get_event_loop()
+loop.run_until_complete(delayed_hello())
+loop.close()
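As an aside on the pattern above: since Python 3.7 the same example can be driven by asyncio.run(), which creates, runs, and closes the event loop in one call. A minimal sketch of that equivalent, assuming nothing beyond the standard library:

#!/usr/bin/env python3
# minimal sketch: the same delayed hello using asyncio.run() (Python 3.7+)
import asyncio


async def delayed_hello():
    print('Hello ')
    await asyncio.sleep(1)
    print('World!')


asyncio.run(delayed_hello())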
@@ -1,25 +0,0 @@
-#!/usr/bin/env python3
-
-import random
-import logging
-import concurrent.futures
-
-WORKER_COUNT = 10
-JOB_COUNT = 10
-
-class Job:
-    def __init__(self, number):
-        self.number = number
-
-def process_job(job):
-    # Wait between 0 and 0.01 seconds.
-    time.sleep(random.random()/100.0)
-    logging.info("Job number {:d}".format(job.number))
-
-def main():
-    with concurrent.futures.ThreadPoolExecutor(
-            max_workers=WORKER_COUNT) as executor:
-        futures = [executor.submit(process_job, Job(i))
-                   for i in range(JOB_COUNT)]
-        for future in concurrent.futures.as_completed(futures):
-            pass
Concurrence_examples/concurrent_future_example.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+from time import sleep
+from concurrent.futures import ThreadPoolExecutor
+
+
+def return_after_5_secs(message):
+    sleep(5)
+    return message
+
+
+pool = ThreadPoolExecutor(3)
+future = pool.submit(return_after_5_secs, ('Future message'))
+
+print(future.done())
+
+sleep(5)
+print(future.done())
+print(future.result())
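A non-authoritative variation on the new file above: used as a context manager, the executor shuts its worker threads down on exit, and future.result() blocks until the value is ready, so the second sleep(5) is not strictly needed. Sketch:

#!/usr/bin/env python3
# sketch only: same idea as concurrent_future_example.py, with the executor
# managed by a with-block so its threads are joined automatically
from time import sleep
from concurrent.futures import ThreadPoolExecutor


def return_after_5_secs(message):
    sleep(5)
    return message


with ThreadPoolExecutor(max_workers=3) as pool:
    future = pool.submit(return_after_5_secs, 'Future message')
    print(future.done())    # almost certainly False right after submit
    print(future.result())  # blocks until the worker returns the message
    print(future.done())    # True once result() has returned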
@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import time
-import threading
-import multiprocessing
-
-NUM_WORKERS = 4
-
-def run_sleep():
-    print("PID: %s, Process Name: %s, Thread Name: %s" % (
-        os.getpid(),
-        multiprocessing.current_process().name,
-        threading.current_thread().name)
-    )
-    time.sleep(1)
-
-
-# Run tasks serially
-start_time = time.time()
-
-for _ in range(NUM_WORKERS):
-    run_sleep()
-
-end_time = time.time()
-
-print("Serial time=", end_time - start_time)
-
-
-# Run tasks using threads
-start_time = time.time()
-
-threads = [threading.Thread(target=run_sleep) for _ in range(NUM_WORKERS)]
-[thread.start() for thread in threads]
-[thread.join() for thread in threads]
-
-end_time = time.time()
-
-print("Threads time=", end_time - start_time)
-
-
-# Run tasks using processes
-start_time = time.time()
-
-processes = [multiprocessing.Process(target=run_sleep()) for _ in range(NUM_WORKERS)]
-[process.start() for process in processes]
-[process.join() for process in processes]
-
-end_time = time.time()
-
-print("Parallel time=", end_time - start_time)
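One detail in the removed benchmark above is worth spelling out: multiprocessing.Process(target=run_sleep()) calls run_sleep() in the parent and passes its return value (None) as the target, so the "Parallel time" section never ran in child processes at all. A minimal, self-contained sketch of the corrected process section:

#!/usr/bin/env python3
# sketch: pass the callable itself to Process, not the result of calling it
import time
import multiprocessing

NUM_WORKERS = 4


def run_sleep():
    time.sleep(1)


if __name__ == '__main__':
    start_time = time.time()
    processes = [multiprocessing.Process(target=run_sleep) for _ in range(NUM_WORKERS)]
    [process.start() for process in processes]
    [process.join() for process in processes]
    print("Parallel time=", time.time() - start_time)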
@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import time
-import threading
-import multiprocessing
-
-NUM_WORKERS = 4
-
-def run_numbers():
-    print("PID: %s, Process Name: %s, Thread Name: %s" % (
-        os.getpid(),
-        multiprocessing.current_process().name,
-        threading.current_thread().name)
-    )
-    x = 0
-    while x < 10000000:
-        x += 1
-
-start_time = time.time()
-
-for _ in range(NUM_WORKERS):
-    run_numbers()
-
-end_time = time.time()
-
-print("Serial time=", end_time - start_time)
-
-
-start_time = time.time()
-
-threads = [threading.Thread(target=run_numbers) for _ in range(NUM_WORKERS)]
-[thread.start() for thread in threads]
-[thread.join() for thread in threads]
-
-end_time = time.time()
-
-print("Threads time=", end_time - start_time)
-
-
-start_time = time.time()
-
-processes = [multiprocessing.Process(target=run_numbers) for _ in range(NUM_WORKERS)]
-[process.start() for process in processes]
-[process.join() for process in processes]
-end_time = time.time()
-
-
-print("Parallel time=", end_time - start_time)
@@ -7,10 +7,10 @@ import multiprocessing
 def daemon():
     p = multiprocessing.current_process()

     print('Starting: {}, {}'.format(p.name, p.pid))

     sys.stdout.flush()
-    time.sleep(2)
+    time.sleep(1)
     print('Exiting : {}, {}'.format(p.name, p.pid))

     sys.stdout.flush()
@@ -18,8 +18,8 @@ def daemon():

 def non_daemon():
     p = multiprocessing.current_process()

     print('Starting: {}, {}'.format(p.name, p.pid))

     sys.stdout.flush()
     print('Exiting : {}, {}'.format(p.name, p.pid))
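The two hunks above only touch the worker functions, so the daemon flag that gives the script its point is not visible in the diff. A sketch of how such a comparison is typically wired up; the main block here is assumed, not taken from the file:

#!/usr/bin/env python3
# illustrative sketch: the main block below is assumed, not from the diff
import sys
import time
import multiprocessing


def daemon():
    print('Starting:', multiprocessing.current_process().name)
    sys.stdout.flush()
    time.sleep(1)
    print('Exiting :', multiprocessing.current_process().name)
    sys.stdout.flush()


def non_daemon():
    print('Starting:', multiprocessing.current_process().name)
    sys.stdout.flush()
    print('Exiting :', multiprocessing.current_process().name)
    sys.stdout.flush()


if __name__ == '__main__':
    d = multiprocessing.Process(name='daemon', target=daemon)
    d.daemon = True      # terminated automatically when the main process exits
    n = multiprocessing.Process(name='non-daemon', target=non_daemon)
    d.start()
    n.start()
    # without d.join(), the daemonized child may be killed before it prints
    # its 'Exiting' line; the non-daemon child always runs to completion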
Concurrence_examples/deadlock_example.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import threading
+
+l = threading.Lock()
+print("Before first lock acquire.")
+
+l.acquire()
+print("Before second lock acquire.")
+
+l.acquire()
+print("Lock was acquired twice")
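The new deadlock_example.py hangs on the second acquire() because threading.Lock is not reentrant. Two common ways out, sketched here for contrast rather than as part of the commit: a reentrant lock, or a timed acquire that fails instead of blocking forever.

#!/usr/bin/env python3
# sketch: avoiding the self-deadlock shown in deadlock_example.py
import threading

rlock = threading.RLock()   # reentrant: the owning thread may acquire it again
rlock.acquire()
rlock.acquire()
print("RLock was acquired twice by the same thread")
rlock.release()
rlock.release()             # release as many times as acquired

lock = threading.Lock()
lock.acquire()
if not lock.acquire(timeout=1):   # a timed acquire returns False instead of hanging
    print("Second acquire timed out instead of deadlocking")
lock.release()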
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-
-import time
-from gevent.pool import Pool
-from gevent import monkey
-
-# Note that you can spawn many workers with gevent since the cost of creating and switching is very low
-NUM_WORKERS = 4
-
-# Monkey-Patch socket module for HTTP requests
-monkey.patch_socket()
-
-start_time = time.time()
-
-pool = Pool(NUM_WORKERS)
-for address in WEBSITE_LIST:
-    pool.spawn(check_website, address)
-
-# Wait for stuff to finish
-pool.join()
-
-end_time = time.time()
-
-print("Time for GreenSquirrel: %ssecs" % (end_time - start_time))
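The removed gevent script referenced WEBSITE_LIST and check_website without defining them. A self-contained sketch of the same pattern, with a hypothetical URL list and checker filled in purely for illustration (gevent is a third-party package):

#!/usr/bin/env python3
# sketch of the removed gevent pattern; WEBSITE_LIST and check_website below
# are hypothetical stand-ins, not the originals
import time
from gevent import monkey

# patch the socket module before the HTTP machinery is imported
monkey.patch_socket()

from gevent.pool import Pool
from urllib.request import urlopen

NUM_WORKERS = 4
WEBSITE_LIST = ['http://example.com'] * 4   # hypothetical


def check_website(address):                 # hypothetical helper
    with urlopen(address) as response:
        return response.status


start_time = time.time()

pool = Pool(NUM_WORKERS)
for address in WEBSITE_LIST:
    pool.spawn(check_website, address)
pool.join()

end_time = time.time()
print("Time for the gevent pool: %ssecs" % (end_time - start_time))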
@@ -1,15 +1,16 @@
 #!/usr/bin/env python3

-import multiprocessing
-import logging
 import sys
+import logging
+import multiprocessing


 def worker():
-    print 'Doing some work'
+    print('Doing some work...')
     sys.stdout.flush()

-multiprocessing.log_to_stderr(logging.DEBUG)
-p = multiprocessing.Process(target=worker)
-p.start()
-p.join()
+if __name__ == '__main__':
+    multiprocessing.log_to_stderr(logging.DEBUG)
+    p = multiprocessing.Process(target=worker)
+    p.start()
+    p.join()
@@ -1,5 +1,3 @@
-#!/usr/bin/env python3
-
 import time
 import random
 import multiprocessing
@@ -12,5 +10,5 @@ def worker(n):


 for i in range(5):
-    t = multiprocessing.Process(target=worker, args=(i,))
-    t.start()
+    p = multiprocessing.Process(target=worker, args=(i,))
+    p.start()
@@ -2,9 +2,10 @@

 from multiprocessing import Pool


 def f(x):
     return x*x

-p = Pool(5)
-print(p.map(f, [1, 2, 3]))
+if __name__ == '__main__':
+    p = Pool(5)
+    print(p.map(f, [1, 2, 3]))
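The guard added here is more than style: with a start method that spawns a fresh interpreter (the default on Windows, and on macOS since Python 3.8), each worker re-imports the module, so a module-level Pool(5) would try to create workers again from inside a worker and typically fails with a RuntimeError. A minimal sketch that makes the start method explicit, assuming only the standard library:

#!/usr/bin/env python3
# sketch: multiprocessing entry points belong under the __main__ guard
import multiprocessing


def f(x):
    return x * x


if __name__ == '__main__':
    # 'spawn' re-imports this module in every worker, so anything outside
    # the guard would run again in each child
    multiprocessing.set_start_method('spawn')
    with multiprocessing.Pool(5) as p:
        print(p.map(f, [1, 2, 3]))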
@@ -1,26 +0,0 @@
-#!/usr/bin/env python3
-
-import threading
-
-counter = 0
-threads = []
-
-lock = threading.Lock()
-
-
-def count_with_lock():
-
-    global counter
-
-    for _ in range(100):
-        with lock:
-            counter += 1
-
-
-for _ in range(100):
-    thread = threading.Thread(target=count_with_lock)
-    thread.start()
-    threads.append(thread)
-
-
-print(counter)
@@ -1,36 +1,19 @@
-#!/usr/bin/env python3
-
-import time
 from queue import Queue
 from threading import Thread


 NUM_WORKERS = 4
 task_queue = Queue()


 def worker():
-    # Constantly check the queue for addresses
     while True:
         address = task_queue.get()
         run_function(address)

-        # Mark the processed task as done
         task_queue.task_done()

-start_time = time.time()
-
-# Create the worker threads
 threads = [Thread(target=worker) for _ in range(NUM_WORKERS)]
-
-# Add the websites to the task queue
-[task_queue.put(item) for item in SOME_LIST]
+[task_queue.put(item) for item in threads]
-
-# Start all the workers
 [thread.start() for thread in threads]
-
-# Wait for all the tasks in the queue to be processed
-task_queue.join()
+task_queue.join()
-
-
-end_time = time.time()
-
-print('Time: {} secs'.format(end_time - start_time))
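The trimmed version above still calls an undefined run_function, and as written it enqueues the Thread objects themselves rather than work items, while the endless, non-daemon workers keep the process alive after task_queue.join() returns. A self-contained sketch of the queue/worker pattern it is aiming at, with a hypothetical task list and helper:

#!/usr/bin/env python3
# sketch of the queue-based worker pool pattern; TASKS and run_function
# are hypothetical stand-ins for the undefined names in the diff above
from queue import Queue
from threading import Thread

NUM_WORKERS = 4
TASKS = list(range(20))              # hypothetical work items
task_queue = Queue()


def run_function(item):              # hypothetical per-item work
    print('processed', item)


def worker():
    while True:
        item = task_queue.get()
        run_function(item)
        task_queue.task_done()       # lets task_queue.join() return when all items are done


# daemon=True so the endless workers do not keep the process alive at exit
threads = [Thread(target=worker, daemon=True) for _ in range(NUM_WORKERS)]
[thread.start() for thread in threads]

[task_queue.put(item) for item in TASKS]
task_queue.join()                    # blocks until every queued item is marked done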
@@ -1,22 +0,0 @@
-#!/usr/bin/env python3
-
-import time
-import threading
-
-counter = 0
-threads = []
-
-def count():
-    global counter
-    for _ in range(100):
-        counter += 1
-
-for _ in range(100):
-    thread = threading.Thread(target=count)
-    thread.start()
-    threads.append(thread)
-
-for thread in threads:
-    thread.join()
-
-print(f"Count: {counter}")