Merge pull request #7 from bt3gl/add_more_concurrence

Add more concurrence
Mia von Steinkirch 2020-03-21 15:40:36 -07:00 committed by GitHub
commit 85a3f66662
18 changed files with 104 additions and 250 deletions

.github/workflows/pythonapp.yml (new file)

@@ -0,0 +1,37 @@
+# This workflow will install Python dependencies, run tests and lint with a single version of Python
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Python application
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Set up Python 3.8
+      uses: actions/setup-python@v1
+      with:
+        python-version: 3.8
+    - name: Install dependencies
+      run: |
+        python -m pip install --upgrade pip
+        pip install -r requirements.txt
+    - name: Lint with flake8
+      run: |
+        pip install flake8
+        # stop the build if there are Python syntax errors or undefined names
+        flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+        # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+    - name: Test with pytest
+      run: |
+        pip install pytest
+        pytest


@@ -1,3 +1,3 @@
 ### Concurrence in Python

-Please check out this article for the examples in this directory: [Python + Concurrence: Here is What You Need to Know](https://medium.com/python-for-the-utopian/python-concurrence-here-is-what-you-need-to-know-c771d86eda95).
+Examples for my Medium article: [Python + Concurrence: A Mnemonic Guide 🚦](https://medium.com/python-for-the-utopian/python-concurrence-a-mnemonic-guide-7304867cbfb7).


@@ -2,14 +2,13 @@
 import asyncio

 async def delayed_hello():
-    print("Hello ")
+    print('Hello ')
     await asyncio.sleep(1)
-    print("World!")
+    print('World!')

 if __name__ == "__main__":
     loop = asyncio.get_event_loop()
     loop.run_until_complete(delayed_hello())
     loop.close()
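
On Python 3.7+, the same example can be written with `asyncio.run()`, which creates and closes the event loop for you; a minimal sketch (not part of the commit):

```python
#!/usr/bin/env python3
import asyncio

async def delayed_hello():
    print('Hello ')
    await asyncio.sleep(1)  # yields control to the event loop for one second
    print('World!')

asyncio.run(delayed_hello())  # sets up, runs, and closes the loop
```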


@ -1,25 +0,0 @@
#!/usr/bin/env python3
import random
import logging
import concurrent.futures
WORKER_COUNT = 10
JOB_COUNT = 10
class Job:
def __init__(self, number):
self.number = number
def process_job(job):
# Wait between 0 and 0.01 seconds.
time.sleep(random.random()/100.0)
logging.info("Job number {:d}".format(job.number))
def main():
with concurrent.futures.ThreadPoolExecutor(
max_workers=WORKER_COUNT) as executor:
futures = [executor.submit(process_job, Job(i))
for i in range(JOB_COUNT)]
for future in concurrent.futures.as_completed(futures):
pass


@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+from time import sleep
+from concurrent.futures import ThreadPoolExecutor
+
+def return_after_5_secs(message):
+    sleep(5)
+    return message
+
+pool = ThreadPoolExecutor(3)
+future = pool.submit(return_after_5_secs, 'Future message')
+
+print(future.done())
+sleep(5)
+print(future.done())
+print(future.result())
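
A common variation (a sketch, not part of the commit) runs the executor as a context manager and collects results with `concurrent.futures.as_completed`, so each future is handled as soon as it finishes:

```python
#!/usr/bin/env python3
from time import sleep
from concurrent.futures import ThreadPoolExecutor, as_completed

def return_after_n_secs(n):
    sleep(n)
    return 'slept {}s'.format(n)

# The with-block waits for all pending futures before exiting.
with ThreadPoolExecutor(max_workers=3) as pool:
    futures = [pool.submit(return_after_n_secs, n) for n in (3, 1, 2)]
    for future in as_completed(futures):
        print(future.result())  # printed in completion order: 1s, 2s, 3s
```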


@@ -1,51 +0,0 @@
-#!/usr/bin/env python3
-import os
-import time
-import threading
-import multiprocessing
-
-NUM_WORKERS = 4
-
-def run_sleep():
-    print("PID: %s, Process Name: %s, Thread Name: %s" % (
-        os.getpid(),
-        multiprocessing.current_process().name,
-        threading.current_thread().name)
-    )
-    time.sleep(1)
-
-# Run tasks serially
-start_time = time.time()
-for _ in range(NUM_WORKERS):
-    run_sleep()
-end_time = time.time()
-print("Serial time=", end_time - start_time)
-
-# Run tasks using threads
-start_time = time.time()
-threads = [threading.Thread(target=run_sleep) for _ in range(NUM_WORKERS)]
-[thread.start() for thread in threads]
-[thread.join() for thread in threads]
-end_time = time.time()
-print("Threads time=", end_time - start_time)
-
-# Run tasks using processes (pass the function itself, not run_sleep(),
-# or the sleep runs in the parent before each Process is even created)
-start_time = time.time()
-processes = [multiprocessing.Process(target=run_sleep) for _ in range(NUM_WORKERS)]
-[process.start() for process in processes]
-[process.join() for process in processes]
-end_time = time.time()
-print("Parallel time=", end_time - start_time)


@@ -1,49 +0,0 @@
-#!/usr/bin/env python3
-import os
-import time
-import threading
-import multiprocessing
-
-NUM_WORKERS = 4
-
-def run_numbers():
-    print("PID: %s, Process Name: %s, Thread Name: %s" % (
-        os.getpid(),
-        multiprocessing.current_process().name,
-        threading.current_thread().name)
-    )
-    x = 0
-    while x < 10000000:
-        x += 1
-
-start_time = time.time()
-for _ in range(NUM_WORKERS):
-    run_numbers()
-end_time = time.time()
-print("Serial time=", end_time - start_time)
-
-start_time = time.time()
-threads = [threading.Thread(target=run_numbers) for _ in range(NUM_WORKERS)]
-[thread.start() for thread in threads]
-[thread.join() for thread in threads]
-end_time = time.time()
-print("Threads time=", end_time - start_time)
-
-start_time = time.time()
-processes = [multiprocessing.Process(target=run_numbers) for _ in range(NUM_WORKERS)]
-[process.start() for process in processes]
-[process.join() for process in processes]
-end_time = time.time()
-print("Parallel time=", end_time - start_time)


@@ -7,10 +7,10 @@ import multiprocessing
 def daemon():
     p = multiprocessing.current_process()
     print('Starting: {}, {}'.format(p.name, p.pid))
     sys.stdout.flush()
-    time.sleep(2)
+    time.sleep(1)
     print('Exiting : {}, {}'.format(p.name, p.pid))
     sys.stdout.flush()
@@ -18,8 +18,8 @@ def daemon():
 def non_daemon():
     p = multiprocessing.current_process()
     print('Starting: {}, {}'.format(p.name, p.pid))
     sys.stdout.flush()
     print('Exiting : {}, {}'.format(p.name, p.pid))
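
These two hunks come from a longer daemon-vs-non-daemon example whose remaining lines are not shown. A self-contained sketch of how such a comparison is typically driven (the start/join code below is an assumption, not part of the commit):

```python
#!/usr/bin/env python3
import sys
import time
import multiprocessing

def daemon():
    p = multiprocessing.current_process()
    print('Starting: {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()
    time.sleep(1)
    print('Exiting : {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()

def non_daemon():
    p = multiprocessing.current_process()
    print('Starting: {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()
    print('Exiting : {}, {}'.format(p.name, p.pid))
    sys.stdout.flush()

if __name__ == '__main__':
    d = multiprocessing.Process(name='daemon', target=daemon, daemon=True)
    n = multiprocessing.Process(name='non-daemon', target=non_daemon)
    d.start()
    n.start()
    # Without these joins the daemon would be terminated as soon
    # as the main process exits, before it prints its exit line.
    d.join()
    n.join()
```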


@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+import threading
+
+l = threading.Lock()
+
+print("Before first lock acquire.")
+l.acquire()
+print("Before second lock acquire.")
+l.acquire()  # deadlock: a plain Lock is not reentrant, so this blocks forever
+print("Lock was acquired twice")


@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-import time
-from gevent.pool import Pool
-from gevent import monkey
-
-# Note that you can spawn many workers with gevent, since the cost
-# of creating and switching greenlets is very low
-NUM_WORKERS = 4
-
-# Monkey-patch socket module for HTTP requests
-monkey.patch_socket()
-
-start_time = time.time()
-
-# check_website and WEBSITE_LIST come from elided lines of this file
-pool = Pool(NUM_WORKERS)
-for address in WEBSITE_LIST:
-    pool.spawn(check_website, address)
-
-# Wait for stuff to finish
-pool.join()
-
-end_time = time.time()
-print("Time for GreenSquirrel: %s secs" % (end_time - start_time))


@@ -1,15 +1,16 @@
 #!/usr/bin/env python3
-import multiprocessing
-import logging
 import sys
+import logging
+import multiprocessing

 def worker():
-    print 'Doing some work'
+    print('Doing some work...')
     sys.stdout.flush()

 if __name__ == '__main__':
     multiprocessing.log_to_stderr(logging.DEBUG)
     p = multiprocessing.Process(target=worker)
     p.start()
     p.join()


@@ -1,5 +1,3 @@
 #!/usr/bin/env python3
-import time
-import random
 import multiprocessing

@@ -12,5 +10,5 @@ def worker(n):
 for i in range(5):
-    t = multiprocessing.Process(target=worker, args=(i,))
-    t.start()
+    p = multiprocessing.Process(target=worker, args=(i,))
+    p.start()


@@ -2,9 +2,10 @@
 from multiprocessing import Pool

 def f(x):
     return x*x

 if __name__ == '__main__':
     p = Pool(5)
     print(p.map(f, [1, 2, 3]))
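
Since Python 3.3 a `Pool` can also be used as a context manager, which terminates its worker processes automatically; a sketch (not part of the commit):

```python
#!/usr/bin/env python3
from multiprocessing import Pool

def f(x):
    return x * x

if __name__ == '__main__':
    with Pool(5) as p:  # workers are cleaned up when the block exits
        print(p.map(f, [1, 2, 3]))  # [1, 4, 9]
```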


@@ -1,26 +0,0 @@
-#!/usr/bin/env python3
-import threading
-
-counter = 0
-threads = []
-lock = threading.Lock()
-
-def count_with_lock():
-    global counter
-    for _ in range(100):
-        with lock:
-            counter += 1
-
-for _ in range(100):
-    thread = threading.Thread(target=count_with_lock)
-    thread.start()
-    threads.append(thread)
-
-print(counter)


@@ -1,36 +1,19 @@
 #!/usr/bin/env python3
-import time
 from queue import Queue
 from threading import Thread

 NUM_WORKERS = 4
 task_queue = Queue()

 def worker():
     # Constantly check the queue for addresses
     while True:
         address = task_queue.get()
         run_function(address)
         # Mark the processed task as done
         task_queue.task_done()

-start_time = time.time()
 # Create the worker threads
 threads = [Thread(target=worker) for _ in range(NUM_WORKERS)]
 # Add the websites to the task queue
 [task_queue.put(item) for item in SOME_LIST]
 # Start all the workers
 [thread.start() for thread in threads]
 # Wait for all the tasks in the queue to be processed
-task_queue.join()
-end_time = time.time()
-print('Time: {} secs'.format(end_time - start_time))
+task_queue.join()
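
`run_function` and `SOME_LIST` are defined elsewhere in the repository. A self-contained sketch of the same worker-queue pattern with hypothetical stand-ins (daemon threads let the program exit even though the workers loop forever):

```python
#!/usr/bin/env python3
from queue import Queue
from threading import Thread

NUM_WORKERS = 4
task_queue = Queue()

def run_function(item):
    print('Processing', item)  # hypothetical stand-in for the real task

def worker():
    while True:
        item = task_queue.get()
        run_function(item)
        task_queue.task_done()

threads = [Thread(target=worker, daemon=True) for _ in range(NUM_WORKERS)]
for item in range(10):  # stands in for SOME_LIST
    task_queue.put(item)
for thread in threads:
    thread.start()
task_queue.join()  # blocks until every queued task is marked done
```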


@@ -1,22 +0,0 @@
-#!/usr/bin/env python3
-import time
-import threading
-
-counter = 0
-threads = []
-
-def count():
-    global counter
-    for _ in range(100):
-        counter += 1
-
-for _ in range(100):
-    thread = threading.Thread(target=count)
-    thread.start()
-    threads.append(thread)
-
-for thread in threads:
-    thread.join()
-
-print(f"Count: {counter}")


@@ -14,4 +14,5 @@
 ---
+<a href="https://www.buymeacoffee.com/miavonpizza" target="_blank"><img src="https://cdn.buymeacoffee.com/buttons/arial-pink.png" alt="Buy Me A Coffee" style="height: 51px !important;width: 217px !important;" ></a>


@@ -1,6 +1,6 @@
-# Medium Examples
+# Security Examples

-This directory holds any code and snippet that I have published in Medium:
+This directory contains the source code published in my Medium articles:

 * [Learn Networking with Python's Socket and Threading Module 🚀](https://medium.com/python-for-the-utopian/learning-networking-with-pythons-socket-and-threading-module-30dc77e1fc59).