Kalil de Lima
kalil@rootstrap.com
github.com/kaozdl
(55 + 5) seconds * 30 moves = 1,800 seconds = 30 minutes per game
Sequentially: 24 games * 30 minutes = 720 minutes = 12 hours total
24 tables * 5 seconds = 120 seconds for Judith to move once at every table
Concurrently: 120 seconds * 30 moves = 3,600 seconds = 1 hour total
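A quick sanity check of that arithmetic in plain Python; the 55 s and 5 s per move, the 30 moves, and the 24 boards come from the lines above, and the variable names are mine.
# Exhibition arithmetic, all times in seconds.
OPPONENT_SECONDS = 55
JUDITH_SECONDS = 5
MOVES_PER_GAME = 30
BOARDS = 24

minutes_per_game = (OPPONENT_SECONDS + JUDITH_SECONDS) * MOVES_PER_GAME / 60
sequential_hours = BOARDS * minutes_per_game / 60
concurrent_hours = BOARDS * JUDITH_SECONDS * MOVES_PER_GAME / 3600

print(minutes_per_game)   # 30.0 minutes per game
print(sequential_hours)   # 12.0 hours playing the games one after another
print(concurrent_hours)   # 1.0 hour moving at every board concurrently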
import asyncio


async def play_game(move_list=None):
    # Keep trading moves until Judith delivers checkmate ('CM').
    while True:
        if not move_list:
            # Opening move of a fresh game.
            move_list = [judiths_move()]
        else:
            # Judith answers the opponent's last move.
            move_list.append(judiths_move(move_list[-1]))
        if move_list[-1] == 'CM':
            return move_list
        # Awaiting the opponent is what frees Judith to move at other boards.
        move_list.append(await opponents_move(move_list[-1]))


async def play_exhibition():
    # Start all 24 games and let the event loop interleave them.
    games = await asyncio.gather(
        *[play_game() for _ in range(24)]
    )
    return games


if __name__ == '__main__':
    asyncio.run(play_exhibition())

- Very Lightweight
- Predictable Behavior
- Scheduled inside the interpreter
- Good error handling
- Async code should run inside asyncio
- No external communication
- Explicit event loop handling
- GIL Locked
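A minimal sketch of those coroutine properties, not part of the original deck (the board coroutine and its numbers are made up): coroutines are very cheap to create, they are scheduled cooperatively inside the interpreter at every await, and the event loop is driven explicitly through asyncio.run.
import asyncio

async def board(n):
    # Each await is a point where the coroutine hands control back
    # to the event loop, which schedules the next ready coroutine.
    await asyncio.sleep(0.01)
    return f'board {n} done'

async def main():
    # Ten thousand coroutines cost far less than ten thousand threads.
    results = await asyncio.gather(*(board(n) for n in range(10_000)))
    print(len(results))  # 10000

if __name__ == '__main__':
    # Explicit event loop handling: async code runs inside asyncio.
    asyncio.run(main())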
- Can run inside sync code
- Do not require new syntax
- Scheduled by the OS
- Require tools like mutexes
- All threads depend on the main program
- All threads share the same memory space
import threading
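A minimal sketch of the points above, not part of the original deck and assuming nothing beyond the standard library: ordinary synchronous functions run in OS-scheduled threads, they all share the same counter in memory, and a mutex (threading.Lock) is needed to update it safely.
import threading

counter = 0
lock = threading.Lock()

def work():
    # Plain sync code, no new syntax; the OS decides when this thread runs.
    global counter
    for _ in range(100_000):
        # All threads share the same memory, so the increment
        # has to be protected with a mutex.
        with lock:
            counter += 1

threads = [threading.Thread(target=work) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    # The threads depend on the main program, which waits for them here.
    t.join()
print(counter)  # 400000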
- Can run inside sync code
- Do not require new syntax
- Scheduled by the OS
- Pretty much separate programs
- Do not share the same memory space
- Communicate with other processes via the kernel
import multiprocessing
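A minimal sketch of the points above, not part of the original deck and assuming only the standard library: every worker is essentially a separate program with its own memory space, and results travel back through kernel-mediated pipes; the square function and its inputs are made up.
import multiprocessing

def square(n):
    # Runs in a separate process with its own memory,
    # so the parent interpreter's GIL is not a bottleneck.
    return n * n

if __name__ == '__main__':
    # The pool sends arguments and results between processes
    # through pipes managed by the kernel.
    with multiprocessing.Pool(processes=4) as pool:
        print(pool.map(square, range(10)))  # [0, 1, 4, ..., 81]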
# Routines from websocket handlers in Django Channels
async def connect(self):
    # Unwrap the lazy user object from the ASGI scope.
    self.user = self.scope['user']
    if '_wrapped' in self.user.__dict__:
        self.user = self.user._wrapped
    # Look up the chat this socket belongs to.
    self.chat = Chat.objects.get(
        pk=self.scope['url_route']['kwargs']['chat_id']
    )
    self.room_group_name = f'chat_{self.chat.id}'
    # Join room group
    await self.channel_layer.group_add(
        self.room_group_name,
        self.channel_name
    )
    await self.accept()

# Parallelize multiple web requests to scrape info from websites
def spark_threads(function, iteration, initial_page, last_page):
    number_of_threads, start_page_thread_group, finish_page_thread_group = \
        calculate_thread_pages(iteration, initial_page, last_page)
    threads = []
    try:
        for k in range(1, number_of_threads + 1):
            # Give each thread its own slice of pages to scrape.
            page_start = (k - 1) * PAGES_PER_THREAD + start_page_thread_group
            page_finish = min(page_start + PAGES_PER_THREAD - 1, finish_page_thread_group)
            # `function` is expected to be a functools.partial, so its
            # keyword arguments can be updated before each thread starts.
            function.keywords['page'] = page_start
            function.keywords['last_page'] = page_finish
            threads.append(spark_thread(function))
            random_sleep()
    except BaseException:
        print("Error: unable to start thread")
    return threads


def spark_thread(function, *args):
    new_thread = threading.Thread(target=function, args=args)
    new_thread.start()
    return new_thread
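A hedged usage sketch, not from the original project: because spark_threads mutates function.keywords, it expects a functools.partial; scrape_listings, the URL, and the page numbers below are made-up placeholders.
import functools

def scrape_listings(url, page=1, last_page=1):
    # Placeholder worker: fetch and parse pages `page`..`last_page` of `url`.
    ...

scraper = functools.partial(scrape_listings, 'https://example.com/listings')
threads = spark_threads(scraper, iteration=1, initial_page=1, last_page=100)
for t in threads:
    t.join()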
https://docs.python.org/3/library/asyncio.html
https://docs.python.org/3/library/threading.html
https://docs.python.org/3/library/multiprocessing.html