Part 1: Multiprocessing

Example 1: Using multiprocessing for a CPU-bound task

import multiprocessing

def square(n):
    return n*n

if __name__ == '__main__':
    numbers = [1, 2, 3, 4, 5]
    # Pool() starts one worker process per CPU core by default.
    pool = multiprocessing.Pool()

    # map blocks until every input has been processed and preserves input order.
    results = pool.map(square, numbers)
    pool.close()   # no new tasks will be submitted
    pool.join()    # wait for the worker processes to exit

    print(results)
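
The same pattern can also be written with the standard library's concurrent.futures API, which shuts the worker processes down automatically. A minimal sketch, assuming Python 3.2+ (only ProcessPoolExecutor is new relative to the example above):

from concurrent.futures import ProcessPoolExecutor

def square(n):
    return n * n

if __name__ == '__main__':
    numbers = [1, 2, 3, 4, 5]
    # The executor starts worker processes and shuts them down when the
    # with-block exits; map returns results in input order.
    with ProcessPoolExecutor() as executor:
        results = list(executor.map(square, numbers))

    print(results)  # [1, 4, 9, 16, 25]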

Example 2: Using multiprocessing for I/O-bound tasks

import multiprocessing
import requests

def download(url):
    response = requests.get(url)
    content = response.content
    with open('file_' + url.split('/')[-1], 'wb') as file:
        file.write(content)

if __name__ == '__main__':
    urls = ['http://example.com', 'http://example.org', 'http://example.net']
    pool = multiprocessing.Pool()

    pool.map(download, urls)
    pool.close()
    pool.join()
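
The example above silently ignores failed requests. A hedged variant that reports success or failure per URL is sketched below; the timeout value, the filename scheme, and the returned tuples are illustrative choices, assuming requests is installed.

import multiprocessing
import requests

def download(url):
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()  # treat HTTP error codes as failures
    except requests.RequestException as exc:
        return url, f'failed: {exc}'
    filename = 'file_' + url.split('/')[-1]
    with open(filename, 'wb') as file:
        file.write(response.content)
    return url, filename

if __name__ == '__main__':
    urls = ['http://example.com', 'http://example.org', 'http://example.net']
    # The with-block cleans up the pool once map has returned.
    with multiprocessing.Pool() as pool:
        for url, outcome in pool.map(download, urls):
            print(url, '->', outcome)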

Example 3: Running tasks in parallel with Process objects

import multiprocessing
import time

def task(name):
    print(f'Starting task {name}')
    time.sleep(2)  # simulate two seconds of work
    print(f'Finished task {name}')

if __name__ == '__main__':
    processes = []

    for i in range(1, 6):
        p = multiprocessing.Process(target=task, args=(f'Task {i}',))
        p.start()
        processes.append(p)

    for p in processes:
        p.join()

    print('All tasks completed.')
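
A Process started this way cannot return a value to the parent; whatever task returns is discarded. One common way to hand results back is to pass a multiprocessing.Queue into each worker, as in this sketch (the queue-based task and its message format are assumptions, not part of the original example):

import multiprocessing

def task(name, result_queue):
    # Do the work, then push a result for the parent to collect.
    result_queue.put(f'{name} finished')

if __name__ == '__main__':
    result_queue = multiprocessing.Queue()
    processes = []

    for i in range(1, 6):
        p = multiprocessing.Process(target=task, args=(f'Task {i}', result_queue))
        p.start()
        processes.append(p)

    # Collect one result per worker before joining, so the workers'
    # queue feeder threads never block on a full pipe.
    results = [result_queue.get() for _ in processes]

    for p in processes:
        p.join()

    print(results)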

Part 2: Multithreading

Example 1: Concurrent requests with multiple threads

import threading
import requests

def fetch(url):
    response = requests.get(url)
    content = response.content
    print(f'Response from {url}: {content}')

if __name__ == '__main__':
    urls = ['http://example.com', 'http://example.org', 'http://example.net']
    threads = []

    for url in urls:
        t = threading.Thread(target=fetch, args=(url,))
        threads.append(t)
        t.start()

    for t in threads:
        t.join()
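
A plain Thread discards its target's return value, which is why the example prints inside fetch. With concurrent.futures.ThreadPoolExecutor the return value comes back through a Future instead; the sketch below assumes requests is installed, and the timeout and max_workers values are illustrative.

from concurrent.futures import ThreadPoolExecutor, as_completed
import requests

def fetch(url):
    response = requests.get(url, timeout=10)
    return url, len(response.content)

if __name__ == '__main__':
    urls = ['http://example.com', 'http://example.org', 'http://example.net']
    with ThreadPoolExecutor(max_workers=3) as executor:
        futures = [executor.submit(fetch, url) for url in urls]
        # as_completed yields each Future as soon as its thread finishes.
        for future in as_completed(futures):
            url, size = future.result()
            print(f'{url}: {size} bytes')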

Example 2: Sharing a resource across threads

import threading

count = 0
lock = threading.Lock()  # protects updates to the shared counter

def increment():
    global count
    with lock:
        count += 1
        print(f'Count: {count}')

if __name__ == '__main__':
    threads = []

    for i in range(10):
        t = threading.Thread(target=increment)
        t.start()
        threads.append(t)

    for t in threads:
        t.join()

    print('Final count:', count)
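
To see why the lock matters: count += 1 is a read-modify-write sequence, and without the lock two threads can read the same old value and overwrite each other's increment. A small illustrative sketch (the iteration count is arbitrary, and how often updates are actually lost depends on the interpreter version and timing):

import threading

count = 0

def increment_unsafe(times):
    global count
    for _ in range(times):
        count += 1  # not atomic: another thread can interleave here

if __name__ == '__main__':
    threads = [threading.Thread(target=increment_unsafe, args=(100_000,))
               for _ in range(10)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    # Often prints less than 1000000 because some increments are lost.
    print('Unsafe final count:', count)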

Example 3: Producer/consumer with a thread-safe queue

import threading
import queue

def producer(q, name):
    for i in range(5):
        message = f'Message {i} from {name}'
        q.put(message)
        print(f'Produced: {message}')

def consumer(q, name):
    while True:
        message = q.get()
        if message is None:  # sentinel: producers are done, stop consuming
            break
        print(f'Consumed by {name}: {message}')

if __name__ == '__main__':
    q = queue.Queue()

    p1 = threading.Thread(target=producer, args=(q, 'Producer 1'))
    p2 = threading.Thread(target=producer, args=(q, 'Producer 2'))
    c1 = threading.Thread(target=consumer, args=(q, 'Consumer 1'))
    c2 = threading.Thread(target=consumer, args=(q, 'Consumer 2'))

    p1.start()
    p2.start()
    c1.start()
    c2.start()

    p1.join()
    p2.join()

    # Producers are finished; send one sentinel per consumer so each can exit.
    # (Checking q.empty() instead would let a consumer quit while messages
    # are still being produced, or before any have been produced at all.)
    q.put(None)
    q.put(None)

    c1.join()
    c2.join()

    print('All messages consumed.')
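
An alternative shutdown strategy avoids sentinels: consumers call q.task_done() for every item, the consumers run as daemon threads so they do not keep the process alive, and the main thread waits on q.join() after the producers finish. A minimal sketch under those assumptions:

import threading
import queue

def producer(q, name):
    for i in range(5):
        q.put(f'Message {i} from {name}')

def consumer(q, name):
    while True:
        message = q.get()
        print(f'Consumed by {name}: {message}')
        q.task_done()  # mark this item as fully processed

if __name__ == '__main__':
    q = queue.Queue()

    producers = [threading.Thread(target=producer, args=(q, f'Producer {i}'))
                 for i in (1, 2)]
    for p in producers:
        p.start()

    for i in (1, 2):
        # Daemon consumers are abandoned when the main thread exits.
        threading.Thread(target=consumer, args=(q, f'Consumer {i}'),
                         daemon=True).start()

    for p in producers:
        p.join()   # wait until everything has been produced
    q.join()       # then wait until every item has been marked done

    print('All messages consumed.')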