Official documentation: https://docs.python.org/2/library/multiprocessing.html#module-multiprocessing

1. Synchronization primitives such as locks, conditions, and queues — the official test example:

#
# A test file for the `multiprocessing` package
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#

import time, sys, random
from Queue import Empty

import multiprocessing    # may get overwritten


#### TEST_VALUE

def value_func(running, mutex):
    random.seed()
    time.sleep(random.random()*4)

    mutex.acquire()
    print '\n\t\t\t' + str(multiprocessing.current_process()) + ' has finished'
    running.value -= 1
    mutex.release()

def test_value():
    TASKS = 10
    running = multiprocessing.Value('i', TASKS)
    mutex = multiprocessing.Lock()

    for i in range(TASKS):
        p = multiprocessing.Process(target=value_func, args=(running, mutex))
        p.start()

    while running.value > 0:
        time.sleep(0.08)
        mutex.acquire()
        print running.value,
        sys.stdout.flush()
        mutex.release()

    print
    print 'No more running processes'


#### TEST_QUEUE

def queue_func(queue):
    for i in range(30):
        time.sleep(0.5 * random.random())
        queue.put(i*i)
    queue.put('STOP')

def test_queue():
    q = multiprocessing.Queue()

    p = multiprocessing.Process(target=queue_func, args=(q,))
    p.start()

    o = None
    while o != 'STOP':
        try:
            o = q.get(timeout=0.3)
            print o,
            sys.stdout.flush()
        except Empty:
            print 'TIMEOUT'

    print


#### TEST_CONDITION

def condition_func(cond):
    cond.acquire()
    print '\t' + str(cond)
    time.sleep(2)
    print '\tchild is notifying'
    print '\t' + str(cond)
    cond.notify()
    cond.release()

def test_condition():
    cond = multiprocessing.Condition()

    p = multiprocessing.Process(target=condition_func, args=(cond,))
    print cond

    cond.acquire()
    print cond
    cond.acquire()
    print cond

    p.start()

    print 'main is waiting'
    cond.wait()
    print 'main has woken up'

    print cond
    cond.release()
    print cond
    cond.release()

    p.join()
    print cond


#### TEST_SEMAPHORE

def semaphore_func(sema, mutex, running):
    sema.acquire()

    mutex.acquire()
    running.value += 1
    print running.value, 'tasks are running'
    mutex.release()

    random.seed()
    time.sleep(random.random()*2)

    mutex.acquire()
    running.value -= 1
    print '%s has finished' % multiprocessing.current_process()
    mutex.release()

    sema.release()

def test_semaphore():
    sema = multiprocessing.Semaphore(3)
    mutex = multiprocessing.RLock()
    running = multiprocessing.Value('i', 0)

    processes = [
        multiprocessing.Process(target=semaphore_func,
                                args=(sema, mutex, running))
        for i in range(10)
        ]

    for p in processes:
        p.start()

    for p in processes:
        p.join()


#### TEST_JOIN_TIMEOUT

def join_timeout_func():
    print '\tchild sleeping'
    time.sleep(5.5)
    print '\n\tchild terminating'

def test_join_timeout():
    p = multiprocessing.Process(target=join_timeout_func)
    p.start()

    print 'waiting for process to finish'

    while 1:
        p.join(timeout=1)
        if not p.is_alive():
            break
        print '.',
        sys.stdout.flush()


#### TEST_EVENT

def event_func(event):
    print '\t%r is waiting' % multiprocessing.current_process()
    event.wait()
    print '\t%r has woken up' % multiprocessing.current_process()

def test_event():
    event = multiprocessing.Event()

    processes = [multiprocessing.Process(target=event_func, args=(event,))
                 for i in range(5)]

    for p in processes:
        p.start()

    print 'main is sleeping'
    time.sleep(2)

    print 'main is setting event'
    event.set()

    for p in processes:
        p.join()


#### TEST_SHAREDVALUES

def sharedvalues_func(values, arrays, shared_values, shared_arrays):
    for i in range(len(values)):
        v = values[i][1]
        sv = shared_values[i].value
        assert v == sv

    for i in range(len(values)):
        a = arrays[i][1]
        sa = list(shared_arrays[i][:])
        assert a == sa

    print 'Tests passed'

def test_sharedvalues():
    values = [
        ('i', 10),
        ('h', -2),
        ('d', 1.25)
        ]
    arrays = [
        ('i', range(100)),
        ('d', [0.25 * i for i in range(100)]),
        ('H', range(1000))
        ]

    shared_values = [multiprocessing.Value(id, v) for id, v in values]
    shared_arrays = [multiprocessing.Array(id, a) for id, a in arrays]

    p = multiprocessing.Process(
        target=sharedvalues_func,
        args=(values, arrays, shared_values, shared_arrays)
        )
    p.start()
    p.join()

    assert p.exitcode == 0


####

def test(namespace=multiprocessing):
    global multiprocessing

    multiprocessing = namespace

    for func in [ test_value, test_queue, test_condition,
                  test_semaphore, test_join_timeout, test_event,
                  test_sharedvalues ]:
        print '\n\t######## %s\n' % func.__name__
        func()

    ignore = multiprocessing.active_children()    # cleanup any old processes

    if hasattr(multiprocessing, '_debug_info'):
        info = multiprocessing._debug_info()
        if info:
            print info
            raise ValueError('there should be no positive refcounts left')


if __name__ == '__main__':
    multiprocessing.freeze_support()

    assert len(sys.argv) in (1, 2)

    if len(sys.argv) == 1 or sys.argv[1] == 'processes':
        print ' Using processes '.center(79, '-')
        namespace = multiprocessing
    elif sys.argv[1] == 'manager':
        print ' Using processes and a manager '.center(79, '-')
        namespace = multiprocessing.Manager()
        namespace.Process = multiprocessing.Process
        namespace.current_process = multiprocessing.current_process
        namespace.active_children = multiprocessing.active_children
    elif sys.argv[1] == 'threads':
        print ' Using threads '.center(79, '-')
        import multiprocessing.dummy as namespace
    else:
        print 'Usage:\n\t%s [processes | manager | threads]' % sys.argv[0]
        raise SystemExit(2)

    test(namespace)
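
One detail worth calling out in test_condition() above: multiprocessing.Condition() is built on a recursive lock by default, which is why the main process can call acquire() twice before wait(); wait() releases the underlying lock while blocking, so the child can acquire it and call notify(). The following minimal sketch reduces that handshake to one parent and one child (written in the same Python 2 style as the example; the helper name `child` is illustrative, not from the official file):

import time
import multiprocessing

def child(cond):
    # This acquire() blocks until the parent's wait() releases the lock.
    cond.acquire()
    time.sleep(1)          # pretend to prepare some state
    cond.notify()          # wake the waiting parent
    cond.release()

if __name__ == '__main__':
    cond = multiprocessing.Condition()
    p = multiprocessing.Process(target=child, args=(cond,))
    cond.acquire()         # parent takes the lock before starting the child
    p.start()
    print 'parent is waiting'
    cond.wait()            # releases the lock while blocking, reacquires on wake-up
    print 'parent has woken up'
    cond.release()
    p.join()

Because the parent already holds the lock before p.start(), the child cannot reach notify() until the parent is actually waiting, so the notification cannot be lost.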

The following example shows how to use queues to feed tasks to a collection of worker processes and to collect the results:

# Simple example which uses a pool of workers to carry out some tasks.
#
# Notice that the results will probably not come out of the output
# queue in the same order as the corresponding tasks were put on the
# input queue.  If it is important to get the results back in the
# original order then consider using `Pool.map()` or `Pool.imap()`
# (which will save on the amount of code needed anyway).
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#

import time
import random

from multiprocessing import Process, Queue, current_process, freeze_support

#
# Function run by worker processes
#

def worker(input, output):
    for func, args in iter(input.get, 'STOP'):
        result = calculate(func, args)
        output.put(result)

#
# Function used to calculate result
#

def calculate(func, args):
    result = func(*args)
    return '%s says that %s%s = %s' % \
        (current_process().name, func.__name__, args, result)

#
# Functions referenced by tasks
#

def mul(a, b):
    time.sleep(0.5*random.random())
    return a * b

def plus(a, b):
    time.sleep(0.5*random.random())
    return a + b

#
#
#

def test():
    NUMBER_OF_PROCESSES = 4
    TASKS1 = [(mul, (i, 7)) for i in range(20)]
    TASKS2 = [(plus, (i, 8)) for i in range(10)]

    # Create queues
    task_queue = Queue()
    done_queue = Queue()

    # Submit tasks
    for task in TASKS1:
        task_queue.put(task)

    # Start worker processes
    for i in range(NUMBER_OF_PROCESSES):
        Process(target=worker, args=(task_queue, done_queue)).start()

    # Get and print results
    print 'Unordered results:'
    for i in range(len(TASKS1)):
        print '\t', done_queue.get()

    # Add more tasks using `put()`
    for task in TASKS2:
        task_queue.put(task)

    # Get and print some more results
    for i in range(len(TASKS2)):
        print '\t', done_queue.get()

    # Tell child processes to stop
    for i in range(NUMBER_OF_PROCESSES):
        task_queue.put('STOP')


if __name__ == '__main__':
    freeze_support()
    test()
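
As the header comment notes, Pool.map() needs far less code and also returns the results in the original task order. Below is a rough pool-based equivalent of the example above — a sketch for comparison, not part of the official documentation. Since Pool.map() passes a single argument to the worker function, each task's operands are packed into one tuple here:

import time
import random
from multiprocessing import Pool, freeze_support

def mul(args):
    a, b = args
    time.sleep(0.5*random.random())
    return a * b

def plus(args):
    a, b = args
    time.sleep(0.5*random.random())
    return a + b

if __name__ == '__main__':
    freeze_support()
    pool = Pool(processes=4)
    # map() blocks until every task is done and preserves the input order
    print pool.map(mul, [(i, 7) for i in range(20)])
    print pool.map(plus, [(i, 8) for i in range(10)])
    pool.close()
    pool.join()

Calling close() followed by join() makes the parent wait until every worker process has exited, which replaces the explicit 'STOP' sentinels used in the queue-based version.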

