（`celery -A proj report` 的输出略。）master 分支也存在这个问题。相关代码如下：

1. deploy_tomcat2.py
from .AnsibleApi import CallApi

def django_process(jira_num):
    """Deploy via the Ansible API when *jira_num* is an all-digit string.

    :param jira_num: JIRA ticket number as a string; non-numeric input is ignored.
    :return: the raw result dict from ``CallApi.run_task()``, or ``None``
             when *jira_num* is not numeric.
    """
    # Deployment parameters are hard-coded for this reproduction case.
    server = '10.10.51.30'
    name = 'abc'
    port = 11011
    code = 'efs'
    jdk = '1.12.13'
    jvm = 'xxx'
    if str.isdigit(jira_num):
        call = CallApi(server, name, port, code, jdk, jvm)
        return call.run_task()
`
import logging
from collections import namedtuple

from django.conf import settings
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager
from ansible.inventory import Inventory
from ansible.playbook.play import Play
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.plugins.callback import CallbackBase

from .Logger import Logger

# Module-level logger writing to a fixed file path.
Log = Logger('/tmp/auto_deploy_tomcat.log', logging.INFO)
class ResultCallback(CallbackBase):
    """Ansible callback plugin that collects per-host results in memory.

    Results are keyed by host name in three dicts: ``host_ok``,
    ``host_failed`` and ``host_unreachable``.
    """

    def __init__(self, *args, **kwargs):
        # The garbled original read ``(self, args, * kwargs)`` — restore the
        # conventional *args/**kwargs pass-through to the base class.
        super(ResultCallback, self).__init__(*args, **kwargs)
        self.host_ok = {}
        self.host_unreachable = {}
        self.host_failed = {}

    def v2_runner_on_unreachable(self, result):
        self.host_unreachable[result._host.get_name()] = result

    def v2_runner_on_ok(self, result, *args, **kwargs):
        self.host_ok[result._host.get_name()] = result

    def v2_runner_on_failed(self, result, *args, **kwargs):
        self.host_failed[result._host.get_name()] = result
class CallApi(object):
    """Run an auto-deploy-tomcat play against a single host via the Ansible API.

    Credentials come from Django settings; results are gathered through
    :class:`ResultCallback` and returned as a plain dict from ``run_task``.
    """

    user = settings.SSH_USER
    ssh_private_key_file = settings.SSH_PRIVATE_KEY_FILE
    results_callback = ResultCallback()
    # Field name must be 'check' so that Options(..., check=False) below works.
    Options = namedtuple('Options',
                         ['connection', 'module_path', 'private_key_file', 'forks',
                          'become', 'become_method', 'become_user', 'check'])

    def __init__(self, ip, name, port, code, jdk, jvm):
        """Store deployment parameters; no Ansible objects are built yet."""
        self.ip = ip
        self.name = name
        self.port = port
        self.code = code
        self.jdk = jdk
        self.jvm = jvm
        # Instance-level callback shadows the class attribute of the same name.
        self.results_callback = ResultCallback()
        self.results_raw = {}

    def _gen_user_task(self):
        """Build the task list: copy the deploy script, then echo the result."""
        tasks = []
        deploy_script = 'autodeploy/abc.sh'
        dst_script = '/tmp/abc.sh'
        cargs = dict(src=deploy_script, dest=dst_script, owner=self.user,
                     group=self.user, mode='0755')
        args = "%s %s %d %s %s '%s'" % (dst_script, self.name, self.port,
                                        self.code, self.jdk, self.jvm)
        tasks.append(dict(action=dict(module='copy', args=cargs), register='shell_out'))
        tasks.append(dict(action=dict(module='debug', args=dict(msg='{{shell_out}}'))))
        # tasks.append(dict(action=dict(module='command', args=args)))
        # tasks.append(dict(action=dict(module='command', args=args), register='result'))
        # tasks.append(dict(action=dict(module='debug', args=dict(msg='{{result.stdout}}'))))
        self.tasks = tasks

    def _set_option(self):
        """Assemble loader, inventory, options and the Play object."""
        self._gen_user_task()
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.options = self.Options(connection='smart', module_path=None,
                                    private_key_file=self.ssh_private_key_file,
                                    forks=None, become=True, become_method='sudo',
                                    become_user='root', check=False)
        self.inventory = Inventory(loader=self.loader,
                                   variable_manager=self.variable_manager,
                                   host_list=[self.ip])
        self.variable_manager.set_inventory(self.inventory)
        play_source = dict(
            name="auto deploy tomcat",
            hosts=self.ip,
            remote_user=self.user,
            gather_facts='no',
            tasks=self.tasks,
        )
        self.play = Play().load(play_source,
                                variable_manager=self.variable_manager,
                                loader=self.loader)

    def run_task(self):
        """Execute the play and return {'success': .., 'failed': .., 'unreachable': ..}.

        NOTE(review): the original left an active ``rdb.set_trace()`` here,
        which blocks a celery worker waiting for a remote debugger — removed.
        """
        self.results_raw = {'success': {}, 'failed': {}, 'unreachable': {}}
        tqm = None
        self._set_option()
        try:
            tqm = TaskQueueManager(
                inventory=self.inventory,
                variable_manager=self.variable_manager,
                loader=self.loader,
                options=self.options,
                passwords=None,
                stdout_callback=self.results_callback,
            )
            result = tqm.run(self.play)
        finally:
            # Always release TQM resources, even when tqm.run raises.
            if tqm is not None:
                tqm.cleanup()
        for host, result in self.results_callback.host_ok.items():
            self.results_raw['success'][host] = result._result
        for host, result in self.results_callback.host_failed.items():
            self.results_raw['failed'][host] = result._result
        for host, result in self.results_callback.host_unreachable.items():
            self.results_raw['unreachable'][host] = result._result
        Log.info("result is :%s" % self.results_raw)
        return self.results_raw
3.tasks.py
from __future__ import absolute_import, unicode_literals

from celery import shared_task

from .deploy_tomcat2 import django_process


@shared_task
def deploy(num):
    """Celery task: run the tomcat deploy for JIRA number *num* (a string)."""
    return django_process(num)
在 Django 控制台 :python manage.py shell 中,我可以使用 deploy('123') ,代码调用 ansibleApi 将 abc.sh 成功复制到 dst 服务器。
但是当我使用 `deploy.apply_async(args=['1334'], queue='queue.ops.deploy', routing_key='ops.deploy')` 时它不起作用。
找了几天,还是不行。
AnsibleApi 返回空字典 :{'success': {}, 'failed': {}, 'unreachable': {}}
有两种方法可以解决这个问题,禁用断言:
1. celery 开始的地方设置 export PYTHONOPTIMIZE=1 或者用这个参数启动 celery -O OPTIMIZATION
2.disable python packet multiprocessing process.py line 102:
assert not _current_process._config.get('daemon'), \
'daemonic processes are not allowed to have children'
有两种方法可以解决这个问题,禁用断言:
1. celery 开始的地方设置 export PYTHONOPTIMIZE=1 或者用这个参数启动 celery -O OPTIMIZATION
2.disable python packet multiprocessing process.py line 102:
assert not _current_process._config.get('daemon'), \
    'daemonic processes are not allowed to have children'
@Xuexiang825抱歉,它不起作用,我使用 Python 3.6.3 和 ansible 2.4.2.0,celery 4.1.0。 我的剧本执行器显示消息但什么也不做。此外,你能告诉我一些关于参数 -O OPTIMIZATION 的信息吗?
@Xuexiang825 我刚刚看了你在这篇文章中的解答,我发现你也是中国人,那我就用中文了,我用了export 变量好使了! 但是-O 参数好像并没有效果,celery worker -A celery_worker.celery --loglevel=info -O OPTIMIZATION 这是我的启动命令。 感觉没什么不对的地方,对吧
最有用的评论
有两种方法可以解决这个问题,禁用断言:
1. celery 开始的地方设置 export PYTHONOPTIMIZE=1 或者用这个参数启动 celery -O OPTIMIZATION
2.disable python packet multiprocessing process.py line 102:
assert not _current_process._config.get('daemon'), \ 'daemonic processes are not allowed to have children'