## page was renamed from Celery = Celery = Celery is an asynchronous task queue/job queue based on distributed message passing. http://docs.celeryproject.org/en/latest/getting-started/first-steps-with-celery.html == Slackware 14 installation == * easy_install celery * pip install celery Check installation: {{{#!highlight bash python Python 2.7.3 (default, Jul 3 2012, 19:58:39) [GCC 4.7.1] on linux2 Type "help", "copyright", "credits" or "license" for more information. >>> import celery >>> celery.__version__ '3.1.7' >>> }}} == Test app with Redis == Run redis: {{{#!highlight bash $redis-server & $redis-cli redis 127.0.0.1:6379> quit }}} App: {{{#!highlight python # celeryTest.py from celery import Celery app = Celery('tasks', broker='redis://localhost') app.conf.update(CELERY_RESULT_BACKEND="redis://") @app.task def add(x, y): return x + y }}} {{{#!highlight bash $celery -A celeryTest worker --loglevel=info }}} Run task {{{#!highlight bash $ python Python 2.7.3 (default, Jul 3 2012, 19:58:39) [GCC 4.7.1] on linux2 Type "help", "copyright", "credits" or "license" for more information. 
>>> from celeryTest import add >>> xx=add.delay(3,3) >>> xx.ready() True >>> print(xx.result) 6 }}} == Docker compose example == * docker-compose stop * docker-compose build * docker-compose up === app.py === {{{#!highlight python import time import redis from flask import Flask,jsonify from celery import Celery celery = Celery() celery.config_from_object('celeryconfig') """ docker-compose build docker-compose run docker exec -it celerytest_web_1 sh docker-compose build docker-compose restart docker-compose stop docker system prune -a docker-compose up docker exec -it celerytest_web_1 sh pip freeze """ app = Flask(__name__) cache = redis.Redis(host='redis', port=6379) def get_hit_count(): retries = 5 while True: try: return cache.incr('hits') except redis.exceptions.ConnectionError as exc: if retries == 0: raise exc retries -= 1 time.sleep(0.5) @app.route('/') def hello(): count = get_hit_count() return 'Hello World!?= I have been seen {} times.\n'.format(count) @app.route('/add/<int:op1>/<int:op2>') def add(op1=None,op2=None): """ http://localhost:5000/add/2/8 """ result=(celery.send_task('celery_worker.add', (op1,op2))) return jsonify( add_res=result.get()) @app.route('/mul/<int:op1>/<int:op2>') def mul(op1=None,op2=None): """ http://localhost:5000/mul/2/8 """ result=(celery.send_task('celery_worker.mul', (op1,op2))) return jsonify( mul_res=result.get()) }}} === Dockerfile-celery === {{{ FROM python:3.7-alpine WORKDIR /code RUN apk add --no-cache gcc musl-dev linux-headers COPY requirements.txt requirements.txt RUN pip install -r requirements.txt EXPOSE 5000 COPY . . CMD ["celery","-A","celery_worker","worker","--loglevel=info"] }}} === Dockerfile === {{{ FROM python:3.7-alpine WORKDIR /code ENV FLASK_APP=app.py ENV FLASK_RUN_HOST=0.0.0.0 RUN apk add --no-cache gcc musl-dev linux-headers COPY requirements.txt requirements.txt RUN pip install -r requirements.txt EXPOSE 5000 COPY . . CMD ["flask", "run"] }}} === docker-compose.yml === {{{#!highlight yaml version: "3.3" services: web: build: . 
ports: - "5000:5000" volumes: - .:/code environment: FLASK_ENV: development redis: image: "redis:alpine" celery: build: context: . dockerfile: Dockerfile-celery }}} === celery_client.py === {{{#!highlight python from celery import Celery def callback(taskid): print("callback taskid %s"%(taskid)) print("callback ready %d task id %s"%(result.get() , result.task_id )) celery = Celery() celery.config_from_object('celeryconfig') result=(celery.send_task('celery_worker.add', (2,2))) result.on_ready.then(callback) print(result.get()) }}} === celery_worker.py === {{{#!highlight python from celery import Celery from celery import shared_task app = Celery('tasks', broker='redis://redis') app.conf.update(CELERY_RESULT_BACKEND="redis://redis") @app.task def add(x, y): return x + y @shared_task def mul(x,y): return x*y }}} === requirements.txt === {{{ flask redis celery }}} === celeryconfig.py === {{{#!highlight python broker_url = 'redis://redis' result_backend = 'redis://redis' task_serializer = 'json' result_serializer = 'json' accept_content = ['json'] timezone = 'Europe/Lisbon' enable_utc = True }}}