How can I run Python Django and Celery using docker-compose?

Posted 2024-09-30 06:12:48


I have a Python application that uses Django and Celery, and I am trying to run it with Docker / docker-compose, because I also use Redis and DynamoDB.

The problem is:

I cannot run the WSGI and Celery services at the same time, because only the first instruction works.

version: '3.3'

services:
  redis:
    image: redis:3.2-alpine
    volumes:
      - redis_data:/data
    ports:
      - "6379:6379"
  dynamodb:
    image: dwmkerr/dynamodb
    ports:
      - "3000:8000"
    volumes:
      - dynamodb_data:/data
  jobs:
    build:
      context: nubo-async-cfe-seces
      dockerfile: Dockerfile
    environment:
      - REDIS_HOST=redisrvi
      - PYTHONUNBUFFERED=0
      - CC_DYNAMODB_NAMESPACE=None
      - CC_DYNAMODB_ACCESS_KEY_ID=anything
      - CC_DYNAMODB_SECRET_ACCESS_KEY=anything
      - CC_DYNAMODB_HOST=dynamodb
      - CC_DYNAMODB_PORT=8000
      - CC_DYNAMODB_IS_SECURE=False

    command: >
      bash -c "celery worker -A tasks.async_service -Q dynamo-queue -E --loglevel=ERROR &&
               uwsgi --socket 0.0.0.0:8080 --protocol=http --wsgi-file nubo_async/wsgi.py"
    depends_on:
      - redis
      - dynamodb
    volumes:
      - .:/jobs
    ports:
      - "9090:8080"
volumes:
  redis_data:
  dynamodb_data:

Has anyone had the same problem?
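Note: in bash -c "A && B", B only starts after A has exited successfully; a foreground Celery worker never exits, so the uwsgi line is never reached. A minimal single-container workaround (only a sketch; the accepted answer below splits the processes into separate containers instead) would background the worker:

    command: >
      bash -c "celery worker -A tasks.async_service -Q dynamo-queue -E --loglevel=ERROR &
               uwsgi --socket 0.0.0.0:8080 --protocol=http --wsgi-file nubo_async/wsgi.py"

This leaves the worker process unsupervised inside the container, which is one reason running each process in its own service is generally preferred.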


Tags: docker, image, redis, host, data, async, service, jobs
2 Answers

Here is the docker-compose file suggested by @Satevg, which runs the Django and Celery apps in separate containers. It works fine!

version: '3.3'

services:
  redis:
    image: redis:3.2-alpine
    volumes:
      - redis_data:/data
    ports:
      - "6379:6379"
  dynamodb:
    image: dwmkerr/dynamodb
    ports:
      - "3000:8000"
    volumes:
      - dynamodb_data:/data
  jobs:
    build:
      context: nubo-async-cfe-services
      dockerfile: Dockerfile
    environment:
      - REDIS_HOST=redis
      - PYTHONUNBUFFERED=0
      - CC_DYNAMODB_NAMESPACE=None
      - CC_DYNAMODB_ACCESS_KEY_ID=anything
      - CC_DYNAMODB_SECRET_ACCESS_KEY=anything
      - CC_DYNAMODB_HOST=dynamodb
      - CC_DYNAMODB_PORT=8000
      - CC_DYNAMODB_IS_SECURE=False    
    command: bash -c "uwsgi --socket 0.0.0.0:8080 --protocol=http --wsgi-file nubo_async/wsgi.py"
    depends_on:
      - redis
      - dynamodb
    volumes:
      - .:/jobs
    ports:
      - "9090:8080"
  celery:
    build:
      context: nubo-async-cfe-services
      dockerfile: Dockerfile
    environment:
      - REDIS_HOST=redis
      - PYTHONUNBUFFERED=0
      - CC_DYNAMODB_NAMESPACE=None
      - CC_DYNAMODB_ACCESS_KEY_ID=anything
      - CC_DYNAMODB_SECRET_ACCESS_KEY=anything
      - CC_DYNAMODB_HOST=dynamodb
      - CC_DYNAMODB_PORT=8000
      - CC_DYNAMODB_IS_SECURE=False
    command: celery worker -A tasks.async_service -Q dynamo-queue -E --loglevel=ERROR
    depends_on:
      - redis
      - dynamodb
    volumes:
      - .:/jobs
volumes:
  redis_data:
  dynamodb_data:
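With the web process and the worker split into separate services, both start together and the worker can be scaled independently. For example (standard docker-compose commands, not part of the original answer):

# build the image and start all services
docker-compose up --build

# run two Celery worker containers
docker-compose up --scale celery=2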

You can refer to the Saleor project's docker-compose setup. I suggest letting Celery run as its own worker service, depending only on redis as the broker. See the configuration from its docker-compose.yml file:

services:
  web:
    build:
      context: .
      dockerfile: ./Dockerfile
      args:
        STATIC_URL: '/static/'
    restart: unless-stopped
    networks:
      - saleor-backend-tier
    env_file: common.env
    depends_on:
      - db
      - redis

  celery:
    build:
      context: .
      dockerfile: ./Dockerfile
      args:
        STATIC_URL: '/static/'
    command: celery -A saleor worker --app=saleor.celeryconf:app --loglevel=info
    restart: unless-stopped
    networks:
      - saleor-backend-tier
    env_file: common.env
    depends_on:
      - redis

Also note that the connection to Redis from both services is configured separately, through environment variables defined in the common.env file.

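The contents of common.env are not reproduced in the post. As a rough, hypothetical sketch of what such an env file could contain for this setup (the variable names assume the Django settings read the broker and database URLs from the environment):

# common.env (hypothetical sketch, not the actual file from the Saleor repo)
CELERY_BROKER_URL=redis://redis:6379/1
DATABASE_URL=postgres://saleor:saleor@db/saleor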
