From 297d70b009627b76d9dd49b6963ef2a4fb1648bc Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Sat, 21 Oct 2023 03:25:54 +0800 Subject: [PATCH 01/17] Replace APScheduler to Celery task --- backend/app/.env.example | 11 ++-- backend/app/api/v1/mixed/config.py | 45 ---------------- backend/app/api/v1/mixed/tests.py | 36 ++----------- backend/app/api/v1/task.py | 54 ++++++------------- backend/app/common/task.py | 60 --------------------- backend/app/core/celery.py | 20 +++++++ backend/app/core/conf.py | 24 ++++----- backend/app/core/registrar.py | 5 -- backend/app/schemas/task.py | 17 ------ backend/app/services/task_service.py | 78 ---------------------------- backend/app/tasks.py | 10 ++++ requirements.txt | 3 +- 12 files changed, 70 insertions(+), 293 deletions(-) delete mode 100644 backend/app/common/task.py create mode 100644 backend/app/core/celery.py delete mode 100644 backend/app/schemas/task.py delete mode 100644 backend/app/services/task_service.py create mode 100644 backend/app/tasks.py diff --git a/backend/app/.env.example b/backend/app/.env.example index 93bfd659..d3cc10dd 100644 --- a/backend/app/.env.example +++ b/backend/app/.env.example @@ -10,11 +10,12 @@ REDIS_HOST='127.0.0.1' REDIS_PORT=6379 REDIS_PASSWORD='' REDIS_DATABASE=0 -# APScheduler -APS_REDIS_HOST='127.0.0.1' -APS_REDIS_PORT=6379 -APS_REDIS_PASSWORD='' -APS_REDIS_DATABASE=1 +# Celery +CELERY_REDIS_HOST='127.0.0.1' +CELERY_REDIS_PORT=6379 +CELERY_REDIS_PASSWORD='' +CELERY_REDIS_DATABASE_BROKER=1 +CELERY_REDIS_DATABASE_BACKEND=2 # Token TOKEN_SECRET_KEY='1VkVF75nsNABBjK_7-qz7GtzNy3AMvktc9TCPwKczCk' # Opera Log diff --git a/backend/app/api/v1/mixed/config.py b/backend/app/api/v1/mixed/config.py index 4b9c6f2c..bdc7dec1 100644 --- a/backend/app/api/v1/mixed/config.py +++ b/backend/app/api/v1/mixed/config.py @@ -5,54 +5,10 @@ from backend.app.common.rbac import DependsRBAC from backend.app.common.response.response_schema import response_base -from backend.app.core.conf import settings router = APIRouter() -@router.get('/configs', summary='获取系统配置', dependencies=[DependsRBAC]) -async def get_sys_config(): - return await response_base.success( - data={ - 'title': settings.TITLE, - 'version': settings.VERSION, - 'description': settings.DESCRIPTION, - 'docs_url': settings.DOCS_URL, - 'redocs_url': settings.REDOCS_URL, - 'openapi_url': settings.OPENAPI_URL, - 'environment': settings.ENVIRONMENT, - 'static_files': settings.STATIC_FILES, - 'uvicorn_host': settings.UVICORN_HOST, - 'uvicorn_port': settings.UVICORN_PORT, - 'uvicorn_reload': settings.UVICORN_RELOAD, - 'db_host': settings.DB_HOST, - 'db_port': settings.DB_PORT, - 'db_user': settings.DB_USER, - 'db_database': settings.DB_DATABASE, - 'db_charset': settings.DB_CHARSET, - 'redis_host': settings.REDIS_HOST, - 'redis_port': settings.REDIS_PORT, - 'redis_database': settings.REDIS_DATABASE, - 'redis_timeout': settings.REDIS_TIMEOUT, - 'aps_redis_host': settings.APS_REDIS_HOST, - 'aps_redis_port': settings.APS_REDIS_PORT, - 'aps_redis_database': settings.APS_REDIS_DATABASE, - 'aps_redis_timeout': settings.APS_REDIS_TIMEOUT, - 'aps_coalesce': settings.APS_COALESCE, - 'aps_max_instances': settings.APS_MAX_INSTANCES, - 'aps_misfire_grace_time': settings.APS_MISFIRE_GRACE_TIME, - 'token_algorithm': settings.TOKEN_ALGORITHM, - 'token_expire_seconds': settings.TOKEN_EXPIRE_SECONDS, - 'token_swagger_url': settings.TOKEN_URL_SWAGGER, - 'access_log_filename': settings.LOG_STDOUT_FILENAME, - 'error_log_filename': settings.LOG_STDERR_FILENAME, - 'middleware_cors': 
settings.MIDDLEWARE_CORS, - 'middleware_gzip': settings.MIDDLEWARE_GZIP, - 'middleware_access': settings.MIDDLEWARE_ACCESS, - } - ) - - @router.get('/routers', summary='获取所有路由', dependencies=[DependsRBAC]) async def get_all_route(request: Request): data = [] @@ -64,7 +20,6 @@ async def get_all_route(request: Request): 'name': route.name, 'summary': route.summary, 'methods': route.methods, - 'dependencies': route.dependencies, } ) return await response_base.success(data={'route_list': data}) diff --git a/backend/app/api/v1/mixed/tests.py b/backend/app/api/v1/mixed/tests.py index a228e1a4..bef46ce7 100644 --- a/backend/app/api/v1/mixed/tests.py +++ b/backend/app/api/v1/mixed/tests.py @@ -1,43 +1,17 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -import datetime from fastapi import APIRouter, File, UploadFile, Form -from backend.app.common.response.response_schema import response_base -from backend.app.common.task import scheduler +from backend.app.core.celery import celery_app router = APIRouter(prefix='/tests') -def task_demo(): - print('普通任务') - - -async def task_demo_async(): - print('异步任务') - - -@router.post('/sync', summary='测试添加同步任务') -async def task_demo_add(): - scheduler.add_job( - task_demo, 'interval', seconds=1, id='task_demo', replace_existing=True, start_date=datetime.datetime.now() - ) - - return await response_base.success() - - -@router.post('/async', summary='测试添加异步任务') -async def task_demo_add_async(): - scheduler.add_job( - task_demo_async, - 'interval', - seconds=1, - id='task_demo_async', - replace_existing=True, - start_date=datetime.datetime.now(), - ) - return await response_base.success() +@router.post('/task/async', summary='测试异步任务') +async def task_demo_async_send(): + celery_app.send_task('task_demo_async') + return {'msg': 'Success'} @router.post('/files', summary='测试文件上传') diff --git a/backend/app/api/v1/task.py b/backend/app/api/v1/task.py index 1a78c52d..e7b4565b 100644 --- a/backend/app/api/v1/task.py +++ b/backend/app/api/v1/task.py @@ -1,46 +1,24 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from fastapi import APIRouter +from celery.exceptions import BackendGetMetaError +from celery.result import AsyncResult +from fastapi import APIRouter, Path -from backend.app.common.rbac import DependsRBAC -from backend.app.common.jwt import DependsJwtAuth +from backend.app.common.exception.errors import NotFoundError +from backend.app.common.response.response_code import CustomResponseCode from backend.app.common.response.response_schema import response_base -from backend.app.services.task_service import TaskService router = APIRouter() -@router.get('', summary='获取任务列表', dependencies=[DependsJwtAuth]) -async def get_all_tasks(): - tasks_list = await TaskService.get_task_list() - return await response_base.success(data=tasks_list) - - -@router.get('/{pk}', summary='获取任务详情', dependencies=[DependsJwtAuth]) -async def get_task(pk: str): - task = await TaskService.get_task(pk=pk) - return await response_base.success(data=task) - - -@router.post('/{pk}/run', summary='执行任务', dependencies=[DependsRBAC]) -async def run_task(pk: str): - task = await TaskService().run(pk=pk) - return await response_base.success(data=task) - - -@router.post('/{pk}/pause', summary='暂停任务', dependencies=[DependsRBAC]) -async def pause_task(pk: str): - task = await TaskService().pause(pk=pk) - return await response_base.success(data=task) - - -@router.post('/{pk}/resume', summary='恢复任务', dependencies=[DependsRBAC]) -async def resume_task(pk: str): - task = await TaskService().resume(pk=pk) - 
return await response_base.success(data=task) - - -@router.post('/{pk}/stop', summary='删除任务', dependencies=[DependsRBAC]) -async def delete_task(pk: str): - task = await TaskService().delete(pk=pk) - return await response_base.success(data=task) +@router.get('/{pk}', summary='获取任务结果') +async def get_task_result(pk: str = Path(description='任务ID')): + try: + task = AsyncResult(pk) + except BackendGetMetaError: + raise NotFoundError(msg='任务不存在') + else: + status = task.status + if status == 'FAILURE': + return await response_base.fail(res=CustomResponseCode.HTTP_204, data=task.result) + return await response_base.success(data=task.result) diff --git a/backend/app/common/task.py b/backend/app/common/task.py deleted file mode 100644 index 4efa7fb7..00000000 --- a/backend/app/common/task.py +++ /dev/null @@ -1,60 +0,0 @@ -# !/usr/bin/env python3 -# -*- coding: utf-8 -*- -from apscheduler.executors.asyncio import AsyncIOExecutor -from apscheduler.jobstores.redis import RedisJobStore -from apscheduler.schedulers.asyncio import AsyncIOScheduler - -from backend.app.common.log import log -from backend.app.core.conf import settings - - -def _scheduler_conf() -> dict: - """ - task conf - - :return: - """ - redis_conf = { - 'host': settings.APS_REDIS_HOST, - 'port': settings.APS_REDIS_PORT, - 'password': settings.APS_REDIS_PASSWORD, - 'db': settings.APS_REDIS_DATABASE, - 'socket_timeout': settings.APS_REDIS_TIMEOUT, - } - - end_conf = { - # 配置存储器 - 'jobstores': {'default': RedisJobStore(**redis_conf)}, - # 配置执行器 - 'executors': { - 'default': AsyncIOExecutor(), - }, - # 创建task时的默认参数 - 'job_defaults': { - 'coalesce': settings.APS_COALESCE, - 'max_instances': settings.APS_MAX_INSTANCES, - 'misfire_grace_time': settings.APS_MISFIRE_GRACE_TIME, - }, - # 时区 - 'timezone': settings.DATETIME_TIMEZONE, - } - - return end_conf - - -class Scheduler(AsyncIOScheduler): - def start(self, paused: bool = False): - try: - super().start(paused) - except Exception as e: - log.error(f'❌ 任务 scheduler 启动失败: {e}') - - def shutdown(self, wait: bool = True): - try: - super().shutdown(wait) - except Exception as e: - log.error(f'❌ 任务 scheduler 关闭失败: {e}') - - -# 调度器 -scheduler = Scheduler(**_scheduler_conf()) diff --git a/backend/app/core/celery.py b/backend/app/core/celery.py new file mode 100644 index 00000000..b3dedc84 --- /dev/null +++ b/backend/app/core/celery.py @@ -0,0 +1,20 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from celery import Celery + +from backend.app.core.conf import settings + +celery_app = Celery('celery_app') +celery_app.conf.broker_url = f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BROKER}' +celery_app.conf.result_backend = f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BACKEND}' +celery_app.conf.result_backend_transport_options = { + 'global_keyprefix': settings.CELERY_REDIS_BACKEND_PREFIX, + 'retry_policy': { + 'timeout': settings.CELERY_REDIS_BACKEND_TIMEOUT + }, + 'result_chord_ordered': settings.CELERY_REDIS_BACKEND_ORDERED + +} +celery_app.conf.timezone = settings.DATETIME_TIMEZONE +celery_app.conf.task_track_started = True +celery_app.autodiscover_tasks(['app']) diff --git a/backend/app/core/conf.py b/backend/app/core/conf.py index 22049635..a5d9e70d 100644 --- a/backend/app/core/conf.py +++ b/backend/app/core/conf.py @@ -22,11 +22,12 @@ class Settings(BaseSettings): REDIS_PASSWORD: str REDIS_DATABASE: int - # Env 
APScheduler Redis - APS_REDIS_HOST: str - APS_REDIS_PORT: int - APS_REDIS_PASSWORD: str - APS_REDIS_DATABASE: int + # Env Celery + CELERY_REDIS_HOST: str + CELERY_REDIS_PORT: int + CELERY_REDIS_PASSWORD: str + CELERY_REDIS_DATABASE_BROKER: int + CELERY_REDIS_DATABASE_BACKEND: int # Env Token TOKEN_SECRET_KEY: str # 密钥 secrets.token_urlsafe(32) @@ -63,6 +64,11 @@ def validator_api_url(cls, values): UVICORN_PORT: int = 8000 UVICORN_RELOAD: bool = True + # Celery + CELERY_REDIS_BACKEND_PREFIX: str = 'fba_celery' + CELERY_REDIS_BACKEND_TIMEOUT: float = 5.0 + CELERY_REDIS_BACKEND_ORDERED: bool = True + # Static Server STATIC_FILES: bool = False @@ -84,14 +90,6 @@ def validator_api_url(cls, values): # Redis REDIS_TIMEOUT: int = 5 - # APScheduler Redis - APS_REDIS_TIMEOUT: int = 10 - - # APScheduler Default - APS_COALESCE: bool = False # 是否合并运行 - APS_MAX_INSTANCES: int = 3 # 最大实例数 - APS_MISFIRE_GRACE_TIME: int = 60 # 任务错过执行时间后,最大容错时间,过期后不再执行,单位:秒 - # Token TOKEN_ALGORITHM: str = 'HS256' # 算法 TOKEN_EXPIRE_SECONDS: int = 60 * 60 * 24 * 1 # 过期时间,单位:秒 diff --git a/backend/app/core/registrar.py b/backend/app/core/registrar.py index 71c67e24..67378417 100644 --- a/backend/app/core/registrar.py +++ b/backend/app/core/registrar.py @@ -10,7 +10,6 @@ from backend.app.api.routers import v1 from backend.app.common.exception.exception_handler import register_exception from backend.app.common.redis import redis_client -from backend.app.common.task import scheduler from backend.app.core.conf import settings from backend.app.database.db_mysql import create_table from backend.app.middleware.jwt_auth_middleware import JwtAuthMiddleware @@ -33,8 +32,6 @@ async def register_init(app: FastAPI): await redis_client.open() # 初始化 limiter await FastAPILimiter.init(redis_client, prefix=settings.LIMITER_REDIS_PREFIX, http_callback=http_limit_callback) - # 启动定时任务 - scheduler.start() yield @@ -42,8 +39,6 @@ async def register_init(app: FastAPI): await redis_client.close() # 关闭 limiter await FastAPILimiter.close() - # 关闭定时任务 - scheduler.shutdown() def register_app(): diff --git a/backend/app/schemas/task.py b/backend/app/schemas/task.py deleted file mode 100644 index c0a13412..00000000 --- a/backend/app/schemas/task.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -from datetime import datetime - -from backend.app.schemas.base import SchemaBase - - -class GetTask(SchemaBase): - id: str - func_name: str - trigger: str - executor: str - name: str - misfire_grace_time: str - coalesce: str - max_instances: str - next_run_time: datetime | None diff --git a/backend/app/services/task_service.py b/backend/app/services/task_service.py deleted file mode 100644 index a33f7eb2..00000000 --- a/backend/app/services/task_service.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -from datetime import datetime - -import pytz -from asgiref.sync import sync_to_async - -from backend.app.common.exception import errors -from backend.app.common.task import scheduler -from backend.app.core.conf import settings -from backend.app.schemas.task import GetTask - - -class TaskService: - @staticmethod - @sync_to_async - def get_task_list(): - tasks = [] - for job in scheduler.get_jobs(): - tasks.append( - GetTask( - **{ - 'id': job.id, - 'func_name': job.func_ref, - 'trigger': str(job.trigger), - 'executor': job.executor, - 'name': job.name, - 'misfire_grace_time': job.misfire_grace_time, - 'coalesce': job.coalesce, - 'max_instances': job.max_instances, - 'next_run_time': job.next_run_time, 
- } - ).dict() - ) - return tasks - - @staticmethod - @sync_to_async - def get_task(pk: str): - job = scheduler.get_job(job_id=pk) - if not job: - raise errors.NotFoundError(msg='任务不存在') - task = GetTask( - **{ - 'id': job.id, - 'func_name': job.func_ref, - 'trigger': str(job.trigger), - 'executor': job.executor, - 'name': job.name, - 'misfire_grace_time': job.misfire_grace_time, - 'coalesce': job.coalesce, - 'max_instances': job.max_instances, - 'next_run_time': job.next_run_time, - } - ) - - return task - - async def run(self, pk: str): - task = await self.get_task(pk=pk) - # next_run_time 仅适用于 pytz 模块 - scheduler.modify_job(job_id=pk, next_run_time=datetime.now(pytz.timezone(settings.DATETIME_TIMEZONE))) - return task - - async def pause(self, pk: str): - task = await self.get_task(pk=pk) - scheduler.pause_job(job_id=pk) - return task - - async def resume(self, pk: str): - task = await self.get_task(pk=pk) - scheduler.resume_job(job_id=pk) - return task - - async def delete(self, pk: str): - task = await self.get_task(pk=pk) - scheduler.remove_job(job_id=pk) - return task diff --git a/backend/app/tasks.py b/backend/app/tasks.py new file mode 100644 index 00000000..0ee213c8 --- /dev/null +++ b/backend/app/tasks.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +import uuid + +from backend.app.core.celery import celery_app + + +@celery_app.task() +async def task_demo_async(): + print(f'异步任务 {uuid.uuid4().hex}') diff --git a/requirements.txt b/requirements.txt index 0c939481..da4f75a1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,7 @@ asyncmy==0.2.5 bcrypt==3.2.2 casbin==1.23.0 casbin_async_sqlalchemy_adapter==1.3.0 +celery==5.3.4 cryptography==41.0.2 email-validator==1.1.3 Faker==9.7.1 @@ -34,6 +35,6 @@ SQLAlchemy==2.0.8 starlette==0.27.0 supervisor==4.2.5 user_agents==2.2.0 -uvicorn[standard]==0.13.4 +uvicorn[standard]==0.22.0 wait-for-it==2.2.1 XdbSearchIP==1.0.2 From c2abfdde8d0b017eb8e492a2fcf2639d1f15db80 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Sat, 21 Oct 2023 03:27:56 +0800 Subject: [PATCH 02/17] black format --- backend/app/core/celery.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/backend/app/core/celery.py b/backend/app/core/celery.py index b3dedc84..8891762c 100644 --- a/backend/app/core/celery.py +++ b/backend/app/core/celery.py @@ -5,15 +5,18 @@ from backend.app.core.conf import settings celery_app = Celery('celery_app') -celery_app.conf.broker_url = f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BROKER}' -celery_app.conf.result_backend = f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BACKEND}' +celery_app.conf.broker_url = ( + f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' + f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BROKER}' +) +celery_app.conf.result_backend = ( + f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' + f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BACKEND}' +) celery_app.conf.result_backend_transport_options = { 'global_keyprefix': settings.CELERY_REDIS_BACKEND_PREFIX, - 'retry_policy': { - 'timeout': settings.CELERY_REDIS_BACKEND_TIMEOUT - }, - 'result_chord_ordered': settings.CELERY_REDIS_BACKEND_ORDERED - + 'retry_policy': {'timeout': settings.CELERY_REDIS_BACKEND_TIMEOUT}, + 'result_chord_ordered': 
settings.CELERY_REDIS_BACKEND_ORDERED, } celery_app.conf.timezone = settings.DATETIME_TIMEZONE celery_app.conf.task_track_started = True From 302fde091084720638f12de382a5c6c7c4502ec2 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Sat, 21 Oct 2023 04:49:07 +0800 Subject: [PATCH 03/17] Add celery to run the script --- backend/app/api/v1/mixed/tests.py | 4 ++-- backend/app/core/celery.py | 3 ++- backend/app/tasks.py | 7 +++++-- backend/app/worker.sh | 3 +++ 4 files changed, 12 insertions(+), 5 deletions(-) create mode 100644 backend/app/worker.sh diff --git a/backend/app/api/v1/mixed/tests.py b/backend/app/api/v1/mixed/tests.py index bef46ce7..70e1d7c2 100644 --- a/backend/app/api/v1/mixed/tests.py +++ b/backend/app/api/v1/mixed/tests.py @@ -10,8 +10,8 @@ @router.post('/task/async', summary='测试异步任务') async def task_demo_async_send(): - celery_app.send_task('task_demo_async') - return {'msg': 'Success'} + result = celery_app.send_task('tasks.task_demo_async') + return {'msg': 'Success', 'data': result.id} @router.post('/files', summary='测试文件上传') diff --git a/backend/app/core/celery.py b/backend/app/core/celery.py index 8891762c..30c02ef0 100644 --- a/backend/app/core/celery.py +++ b/backend/app/core/celery.py @@ -4,6 +4,8 @@ from backend.app.core.conf import settings +__all__ = ['celery_app'] + celery_app = Celery('celery_app') celery_app.conf.broker_url = ( f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' @@ -20,4 +22,3 @@ } celery_app.conf.timezone = settings.DATETIME_TIMEZONE celery_app.conf.task_track_started = True -celery_app.autodiscover_tasks(['app']) diff --git a/backend/app/tasks.py b/backend/app/tasks.py index 0ee213c8..cdeebb2e 100644 --- a/backend/app/tasks.py +++ b/backend/app/tasks.py @@ -1,10 +1,13 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- import uuid +import sys -from backend.app.core.celery import celery_app +sys.path.append('../../') +from backend.app.core.celery import celery_app # noqa: E402 -@celery_app.task() + +@celery_app.task async def task_demo_async(): print(f'异步任务 {uuid.uuid4().hex}') diff --git a/backend/app/worker.sh b/backend/app/worker.sh new file mode 100644 index 00000000..abd3f91c --- /dev/null +++ b/backend/app/worker.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +celery -A tasks worker --loglevel=INFO From 419ec996354ac3914fa1bb2296be791702d2d990 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Sat, 21 Oct 2023 04:58:33 +0800 Subject: [PATCH 04/17] Update celery usage to README --- README.md | 15 +++++++++++---- README.zh-CN.md | 15 +++++++++++---- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index 3498645d..584eaa19 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ See a preview of some of the screenshots - [x] Follows Restful API specification - [x] Global SQLAlchemy 2.0 syntax - [x] Casbin RBAC access control model -- [x] APScheduler online timed tasks +- [x] Celery asynchronous tasks - [x] JWT middleware whitelist authentication - [x] Global customizable time zone time - [x] Docker / Docker-compose deployment @@ -85,6 +85,7 @@ TODO: ### BackEnd 1. Install dependencies + ```shell pip install -r requirements.txt ``` @@ -115,9 +116,15 @@ TODO: # Execute the migration alembic upgrade head ``` -7. Modify the configuration file as needed -8. Execute the `backend/app/main.py` file to start the service -9. Browser access: http://127.0.0.1:8000/api/v1/docs +7. Start celery worker + + ```shell + celery -A tasks worker --loglevel=INFO + ``` + +8. Modify the configuration file as needed +9. 
Execute the `backend/app/main.py` file to start the service +10. Browser access: http://127.0.0.1:8000/api/v1/docs --- diff --git a/README.zh-CN.md b/README.zh-CN.md index de53db58..0cd45e4e 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -32,7 +32,7 @@ mvc 架构作为常规设计模式,在 python web 中也很常见,但是三 - [x] 遵循 Restful API 规范 - [x] 全局 SQLAlchemy 2.0 语法 - [x] Casbin RBAC 访问控制模型 -- [x] APScheduler 在线定时任务 +- [x] Celery 异步任务 - [x] JWT 中间件白名单认证 - [x] 全局自定义时区时间 - [x] Docker / Docker-compose 部署 @@ -79,6 +79,7 @@ TODO: ### 后端 1. 安装依赖项 + ```shell pip install -r requirements.txt ``` @@ -110,9 +111,15 @@ TODO: alembic upgrade head ``` -7. 按需修改配置文件 -8. 执行 `backend/app/main.py` 文件启动服务 -9. 浏览器访问:http://127.0.0.1:8000/api/v1/docs +7. 启动 celery worker + + ```shell + celery -A tasks worker --loglevel=INFO + ``` + +8. 按需修改配置文件 +9. 执行 `backend/app/main.py` 文件启动服务 +10. 浏览器访问:http://127.0.0.1:8000/api/v1/docs --- From b0a3e5c40df963bca82629f86911fb91166b7f47 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Sun, 22 Oct 2023 08:12:26 +0800 Subject: [PATCH 05/17] Update test task --- backend/app/api/v1/mixed/tests.py | 4 ++-- backend/app/tasks.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/app/api/v1/mixed/tests.py b/backend/app/api/v1/mixed/tests.py index 70e1d7c2..e9999150 100644 --- a/backend/app/api/v1/mixed/tests.py +++ b/backend/app/api/v1/mixed/tests.py @@ -8,8 +8,8 @@ router = APIRouter(prefix='/tests') -@router.post('/task/async', summary='测试异步任务') -async def task_demo_async_send(): +@router.post('/send', summary='测试异步任务') +async def task_send(): result = celery_app.send_task('tasks.task_demo_async') return {'msg': 'Success', 'data': result.id} diff --git a/backend/app/tasks.py b/backend/app/tasks.py index cdeebb2e..06bde048 100644 --- a/backend/app/tasks.py +++ b/backend/app/tasks.py @@ -9,5 +9,5 @@ @celery_app.task -async def task_demo_async(): +def task_demo_async(): print(f'异步任务 {uuid.uuid4().hex}') From 460b8ebb0405535d0e0a4a820ba16e24395ed3b0 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 10:55:51 +0800 Subject: [PATCH 06/17] Add celery rabbitmq broker --- Dockerfile => backend.dockerfile | 4 +- backend/app/.env.example | 9 +++- backend/app/{worker.sh => celery-start.sh} | 1 + backend/app/core/celery.py | 61 ++++++++++++++++------ backend/app/core/conf.py | 31 ++++++++--- celery.dockerfile | 23 ++++++++ deploy/docker-compose/.env.server | 16 ++++-- deploy/docker-compose/docker-compose.yml | 31 ++++++++++- 8 files changed, 141 insertions(+), 35 deletions(-) rename Dockerfile => backend.dockerfile (80%) rename backend/app/{worker.sh => celery-start.sh} (61%) create mode 100644 celery.dockerfile diff --git a/Dockerfile b/backend.dockerfile similarity index 80% rename from Dockerfile rename to backend.dockerfile index efe01972..6d094840 100644 --- a/Dockerfile +++ b/backend.dockerfile @@ -12,8 +12,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* # 某些包可能存在同步不及时导致安装失败的情况,可选择备用源 -# 清华源(国内快,也可能同步不及时):https://pypi.tuna.tsinghua.edu.cn/simple -# 官方源(国外慢,但永远都是最新的):https://pypi.org/simple +# 清华源:https://pypi.tuna.tsinghua.edu.cn/simple +# 官方源:https://pypi.org/simple RUN pip install --upgrade pip -i https://mirrors.aliyun.com/pypi/simple \ && pip install --no-cache-dir -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple diff --git a/backend/app/.env.example b/backend/app/.env.example index d3cc10dd..1863b8c2 100644 --- a/backend/app/.env.example +++ b/backend/app/.env.example @@ -14,8 +14,13 @@ REDIS_DATABASE=0 CELERY_REDIS_HOST='127.0.0.1' 
CELERY_REDIS_PORT=6379 CELERY_REDIS_PASSWORD='' -CELERY_REDIS_DATABASE_BROKER=1 -CELERY_REDIS_DATABASE_BACKEND=2 +CELERY_BROKER_REDIS_DATABASE=1 +CELERY_BACKEND_REDIS_DATABASE=2 +# Rabbitmq +RABBITMQ_HOST='127.0.0.1' +RABBITMQ_PORT=5672 +RABBITMQ_USERNAME='guest' +RABBITMQ_PASSWORD='guest' # Token TOKEN_SECRET_KEY='1VkVF75nsNABBjK_7-qz7GtzNy3AMvktc9TCPwKczCk' # Opera Log diff --git a/backend/app/worker.sh b/backend/app/celery-start.sh similarity index 61% rename from backend/app/worker.sh rename to backend/app/celery-start.sh index abd3f91c..681d40c9 100644 --- a/backend/app/worker.sh +++ b/backend/app/celery-start.sh @@ -1,3 +1,4 @@ #!/usr/bin/env bash celery -A tasks worker --loglevel=INFO +celery -A tasks beat --loglevel=INFO diff --git a/backend/app/core/celery.py b/backend/app/core/celery.py index 30c02ef0..6f96a22b 100644 --- a/backend/app/core/celery.py +++ b/backend/app/core/celery.py @@ -6,19 +6,48 @@ __all__ = ['celery_app'] -celery_app = Celery('celery_app') -celery_app.conf.broker_url = ( - f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' - f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BROKER}' -) -celery_app.conf.result_backend = ( - f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' - f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_REDIS_DATABASE_BACKEND}' -) -celery_app.conf.result_backend_transport_options = { - 'global_keyprefix': settings.CELERY_REDIS_BACKEND_PREFIX, - 'retry_policy': {'timeout': settings.CELERY_REDIS_BACKEND_TIMEOUT}, - 'result_chord_ordered': settings.CELERY_REDIS_BACKEND_ORDERED, -} -celery_app.conf.timezone = settings.DATETIME_TIMEZONE -celery_app.conf.task_track_started = True + +def make_celery(main_name: str) -> Celery: + """ + 创建 celery 应用 + + :param main_name: __main__ module name + :return: + """ + app = Celery(main_name) + + # Celery Config + app.conf.broker_url = ( + ( + f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' + f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_BROKER_REDIS_DATABASE}' + ) + if settings.CELERY_BROKER == 'redis' + else ( + f'amqp://{settings.RABBITMQ_USERNAME}:{settings.RABBITMQ_PASSWORD}@{settings.RABBITMQ_HOST}:' + f'{settings.RABBITMQ_PORT}' + ) + ) + app.conf.result_backend = ( + f'redis://:{settings.CELERY_REDIS_PASSWORD}@{settings.CELERY_REDIS_HOST}:' + f'{settings.CELERY_REDIS_PORT}/{settings.CELERY_BACKEND_REDIS_DATABASE}' + ) + app.conf.result_backend_transport_options = { + 'global_keyprefix': settings.CELERY_BACKEND_REDIS_PREFIX, + 'retry_policy': { + 'timeout': settings.CELERY_BACKEND_REDIS_TIMEOUT, + }, + 'result_chord_ordered': settings.CELERY_BACKEND_REDIS_ORDERED, + } + app.conf.timezone = settings.DATETIME_TIMEZONE + app.conf.task_track_started = True + + # Celery Schedule Tasks + # https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html + app.conf.beat_schedule = settings.CELERY_BEAT_SCHEDULE + app.conf.beat_schedule_filename = settings.CELERY_BEAT_SCHEDULE_FILENAME + + return app + + +celery_app = make_celery('celery_app') diff --git a/backend/app/core/conf.py b/backend/app/core/conf.py index a5d9e70d..1d223236 100644 --- a/backend/app/core/conf.py +++ b/backend/app/core/conf.py @@ -26,8 +26,15 @@ class Settings(BaseSettings): CELERY_REDIS_HOST: str CELERY_REDIS_PORT: int CELERY_REDIS_PASSWORD: str - CELERY_REDIS_DATABASE_BROKER: int - CELERY_REDIS_DATABASE_BACKEND: int + CELERY_BROKER_REDIS_DATABASE: int # 仅当使用 redis 作为 broker 时生效, 更适用于测试环境 + CELERY_BACKEND_REDIS_DATABASE: int + + # Env Rabbitmq + # 
docker run -d --hostname fba-mq --name fba-mq -p 5672:5672 -p 15672:15672 rabbitmq:latest + RABBITMQ_HOST: str + RABBITMQ_PORT: int + RABBITMQ_USERNAME: str + RABBITMQ_PASSWORD: str # Env Token TOKEN_SECRET_KEY: str # 密钥 secrets.token_urlsafe(32) @@ -64,11 +71,6 @@ def validator_api_url(cls, values): UVICORN_PORT: int = 8000 UVICORN_RELOAD: bool = True - # Celery - CELERY_REDIS_BACKEND_PREFIX: str = 'fba_celery' - CELERY_REDIS_BACKEND_TIMEOUT: float = 5.0 - CELERY_REDIS_BACKEND_ORDERED: bool = True - # Static Server STATIC_FILES: bool = False @@ -145,10 +147,23 @@ def validator_api_url(cls, values): OPERA_LOG_ENCRYPT: int = 1 # 0: AES (性能损耗); 1: md5; 2: ItsDangerous; 3: 不加密, others: 替换为 ****** OPERA_LOG_ENCRYPT_INCLUDE: list[str] = ['password', 'old_password', 'new_password', 'confirm_password'] - # ip location + # Ip location IP_LOCATION_REDIS_PREFIX: str = 'fba_ip_location' IP_LOCATION_EXPIRE_SECONDS: int = 60 * 60 * 24 * 1 # 过期时间,单位:秒 + # Celery + CELERY_BROKER: Literal['rabbitmq', 'redis'] = 'rabbitmq' + CELERY_BACKEND_REDIS_PREFIX: str = 'fba_celery' + CELERY_BACKEND_REDIS_TIMEOUT: float = 5.0 + CELERY_BACKEND_REDIS_ORDERED: bool = True + CELERY_BEAT_SCHEDULE_FILENAME: str = './log/celery_beat-schedule' + CELERY_BEAT_SCHEDULE: dict = { + 'task_demo_async': { + 'task': 'tasks.task_demo_async', + 'schedule': 5.0, + }, + } + class Config: # https://docs.pydantic.dev/usage/settings/#dotenv-env-support env_file = '.env' diff --git a/celery.dockerfile b/celery.dockerfile new file mode 100644 index 00000000..8c44518a --- /dev/null +++ b/celery.dockerfile @@ -0,0 +1,23 @@ +FROM python:3.10-slim + +WORKDIR /fba + +COPY . . + +RUN sed -i s@/deb.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list \ + && sed -i s@/security.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list + +RUN apt-get update \ + && apt-get install -y --no-install-recommends gcc python3-dev \ + && rm -rf /var/lib/apt/lists/* + +RUN pip install --upgrade pip -i https://mirrors.aliyun.com/pypi/simple \ + && pip install --no-cache-dir -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple + +ENV TZ = Asia/Shanghai + +WORKDIR /fba/backend/app + +CMD chmod +x celery-start.sh + +CMD ["./celery-start.sh"] diff --git a/deploy/docker-compose/.env.server b/deploy/docker-compose/.env.server index 3309a345..bccb885e 100644 --- a/deploy/docker-compose/.env.server +++ b/deploy/docker-compose/.env.server @@ -10,11 +10,17 @@ REDIS_HOST='fba_redis' REDIS_PORT=6379 REDIS_PASSWORD='' REDIS_DATABASE=0 -# APScheduler -APS_REDIS_HOST='fba_redis' -APS_REDIS_PORT=6379 -APS_REDIS_PASSWORD='' -APS_REDIS_DATABASE=1 +# Celery +CELERY_REDIS_HOST='fba_redis' +CELERY_REDIS_PORT=6379 +CELERY_REDIS_PASSWORD='' +CELERY_BROKER_REDIS_DATABASE=1 +CELERY_BACKEND_REDIS_DATABASE=2 +# Rabbitmq +RABBITMQ_HOST='fba_rabbitmq' +RABBITMQ_PORT=5672 +RABBITMQ_USERNAME='guest' +RABBITMQ_PASSWORD='guest' # Token TOKEN_SECRET_KEY='1VkVF75nsNABBjK_7-qz7GtzNy3AMvktc9TCPwKczCk' # Opera Log diff --git a/deploy/docker-compose/docker-compose.yml b/deploy/docker-compose/docker-compose.yml index 25cf1fb7..810cba3a 100644 --- a/deploy/docker-compose/docker-compose.yml +++ b/deploy/docker-compose/docker-compose.yml @@ -12,17 +12,19 @@ volumes: fba_mysql: fba_redis: fba_static: + fba_rabbitmq: services: fba_server: build: context: ../../ - dockerfile: Dockerfile + dockerfile: backend.dockerfile container_name: "fba_server" restart: always depends_on: - fba_mysql - fba_redis + - fba_rabbitmq volumes: - fba_static:/fba/backend/app/static networks: @@ -31,7 +33,7 @@ 
services: - bash - -c - | - wait-for-it -s fba_mysql:3306 -s fba_redis:6379 -t 300 + wait-for-it -s fba_mysql:3306 -s fba_redis:6379 fba_rabbitmq:5672 fba_rabbitmq:15672 -t 300 supervisord -c /fba/deploy/supervisor.conf fba_mysql: @@ -80,3 +82,28 @@ services: - fba_static:/www/fba/backend/app/static networks: - fba_network + + fba_rabbitmq: + hostname: fba_rabbitmq + image: rabbitmq:3.12.7 + ports: + - "15672:15672" + - "5672:5672" + container_name: "fba_rabbitmq" + restart: always + environment: + - RABBITMQ_DEFAULT_USER=guest + - RABBITMQ_DEFAULT_PASS=guest + volumes: + - fba_rabbitmq:/var/lib/rabbitmq + networks: + - fba_network + + fba_celery: + build: + context: ../../ + dockerfile: celery.dockerfile + container_name: "fba_celery" + restart: always + networks: + - fba_network From 60d3610c551d64e2790f9245592345496e0d0a76 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 12:20:41 +0800 Subject: [PATCH 07/17] Fix dockerfiles --- README.md | 6 +++++- README.zh-CN.md | 6 +++++- backend.dockerfile | 7 ++----- celery.dockerfile | 3 +-- 4 files changed, 13 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 584eaa19..5d5269ed 100644 --- a/README.md +++ b/README.md @@ -134,6 +134,10 @@ Click [fastapi_best_architecture_ui](https://github.com/fastapi-practices/fastap ### Docker deploy +> [!WARNING] +> Default port conflict:8000,3306,6379,5672 +> As a best practice, shut down on-premises services before deployment:mysql,redis,rabbitmq... + 1. Go to the directory where the ``docker-compose.yml`` file is located and create the environment variable file ``.env`` @@ -150,7 +154,7 @@ Click [fastapi_best_architecture_ui](https://github.com/fastapi-practices/fastap 3. Execute the one-click boot command ```shell - docker-compose up -d -build + docker-compose up -d --build ``` 4. Wait for the command to complete automatically diff --git a/README.zh-CN.md b/README.zh-CN.md index 0cd45e4e..33e64261 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -129,6 +129,10 @@ TODO: ### Docker 部署 +> [!WARNING] +> 默认端口冲突:8000,3306,6379,5672 +> 最佳做法是在部署之前关闭本地服务:mysql,redis,rabbitmq... + 1. 进入 `docker-compose.yml` 文件所在目录,创建环境变量文件`.env` ```shell @@ -144,7 +148,7 @@ TODO: 3. 执行一键启动命令 ```shell - docker-compose up -d -build + docker-compose up -d --build ``` 4. 等待命令自动完成 diff --git a/backend.dockerfile b/backend.dockerfile index 6d094840..19dbd257 100644 --- a/backend.dockerfile +++ b/backend.dockerfile @@ -4,16 +4,13 @@ WORKDIR /fba COPY . . -RUN sed -i s@/deb.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list \ - && sed -i s@/security.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list +RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources RUN apt-get update \ && apt-get install -y --no-install-recommends gcc python3-dev \ && rm -rf /var/lib/apt/lists/* -# 某些包可能存在同步不及时导致安装失败的情况,可选择备用源 -# 清华源:https://pypi.tuna.tsinghua.edu.cn/simple -# 官方源:https://pypi.org/simple +# 某些包可能存在同步不及时导致安装失败的情况,可更改为官方源:https://pypi.org/simple RUN pip install --upgrade pip -i https://mirrors.aliyun.com/pypi/simple \ && pip install --no-cache-dir -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple diff --git a/celery.dockerfile b/celery.dockerfile index 8c44518a..3b0e3699 100644 --- a/celery.dockerfile +++ b/celery.dockerfile @@ -4,8 +4,7 @@ WORKDIR /fba COPY . . 
-RUN sed -i s@/deb.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list \ - && sed -i s@/security.debian.org/@/mirrors.aliyun.com/@g /etc/apt/sources.list +RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources RUN apt-get update \ && apt-get install -y --no-install-recommends gcc python3-dev \ From 058c0fee0d6ad3fabaf3373c4213b504005fdaa1 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 12:22:03 +0800 Subject: [PATCH 08/17] Add task interface access authorization --- backend/app/api/v1/task.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/backend/app/api/v1/task.py b/backend/app/api/v1/task.py index e7b4565b..e093e47d 100644 --- a/backend/app/api/v1/task.py +++ b/backend/app/api/v1/task.py @@ -5,13 +5,14 @@ from fastapi import APIRouter, Path from backend.app.common.exception.errors import NotFoundError +from backend.app.common.rbac import DependsRBAC from backend.app.common.response.response_code import CustomResponseCode from backend.app.common.response.response_schema import response_base router = APIRouter() -@router.get('/{pk}', summary='获取任务结果') +@router.get('/{pk}', summary='获取任务结果', dependencies=[DependsRBAC]) async def get_task_result(pk: str = Path(description='任务ID')): try: task = AsyncResult(pk) From 599c49dff9cec79ab24440932f83451cd6f87440 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 12:42:31 +0800 Subject: [PATCH 09/17] Update celery deploy run --- backend/app/celery-start.sh | 3 +-- celery.dockerfile | 6 ++++-- deploy/docker-compose/docker-compose.yml | 1 + deploy/supervisor.conf | 20 ++++++++++++++++++++ 4 files changed, 26 insertions(+), 4 deletions(-) diff --git a/backend/app/celery-start.sh b/backend/app/celery-start.sh index 681d40c9..a44f2cb3 100644 --- a/backend/app/celery-start.sh +++ b/backend/app/celery-start.sh @@ -1,4 +1,3 @@ #!/usr/bin/env bash -celery -A tasks worker --loglevel=INFO -celery -A tasks beat --loglevel=INFO +celery -A tasks worker --loglevel=INFO -B diff --git a/celery.dockerfile b/celery.dockerfile index 3b0e3699..bfdca5a3 100644 --- a/celery.dockerfile +++ b/celery.dockerfile @@ -17,6 +17,8 @@ ENV TZ = Asia/Shanghai WORKDIR /fba/backend/app -CMD chmod +x celery-start.sh +RUN chmod +x celery-start.sh -CMD ["./celery-start.sh"] +# 这里不使用脚本启动 celery, 而是使用 supervisor +# 因为 celery 脚本包含 worker 和 beat, 但是并不推荐它们在一条命令中启动 +#CMD ["./celery-start.sh"] diff --git a/deploy/docker-compose/docker-compose.yml b/deploy/docker-compose/docker-compose.yml index 810cba3a..3b504593 100644 --- a/deploy/docker-compose/docker-compose.yml +++ b/deploy/docker-compose/docker-compose.yml @@ -25,6 +25,7 @@ services: - fba_mysql - fba_redis - fba_rabbitmq + - fba_celery volumes: - fba_static:/fba/backend/app/static networks: diff --git a/deploy/supervisor.conf b/deploy/supervisor.conf index ef9afbc7..0db200a8 100644 --- a/deploy/supervisor.conf +++ b/deploy/supervisor.conf @@ -163,3 +163,23 @@ autorestart=true startretries=5 redirect_stderr=true stdout_logfile=/var/log/fastapi_server/fba_server.log + +[program:celery_worker] +directory=/fba/backend/app +command=/usr/local/bin/celery -A tasks worker --loglevel=INFO +user=root +autostart=true +autorestart=true +startretries=5 +redirect_stderr=true +stdout_logfile=/var/log/celery/fba_celery_worker.log + +[program:celery_beat] +directory=/fba/backend/app +command=/usr/local/bin/celery -A tasks beat --loglevel=INFO +user=root +autostart=true +autorestart=true +startretries=5 +redirect_stderr=true 
+stdout_logfile=/var/log/celery/fba_celery_beat.log From 061c8ec22c751c954bbd85762567221d20737585 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 12:53:20 +0800 Subject: [PATCH 10/17] Fix dockerfiles --- backend.dockerfile | 3 ++- celery.dockerfile | 5 ++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/backend.dockerfile b/backend.dockerfile index 19dbd257..2fc827b2 100644 --- a/backend.dockerfile +++ b/backend.dockerfile @@ -4,7 +4,8 @@ WORKDIR /fba COPY . . -RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources +RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources \ + && sed -i 's|security.debian.org/debian-security|mirrors.ustc.edu.cn/debian-security|g' /etc/apt/sources.list.d/debian.sources RUN apt-get update \ && apt-get install -y --no-install-recommends gcc python3-dev \ diff --git a/celery.dockerfile b/celery.dockerfile index bfdca5a3..8f48b0d1 100644 --- a/celery.dockerfile +++ b/celery.dockerfile @@ -4,7 +4,8 @@ WORKDIR /fba COPY . . -RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources +RUN sed -i 's/deb.debian.org/mirrors.ustc.edu.cn/g' /etc/apt/sources.list.d/debian.sources \ + && sed -i 's|security.debian.org/debian-security|mirrors.ustc.edu.cn/debian-security|g' /etc/apt/sources.list.d/debian.sources RUN apt-get update \ && apt-get install -y --no-install-recommends gcc python3-dev \ @@ -15,6 +16,8 @@ RUN pip install --upgrade pip -i https://mirrors.aliyun.com/pypi/simple \ ENV TZ = Asia/Shanghai +RUN mkdir -p /var/log/celery + WORKDIR /fba/backend/app RUN chmod +x celery-start.sh From 445541e6323960d105e0286144ececb306e187a3 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 20:56:08 +0800 Subject: [PATCH 11/17] Fix supervisor conf --- backend.dockerfile | 2 + celery.dockerfile | 6 +- deploy/celery.conf | 19 ++++ deploy/docker-compose/docker-compose.yml | 33 +++++-- deploy/fastapi_server.conf | 9 ++ deploy/supervisor.conf | 108 +---------------------- 6 files changed, 61 insertions(+), 116 deletions(-) create mode 100644 deploy/celery.conf create mode 100644 deploy/fastapi_server.conf diff --git a/backend.dockerfile b/backend.dockerfile index 2fc827b2..c5777def 100644 --- a/backend.dockerfile +++ b/backend.dockerfile @@ -19,6 +19,8 @@ ENV TZ = Asia/Shanghai RUN mkdir -p /var/log/fastapi_server +COPY ./deploy/fastapi_server.conf /etc/supervisor/conf.d/ + EXPOSE 8001 CMD ["uvicorn", "backend.app.main:app", "--host", "127.0.0.1", "--port", "8000"] diff --git a/celery.dockerfile b/celery.dockerfile index 8f48b0d1..33fc1885 100644 --- a/celery.dockerfile +++ b/celery.dockerfile @@ -18,10 +18,10 @@ ENV TZ = Asia/Shanghai RUN mkdir -p /var/log/celery +COPY ./deploy/celery.conf /etc/supervisor/conf.d/ + WORKDIR /fba/backend/app RUN chmod +x celery-start.sh -# 这里不使用脚本启动 celery, 而是使用 supervisor -# 因为 celery 脚本包含 worker 和 beat, 但是并不推荐它们在一条命令中启动 -#CMD ["./celery-start.sh"] +CMD ["./celery-start.sh"] diff --git a/deploy/celery.conf b/deploy/celery.conf new file mode 100644 index 00000000..ea68c063 --- /dev/null +++ b/deploy/celery.conf @@ -0,0 +1,19 @@ +[program:celery_worker] +directory=/fba/backend/app +command=/usr/local/bin/celery -A tasks worker --loglevel=INFO +user=root +autostart=true +autorestart=true +startretries=5 +redirect_stderr=true +stdout_logfile=/var/log/celery/fba_celery_worker.log + +[program:celery_beat] +directory=/fba/backend/app +command=/usr/local/bin/celery -A tasks beat --loglevel=INFO +user=root 
+autostart=true +autorestart=true +startretries=5 +redirect_stderr=true +stdout_logfile=/var/log/celery/fba_celery_beat.log diff --git a/deploy/docker-compose/docker-compose.yml b/deploy/docker-compose/docker-compose.yml index 3b504593..8656df2a 100644 --- a/deploy/docker-compose/docker-compose.yml +++ b/deploy/docker-compose/docker-compose.yml @@ -2,6 +2,7 @@ version: "3.10" networks: fba_network: + name: fba_network driver: bridge ipam: driver: default @@ -10,21 +11,24 @@ networks: volumes: fba_mysql: + name: fba_mysql fba_redis: + name: fba_redis fba_static: + name: fba_static fba_rabbitmq: + name: fba_rabbitmq services: fba_server: build: context: ../../ dockerfile: backend.dockerfile - container_name: "fba_server" + container_name: fba_server restart: always depends_on: - fba_mysql - fba_redis - - fba_rabbitmq - fba_celery volumes: - fba_static:/fba/backend/app/static @@ -34,14 +38,16 @@ services: - bash - -c - | - wait-for-it -s fba_mysql:3306 -s fba_redis:6379 fba_rabbitmq:5672 fba_rabbitmq:15672 -t 300 + wait-for-it -s fba_mysql:3306 -s fba_redis:6379 -t 300 + mkdir -p /var/log/supervisor/ supervisord -c /fba/deploy/supervisor.conf + supervisorctl restart fastapi_server fba_mysql: image: mysql:8.0.29 ports: - "${DOCKER_DB_MAP_PORT:-3306}:3306" - container_name: "fba_mysql" + container_name: fba_mysql restart: always environment: MYSQL_DATABASE: fba @@ -61,7 +67,7 @@ services: image: redis:6.2.7 ports: - "${DOCKER_REDIS_MAP_PORT:-6379}:6379" - container_name: "fba_redis" + container_name: fba_redis restart: always environment: - TZ=Asia/Shanghai @@ -74,7 +80,7 @@ services: image: nginx ports: - "8000:80" - container_name: "fba_nginx" + container_name: fba_nginx restart: always depends_on: - fba_server @@ -90,7 +96,7 @@ services: ports: - "15672:15672" - "5672:5672" - container_name: "fba_rabbitmq" + container_name: fba_rabbitmq restart: always environment: - RABBITMQ_DEFAULT_USER=guest @@ -104,7 +110,18 @@ services: build: context: ../../ dockerfile: celery.dockerfile - container_name: "fba_celery" + container_name: fba_celery restart: always + depends_on: + - fba_rabbitmq networks: - fba_network + command: + - bash + - -c + - | + wait-for-it -s fba_rabbitmq:5672 -t 300 + mkdir -p /var/log/supervisor/ + supervisord -c /fba/deploy/supervisor.conf + supervisorctl restart celery_worker + supervisorctl restart celery_beat diff --git a/deploy/fastapi_server.conf b/deploy/fastapi_server.conf new file mode 100644 index 00000000..76bd9ba6 --- /dev/null +++ b/deploy/fastapi_server.conf @@ -0,0 +1,9 @@ +[program:fastapi_server] +directory=/fba +command=/usr/local/bin/gunicorn -c /fba/deploy/gunicorn.conf.py main:app +user=root +autostart=true +autorestart=true +startretries=5 +redirect_stderr=true +stdout_logfile=/var/log/fastapi_server/fba_server.log diff --git a/deploy/supervisor.conf b/deploy/supervisor.conf index 0db200a8..f8e75b01 100644 --- a/deploy/supervisor.conf +++ b/deploy/supervisor.conf @@ -42,7 +42,7 @@ file=/tmp/supervisor.sock ; the path to the socket file ;password=123 ; default is no password (open server) [supervisord] -logfile=/tmp/supervisord.log ; main log file; default $CWD/supervisord.log +logfile=/var/log/supervisor/supervisord.log ; main log file; default $CWD/supervisord.log logfile_maxbytes=50MB ; max main logfile bytes b4 rotation; default 50MB logfile_backups=10 ; # of main logfile backups; 0 means none, default 10 loglevel=info ; log level; default info; others: debug,warn,trace @@ -79,107 +79,5 @@ serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a 
unix socket ;prompt=mysupervisor ; cmd line prompt (default "supervisor") ;history_file=~/.sc_history ; use readline history if available -; The sample program section below shows all possible program subsection values. -; Create one or more 'real' program: sections to be able to control them under -; supervisor. - -;[program:theprogramname] -;command=/bin/cat ; the program (relative uses PATH, can take args) -;process_name=%(program_name)s ; process_name expr (default %(program_name)s) -;numprocs=1 ; number of processes copies to start (def 1) -;directory=/tmp ; directory to cwd to before exec (def no cwd) -;umask=022 ; umask for process (default None) -;priority=999 ; the relative start priority (default 999) -;autostart=true ; start at supervisord start (default: true) -;startsecs=1 ; # of secs prog must stay up to be running (def. 1) -;startretries=3 ; max # of serial start failures when starting (default 3) -;autorestart=unexpected ; when to restart if exited after running (def: unexpected) -;exitcodes=0 ; 'expected' exit codes used with autorestart (default 0) -;stopsignal=QUIT ; signal used to kill process (default TERM) -;stopwaitsecs=10 ; max num secs to wait b4 SIGKILL (default 10) -;stopasgroup=false ; send stop signal to the UNIX process group (default false) -;killasgroup=false ; SIGKILL the UNIX process group (def false) -;user=root ; setuid to this UNIX account to run the program -;redirect_stderr=true ; redirect proc stderr to stdout (default false) -;stdout_logfile=/a/path ; stdout log path, NONE for none; default AUTO -;stdout_logfile_maxbytes=1MB ; max # logfile bytes b4 rotation (default 50MB) -;stdout_logfile_backups=10 ; # of stdout logfile backups (0 means none, default 10) -;stdout_capture_maxbytes=1MB ; number of bytes in 'capturemode' (default 0) -;stdout_events_enabled=false ; emit events on stdout writes (default false) -;stdout_syslog=false ; send stdout to syslog with process name (default false) -;stderr_logfile=/a/path ; stderr log path, NONE for none; default AUTO -;stderr_logfile_maxbytes=1MB ; max # logfile bytes b4 rotation (default 50MB) -;stderr_logfile_backups=10 ; # of stderr logfile backups (0 means none, default 10) -;stderr_capture_maxbytes=1MB ; number of bytes in 'capturemode' (default 0) -;stderr_events_enabled=false ; emit events on stderr writes (default false) -;stderr_syslog=false ; send stderr to syslog with process name (default false) -;environment=A="1",B="2" ; process environment additions (def no adds) -;serverurl=AUTO ; override serverurl computation (childutils) - -; The sample eventlistener section below shows all possible eventlistener -; subsection values. Create one or more 'real' eventlistener: sections to be -; able to handle event notifications sent by supervisord. - -;[eventlistener:theeventlistenername] -;command=/bin/eventlistener ; the program (relative uses PATH, can take args) -;process_name=%(program_name)s ; process_name expr (default %(program_name)s) -;numprocs=1 ; number of processes copies to start (def 1) -;events=EVENT ; event notif. types to subscribe to (req'd) -;buffer_size=10 ; event buffer queue size (default 10) -;directory=/tmp ; directory to cwd to before exec (def no cwd) -;umask=022 ; umask for process (default None) -;priority=-1 ; the relative start priority (default -1) -;autostart=true ; start at supervisord start (default: true) -;startsecs=1 ; # of secs prog must stay up to be running (def. 
1) -;startretries=3 ; max # of serial start failures when starting (default 3) -;autorestart=unexpected ; autorestart if exited after running (def: unexpected) -;exitcodes=0 ; 'expected' exit codes used with autorestart (default 0) -;stopsignal=QUIT ; signal used to kill process (default TERM) -;stopwaitsecs=10 ; max num secs to wait b4 SIGKILL (default 10) -;stopasgroup=false ; send stop signal to the UNIX process group (default false) -;killasgroup=false ; SIGKILL the UNIX process group (def false) -;user=chrism ; setuid to this UNIX account to run the program -;redirect_stderr=false ; redirect_stderr=true is not allowed for eventlisteners - -;[group:thegroupname] -;programs=progname1,progname2 ; each refers to 'x' in [program:x] definitions -;priority=999 ; the relative start priority (default 999) - -; The [include] section can just contain the "files" setting. This -; setting can list multiple files (separated by whitespace or -; newlines). It can also contain wildcards. The filenames are -; interpreted as relative to this file. Included files *cannot* -; include files themselves. - -;[include] -;files = relative/directory/*.ini - -[program:fastapi_server] -directory=/fba -command=/usr/local/bin/gunicorn -c /fba/deploy/gunicorn.conf.py main:app -user=root -autostart=true -autorestart=true -startretries=5 -redirect_stderr=true -stdout_logfile=/var/log/fastapi_server/fba_server.log - -[program:celery_worker] -directory=/fba/backend/app -command=/usr/local/bin/celery -A tasks worker --loglevel=INFO -user=root -autostart=true -autorestart=true -startretries=5 -redirect_stderr=true -stdout_logfile=/var/log/celery/fba_celery_worker.log - -[program:celery_beat] -directory=/fba/backend/app -command=/usr/local/bin/celery -A tasks beat --loglevel=INFO -user=root -autostart=true -autorestart=true -startretries=5 -redirect_stderr=true -stdout_logfile=/var/log/celery/fba_celery_beat.log +[include] +files = /etc/supervisor/conf.d/*.conf From 86b8d7ae4b32926a1845b6bb840c6d71a8108c36 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 20:57:55 +0800 Subject: [PATCH 12/17] Update celery broker default is redis --- backend/app/core/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/core/conf.py b/backend/app/core/conf.py index 1d223236..0e683c8e 100644 --- a/backend/app/core/conf.py +++ b/backend/app/core/conf.py @@ -152,7 +152,7 @@ def validator_api_url(cls, values): IP_LOCATION_EXPIRE_SECONDS: int = 60 * 60 * 24 * 1 # 过期时间,单位:秒 # Celery - CELERY_BROKER: Literal['rabbitmq', 'redis'] = 'rabbitmq' + CELERY_BROKER: Literal['rabbitmq', 'redis'] = 'redis' CELERY_BACKEND_REDIS_PREFIX: str = 'fba_celery' CELERY_BACKEND_REDIS_TIMEOUT: float = 5.0 CELERY_BACKEND_REDIS_ORDERED: bool = True From def722d7a42d6ce0131008d818652d6670a2da97 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Wed, 25 Oct 2023 21:15:36 +0800 Subject: [PATCH 13/17] Force the pro env to use rabbitmq --- backend/app/core/conf.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/backend/app/core/conf.py b/backend/app/core/conf.py index 0e683c8e..bdf0bbdc 100644 --- a/backend/app/core/conf.py +++ b/backend/app/core/conf.py @@ -52,7 +52,7 @@ class Settings(BaseSettings): OPENAPI_URL: str | None = f'{API_V1_STR}/openapi' @root_validator - def validator_api_url(cls, values): + def validate_openapi_url(cls, values): if values['ENVIRONMENT'] == 'pro': values['OPENAPI_URL'] = None return values @@ -164,6 +164,12 @@ def validator_api_url(cls, values): }, } + @root_validator + def 
validate_celery_broker(cls, values): + if values['ENVIRONMENT'] == 'pro': + values['CELERY_BROKER'] = 'rabbitmq' + return values + class Config: # https://docs.pydantic.dev/usage/settings/#dotenv-env-support env_file = '.env' From 3cc2795177719f3b9c700ce2c2cd2bcb1b40e177 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Thu, 26 Oct 2023 00:11:53 +0800 Subject: [PATCH 14/17] Update the task interface --- backend/app/api/v1/mixed/tests.py | 5 ++-- backend/app/api/v1/task.py | 40 ++++++++++++++++++---------- backend/app/core/celery.py | 1 + backend/app/services/task_service.py | 33 +++++++++++++++++++++++ backend/app/tasks.py | 6 +++-- 5 files changed, 66 insertions(+), 19 deletions(-) create mode 100644 backend/app/services/task_service.py diff --git a/backend/app/api/v1/mixed/tests.py b/backend/app/api/v1/mixed/tests.py index e9999150..9dc6049f 100644 --- a/backend/app/api/v1/mixed/tests.py +++ b/backend/app/api/v1/mixed/tests.py @@ -1,16 +1,15 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- - from fastapi import APIRouter, File, UploadFile, Form -from backend.app.core.celery import celery_app +from backend.app.tasks import task_demo_async router = APIRouter(prefix='/tests') @router.post('/send', summary='测试异步任务') async def task_send(): - result = celery_app.send_task('tasks.task_demo_async') + result = task_demo_async.delay() return {'msg': 'Success', 'data': result.id} diff --git a/backend/app/api/v1/task.py b/backend/app/api/v1/task.py index e093e47d..47577209 100644 --- a/backend/app/api/v1/task.py +++ b/backend/app/api/v1/task.py @@ -1,25 +1,37 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- -from celery.exceptions import BackendGetMetaError -from celery.result import AsyncResult -from fastapi import APIRouter, Path +from typing import Annotated -from backend.app.common.exception.errors import NotFoundError +from fastapi import APIRouter, Path, Body + +from backend.app.common.jwt import DependsJwtAuth from backend.app.common.rbac import DependsRBAC from backend.app.common.response.response_code import CustomResponseCode from backend.app.common.response.response_schema import response_base +from backend.app.services.task_service import TaskService router = APIRouter() -@router.get('/{pk}', summary='获取任务结果', dependencies=[DependsRBAC]) +@router.get('', summary='获取所有可执行任务模块', dependencies=[DependsJwtAuth]) +async def get_all_tasks(): + tasks = TaskService.gets() + return await response_base.success(data=tasks) + + +@router.get('/{pk}', summary='获取任务结果', dependencies=[DependsJwtAuth]) async def get_task_result(pk: str = Path(description='任务ID')): - try: - task = AsyncResult(pk) - except BackendGetMetaError: - raise NotFoundError(msg='任务不存在') - else: - status = task.status - if status == 'FAILURE': - return await response_base.fail(res=CustomResponseCode.HTTP_204, data=task.result) - return await response_base.success(data=task.result) + task = TaskService.get(pk) + if not task: + return await response_base.fail(res=CustomResponseCode.HTTP_204, data=pk) + return await response_base.success(data=task.result) + + +@router.post('/{module}', summary='执行任务', dependencies=[DependsRBAC]) +async def run_task( + module: Annotated[str, Path(description='任务模块')], + args: Annotated[list | None, Body()] = None, + kwargs: Annotated[dict | None, Body()] = None, +): + task = TaskService.run(module=module, args=args, kwargs=kwargs) + return await response_base.success(data=task.result) diff --git a/backend/app/core/celery.py b/backend/app/core/celery.py index 6f96a22b..81ad2ea4 100644 --- 
a/backend/app/core/celery.py +++ b/backend/app/core/celery.py @@ -41,6 +41,7 @@ def make_celery(main_name: str) -> Celery: } app.conf.timezone = settings.DATETIME_TIMEZONE app.conf.task_track_started = True + app.autodiscover_tasks() # Celery Schedule Tasks # https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html diff --git a/backend/app/services/task_service.py b/backend/app/services/task_service.py new file mode 100644 index 00000000..0e95e5da --- /dev/null +++ b/backend/app/services/task_service.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +from celery.exceptions import BackendGetMetaError, NotRegistered +from celery.result import AsyncResult + +from backend.app.common.exception.errors import NotFoundError +from backend.app.core.celery import celery_app + + +class TaskService: + @staticmethod + def get(pk: str) -> AsyncResult | None: + try: + result = celery_app.AsyncResult(pk) + except (BackendGetMetaError, NotRegistered): + raise NotFoundError(msg='任务不存在') + if result.failed(): + return None + return result + + @staticmethod + def gets() -> dict: + filtered_tasks = {} + tasks = celery_app.tasks + for key, value in tasks.items(): + if not key.startswith('celery.'): + filtered_tasks[key] = value + return filtered_tasks + + @staticmethod + def run(*, module: str, args: list | None = None, kwargs: dict | None = None) -> AsyncResult: + task = celery_app.send_task(module, args, kwargs) + return task diff --git a/backend/app/tasks.py b/backend/app/tasks.py index 06bde048..7d18228b 100644 --- a/backend/app/tasks.py +++ b/backend/app/tasks.py @@ -9,5 +9,7 @@ @celery_app.task -def task_demo_async(): - print(f'异步任务 {uuid.uuid4().hex}') +def task_demo_async() -> str: + uid = uuid.uuid4().hex + print(f'异步任务 {uid} 执行成功') + return uid From 0050bb313ce2635128016c28562f5842e3c478c9 Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Thu, 26 Oct 2023 00:20:14 +0800 Subject: [PATCH 15/17] Add celery beat README description --- README.md | 4 +++- README.zh-CN.md | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5d5269ed..918d2a47 100644 --- a/README.md +++ b/README.md @@ -116,10 +116,12 @@ TODO: # Execute the migration alembic upgrade head ``` -7. Start celery worker +7. Start celery worker and beat ```shell celery -A tasks worker --loglevel=INFO + # Optional, if you don't need to use the scheduled task + celery -A tasks beat --loglevel=INFO ``` 8. Modify the configuration file as needed diff --git a/README.zh-CN.md b/README.zh-CN.md index 33e64261..b27d7253 100644 --- a/README.zh-CN.md +++ b/README.zh-CN.md @@ -111,10 +111,12 @@ TODO: alembic upgrade head ``` -7. 启动 celery worker +7. 启动 celery worker 和 beat ```shell celery -A tasks worker --loglevel=INFO + # 可选,如果您不需要使用计划任务 + celery -A tasks beat --loglevel=INFO ``` 8. 按需修改配置文件 From 292b6a1e89422bbe23753f7b3d63dbec8f578a8f Mon Sep 17 00:00:00 2001 From: Wu Clan Date: Thu, 26 Oct 2023 00:24:58 +0800 Subject: [PATCH 16/17] Update warning text style --- README.md | 3 ++- README.zh-CN.md | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 918d2a47..cca73d19 100644 --- a/README.md +++ b/README.md @@ -137,7 +137,8 @@ Click [fastapi_best_architecture_ui](https://github.com/fastapi-practices/fastap ### Docker deploy > [!WARNING] -> Default port conflict:8000,3306,6379,5672 +> Default port conflict:8000,3306,6379,5672 +> > As a best practice, shut down on-premises services before deployment:mysql,redis,rabbitmq... 1. 
From 292b6a1e89422bbe23753f7b3d63dbec8f578a8f Mon Sep 17 00:00:00 2001
From: Wu Clan
Date: Thu, 26 Oct 2023 00:24:58 +0800
Subject: [PATCH 16/17] Update warning text style

---
 README.md       | 3 ++-
 README.zh-CN.md | 1 +
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 918d2a47..cca73d19 100644
--- a/README.md
+++ b/README.md
@@ -137,7 +137,8 @@ Click [fastapi_best_architecture_ui](https://github.com/fastapi-practices/fastap
 ### Docker deploy
 
 > [!WARNING]
-> Default port conflict:8000,3306,6379,5672
+> Default port conflict:8000,3306,6379,5672
+>
 > As a best practice, shut down on-premises services before deployment:mysql,redis,rabbitmq...
 
 1. Go to the directory where the `docker-compose.yml` file is located and create the environment variable file `.env`
diff --git a/README.zh-CN.md b/README.zh-CN.md
index b27d7253..f250b272 100644
--- a/README.zh-CN.md
+++ b/README.zh-CN.md
@@ -133,6 +133,7 @@ TODO:
 
 > [!WARNING]
 > 默认端口冲突：8000,3306,6379,5672
+>
 > 最佳做法是在部署之前关闭本地服务：mysql,redis,rabbitmq...
 
 1. 进入 `docker-compose.yml` 文件所在目录，创建环境变量文件`.env`

From 8d6ce17506cdc531f83eb88fa521af3a682eee73 Mon Sep 17 00:00:00 2001
From: Wu Clan
Date: Thu, 26 Oct 2023 00:28:37 +0800
Subject: [PATCH 17/17] Revoke the default config comment content of the supervisor

---
 deploy/supervisor.conf | 72 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 72 insertions(+)

diff --git a/deploy/supervisor.conf b/deploy/supervisor.conf
index f8e75b01..c69c0ef7 100644
--- a/deploy/supervisor.conf
+++ b/deploy/supervisor.conf
@@ -79,5 +79,77 @@ serverurl=unix:///tmp/supervisor.sock ; use a unix:// URL for a unix socket
 ;prompt=mysupervisor ; cmd line prompt (default "supervisor")
 ;history_file=~/.sc_history ; use readline history if available
 
+; The sample program section below shows all possible program subsection values.
+; Create one or more 'real' program: sections to be able to control them under
+; supervisor.
+
+;[program:theprogramname]
+;command=/bin/cat ; the program (relative uses PATH, can take args)
+;process_name=%(program_name)s ; process_name expr (default %(program_name)s)
+;numprocs=1 ; number of processes copies to start (def 1)
+;directory=/tmp ; directory to cwd to before exec (def no cwd)
+;umask=022 ; umask for process (default None)
+;priority=999 ; the relative start priority (default 999)
+;autostart=true ; start at supervisord start (default: true)
+;startsecs=1 ; # of secs prog must stay up to be running (def. 1)
+;startretries=3 ; max # of serial start failures when starting (default 3)
+;autorestart=unexpected ; when to restart if exited after running (def: unexpected)
+;exitcodes=0 ; 'expected' exit codes used with autorestart (default 0)
+;stopsignal=QUIT ; signal used to kill process (default TERM)
+;stopwaitsecs=10 ; max num secs to wait b4 SIGKILL (default 10)
+;stopasgroup=false ; send stop signal to the UNIX process group (default false)
+;killasgroup=false ; SIGKILL the UNIX process group (def false)
+;user=root ; setuid to this UNIX account to run the program
+;redirect_stderr=true ; redirect proc stderr to stdout (default false)
+;stdout_logfile=/a/path ; stdout log path, NONE for none; default AUTO
+;stdout_logfile_maxbytes=1MB ; max # logfile bytes b4 rotation (default 50MB)
+;stdout_logfile_backups=10 ; # of stdout logfile backups (0 means none, default 10)
+;stdout_capture_maxbytes=1MB ; number of bytes in 'capturemode' (default 0)
+;stdout_events_enabled=false ; emit events on stdout writes (default false)
+;stdout_syslog=false ; send stdout to syslog with process name (default false)
+;stderr_logfile=/a/path ; stderr log path, NONE for none; default AUTO
+;stderr_logfile_maxbytes=1MB ; max # logfile bytes b4 rotation (default 50MB)
+;stderr_logfile_backups=10 ; # of stderr logfile backups (0 means none, default 10)
+;stderr_capture_maxbytes=1MB ; number of bytes in 'capturemode' (default 0)
+;stderr_events_enabled=false ; emit events on stderr writes (default false)
+;stderr_syslog=false ; send stderr to syslog with process name (default false)
+;environment=A="1",B="2" ; process environment additions (def no adds)
+;serverurl=AUTO ; override serverurl computation (childutils)
+
+; The sample eventlistener section below shows all possible eventlistener
+; subsection values. Create one or more 'real' eventlistener: sections to be
+; able to handle event notifications sent by supervisord.
+
+;[eventlistener:theeventlistenername]
+;command=/bin/eventlistener ; the program (relative uses PATH, can take args)
+;process_name=%(program_name)s ; process_name expr (default %(program_name)s)
+;numprocs=1 ; number of processes copies to start (def 1)
+;events=EVENT ; event notif. types to subscribe to (req'd)
+;buffer_size=10 ; event buffer queue size (default 10)
+;directory=/tmp ; directory to cwd to before exec (def no cwd)
+;umask=022 ; umask for process (default None)
+;priority=-1 ; the relative start priority (default -1)
+;autostart=true ; start at supervisord start (default: true)
+;startsecs=1 ; # of secs prog must stay up to be running (def. 1)
+;startretries=3 ; max # of serial start failures when starting (default 3)
+;autorestart=unexpected ; autorestart if exited after running (def: unexpected)
+;exitcodes=0 ; 'expected' exit codes used with autorestart (default 0)
+;stopsignal=QUIT ; signal used to kill process (default TERM)
+;stopwaitsecs=10 ; max num secs to wait b4 SIGKILL (default 10)
+;stopasgroup=false ; send stop signal to the UNIX process group (default false)
+;killasgroup=false ; SIGKILL the UNIX process group (def false)
+;user=chrism ; setuid to this UNIX account to run the program
+;redirect_stderr=false ; redirect_stderr=true is not allowed for eventlisteners
+
+;[group:thegroupname]
+;programs=progname1,progname2 ; each refers to 'x' in [program:x] definitions
+;priority=999 ; the relative start priority (default 999)
+
+; The [include] section can just contain the "files" setting. This
+; setting can list multiple files (separated by whitespace or
+; newlines). It can also contain wildcards. The filenames are
+; interpreted as relative to this file. Included files *cannot*
+; include files themselves.
+
 [include]
 files = /etc/supervisor/conf.d/*.conf
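With real `[program:x]` sections filled in for the celery worker and beat (the restored samples above document every available option), supervisord keeps both processes alive, and liveness can then be spot-checked over the broker. A small sketch, assuming the same `celery_app` as in the patches above:

```python
from backend.app.core.celery import celery_app


def worker_is_up(timeout: float = 2.0) -> bool:
    """Broadcast a ping over the broker; any reply means at least one worker is alive."""
    replies = celery_app.control.ping(timeout=timeout)
    return bool(replies)
```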