I have a Flask app with various endpoints and want to add an endpoint that pushes work onto a Celery task queue. It has been challenging to get the Celery worker container to build because of circular imports. I tried several ways of getting the worker to recognise the task, as shown further below, but it seems the Flask app context is not passed to the Celery worker when it is initialized. Running it with Docker Compose, the web, db (Postgres 13), redis and celery_worker containers all appear to build fine, but when I send the POST request that should hand work to the Celery worker, the task is not found.
Error:
celery_worker-1 | [2024-07-18 18:23:17,765: ERROR/MainProcess] Received unregistered task of type 'tasks.process_user_task'.
celery_worker-1 | The message has been ignored and discarded.
celery_worker-1 |
celery_worker-1 | Did you remember to import the module containing this task?
celery_worker-1 | Or maybe you're using relative imports?
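For context, the tasks that are actually registered can be checked both on the Celery instance and on the running workers; a minimal sketch, assuming the celery object from celery_app.py below (this is just a diagnostic, not part of the app code):

from celery_app import celery

print(sorted(celery.tasks.keys()))            # tasks registered in this process
print(celery.control.inspect().registered())  # tasks registered on live workers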
app.py:
import os
from flask import Flask
from datetime import timedelta
from dotenv import load_dotenv
from flask_smorest import Api
import logging
def create_app(db_url=None):
    app = Flask(__name__)
    load_dotenv()
    app.config["PROPAGATE_EXCEPTIONS"] = True
    app.config["API_TITLE"] = "###"
    app.config["API_VERSION"] = "v1"
    app.config["OPENAPI_VERSION"] = "3.0.3"
    app.config["OPENAPI_URL_PREFIX"] = "/"
    app.config["OPENAPI_SWAGGER_UI_PATH"] = "/swagger-ui"
    app.config["OPENAPI_SWAGGER_UI_URL"] = "https://cdn.jsdelivr.net/npm/swagger-ui-dist/"
    app.config["SQLALCHEMY_DATABASE_URI"] = db_url or os.getenv("DATABASE_URL")
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
    app.config["JWT_SECRET_KEY"] = os.getenv("JWT_SECRET_KEY", "super_secret_key")
    app.config["JWT_ACCESS_TOKEN_EXPIRES"] = timedelta(minutes=15)

    from db import db
    db.init_app(app)

    from flask_jwt_extended import JWTManager
    jwt = JWTManager(app)

    from resources.users import blp as UsersBlueprint
    from resources.auth import blp as AuthBlueprint

    api = Api(app)
    api.register_blueprint(UsersBlueprint)
    api.register_blueprint(AuthBlueprint)

    with app.app_context():
        db.create_all()

    from celery_app import init_celery
    celery = init_celery(app)

    return app, celery

app, celery = create_app()
celery_instance.py:
from celery import Celery
import os
def make_celery(app=None):
    # Initialize Celery instance
    celery = Celery(
        app.import_name if app else __name__,
        backend=os.getenv("CELERY_RESULT_BACKEND", "redis://redis:6379/0"),
        broker=os.getenv("CELERY_BROKER_URL", "redis://redis:6379/0"),
    )

    if app:
        celery.conf.update(app.config)

        class ContextTask(celery.Task):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return self.run(*args, **kwargs)

        celery.Task = ContextTask

    return celery
celery_app.py:
from celery_instance import make_celery
celery = make_celery() # Initialize Celery without the app
def init_celery(app):
    with app.app_context():
        celery.conf.update(app.config)
resources/users.py:
import uuid
import json
import os
from flask import jsonify, send_file
from flask.views import MethodView
from flask_smorest import Blueprint, abort
from sqlalchemy.exc import SQLAlchemyError
from flask_jwt_extended import jwt_required
from schemas import UserSchema, UserListSchema, UserResponseSchema
from models import Users
from db import db
from email_service import send_email
blp = Blueprint("users", "users", description="Operations on users")
@blp.route("/add_user")
class AddUser(MethodView):
@blp.arguments(UserSchema)
@blp.response(202, UserSchema)
def post(self, user_data):
from celery_app import celery # Import Celery instance dynamically to avoid circular import
from tasks import create_task # Import tasks dynamically to avoid circular import
process_user_task = create_task(celery) # Register the task dynamically
task = process_user_task.delay(user_data) # Enqueue the task
return {"task_id": task.id, "status": "Processing"}, 202
@blp.route("/task_status/<task_id>")
class TaskStatus(MethodView):
def get(self, task_id):
from celery_app import celery # Import Celery instance dynamically to avoid circular import
from tasks import create_task # Import tasks dynamically to avoid circular import
process_user_task = create_task(celery) # Register the task dynamically
task = process_user_task.AsyncResult(task_id) # Get task result
if task.state == 'PENDING':
response = {
'state': task.state,
'status': 'Pending...'
}
elif task.state != 'FAILURE':
response = {
'state': task.state,
'result': task.result
}
else:
response = {
'state': task.state,
'status': str(task.info), # Exception info
}
return jsonify(response)
tasks.py:
import logging
from models import Users
from db import db
from email_service import send_email
import json
import os
import uuid
def create_task(celery):
    @celery.task
    def process_user_task(user_data):
        logging.info("Starting process_user_task with data: %s", user_data)
        with celery.app.app_context():
            logging.info("App context is active.")
            ## function ##
I have tried a few things; all of them built the containers fine, but the tasks were still not registered.
Initializing Celery after the Flask app is fully created - app.py:
    return app

app = create_app()

# Initialize Celery after the Flask app is fully created
from celery_app import init_celery
celery = init_celery(app)
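I assume that in this variant init_celery also has to hand the configured instance back for celery = init_celery(app) to work, i.e. something like:

def init_celery(app):
    with app.app_context():
        celery.conf.update(app.config)
    return celery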
Importing tasks into celery_app.py:
    app.logger.info("Celery configured successfully")

    # Import tasks to ensure they are registered with Celery
    from tasks import register_tasks
    register_tasks(celery)

    # List registered tasks
    list_registered_tasks(celery)

    return celery
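register_tasks is a variant of the create_task factory shown in tasks.py above; roughly this shape (a sketch, not the exact code):

def register_tasks(celery):
    @celery.task(name="tasks.process_user_task")
    def process_user_task(user_data):
        logging.info("Processing user data: %s", user_data)
        ## function ##

    return process_user_task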
Using celery.autodiscover_tasks in celery_instance.py:
def make_celery(app=None):
    celery = Celery(
        app.import_name if app else __name__,
        backend=os.getenv("CELERY_RESULT_BACKEND", "redis://redis:6379/0"),
        broker=os.getenv("CELERY_BROKER_URL", "redis://redis:6379/0"),
    )

    if app:
        celery.conf.update(app.config)
        celery.autodiscover_tasks(['tasks.process_user_tasks'], force=True)

        class ContextTask(celery.Task):
            def __call__(self, *args, **kwargs):
                with app.app_context():
                    return self.run(*args, **kwargs)

        celery.Task = ContextTask

    return celery
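From the Celery docs, autodiscover_tasks(packages, related_name='tasks') takes package names and looks for a tasks submodule inside each listed package, rather than taking task names, e.g. (with myproject as a placeholder package name):

celery.autodiscover_tasks(["myproject"], related_name="tasks", force=True)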
I also looked at some Stack Exchange posts, such as Celery/Flask Receiving unregistered task of type (App Factory + Blueprints), and implemented a separate celery_worker.py file to keep the Celery tasks outside of the resource classes. It built the containers fine but gave the same issue:
import os
from app import create_app
from celery_app import init_celery
app, celery = create_app()
app.app_context().push()
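My understanding is that, for the worker process to see the task, it would also have to be created at import time in this module, i.e. something along the lines of adding:

from tasks import create_task

process_user_task = create_task(celery)  # register the factory-built task when the worker module is imported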
Any help or pointers would be much appreciated, thank you.
Answer:
My solution was to use RQ (Redis Queue) instead of Celery. It was much easier to implement.
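The core of it is just a queue bound to the Redis connection, with plain functions enqueued from the Flask views; a minimal sketch (the process_user function and the Redis URL are illustrative, not the exact code):

import os
from redis import Redis
from rq import Queue

from tasks import process_user  # a plain function; RQ needs no decorator or registration

redis_conn = Redis.from_url(os.getenv("REDIS_URL", "redis://redis:6379/0"))
queue = Queue("default", connection=redis_conn)

def enqueue_user(user_data):
    # the worker container just runs: rq worker --url $REDIS_URL default
    job = queue.enqueue(process_user, user_data)
    return {"task_id": job.get_id(), "status": "Processing"}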