tasks.py
from celery_app import celery_app
from fastapi import Request, FastAPI
import time
from services.redis_service import RedisService
from services.kafka_service import KafkaService
from services.gewe_service import GeWeService
from common.log import logger
import asyncio

@celery_app.task(name='tasks.add_task', bind=True, acks_late=True)
def add_task(self, x, y):
    time.sleep(5)  # simulate a long-running computation
    logger.info('add')
    return x + y

@celery_app.task(name='tasks.mul_task', bind=True, acks_late=True)
def mul_task(self, x, y):
    time.sleep(5)  # simulate a long-running computation
    return x * y
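
# Example (illustrative) of enqueuing these tasks from application code. `.delay()` and
# `AsyncResult.get()` are standard Celery APIs; the argument values below are made up.
#
#     result = add_task.delay(2, 3)   # queues the task and returns an AsyncResult
#     result.get(timeout=30)          # blocks until a worker finishes -> 5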

# @celery.task(name='app.tasks.sync_contacts', bind=True, acks_late=True)
# async def sync_contacts_task(self, app):
#     login_keys = list(await app.state.redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*'))
#     return login_keys
#     # for k in login_keys:
#     #     print(k)

@celery_app.task(name='tasks.sync_contacts', bind=True, acks_late=True)
def sync_contacts_task(self, redis_service):
    # Use the redis_service passed as an argument. scan_iter returns an async
    # generator, so it must be consumed with `async for` inside an event loop
    # rather than awaited directly.
    async def collect():
        return [key async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*')]

    return asyncio.run(collect())

@celery_app.task(name='tasks.background_worker_task', bind=True, acks_late=True)
def background_worker_task(self, redis_config, kafka_config, gewe_config):
    async def task():
        redis_service = RedisService()
        await redis_service.init(**redis_config)
        login_keys = []
        async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*'):
            login_keys.append(key)
        print(login_keys)

    asyncio.run(task())
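
# Example (illustrative) of dispatching the worker task. The exact keys expected by
# RedisService.init(), KafkaService and GeWeService are defined elsewhere in this repo,
# so the dicts below are placeholders only.
#
#     background_worker_task.delay(
#         redis_config={'host': 'localhost', 'port': 6379, 'db': 0},
#         kafka_config={'bootstrap_servers': 'localhost:9092'},
#         gewe_config={'api_url': 'http://gewe:2531/v2/api'},
#     )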

# @celery.task(name='tasks.background_worker_task', bind=True, acks_late=True)
# async def background_worker_task(self, redis_config, kafka_config, gewe_config):
#     # Initialize services inside the task
#     redis_service = RedisService()
#     await redis_service.init(**redis_config)
#     login_keys = []
#     async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*'):  # iterate the async generator with async for
#         login_keys.append(key)
#     print(login_keys)
#
#     kafka_service = KafkaService(**kafka_config)
#     await kafka_service.start()
#     gewe_service = await GeWeService.get_instance(None, gewe_config['api_url'])
#
#     # Task logic
#     lock_name = "background_wxchat_thread_lock"
#     lock_identifier = str(time.time())
#     while True:
#         if await redis_service.acquire_lock(lock_name, timeout=10):
#             try:
#                 logger.info("Distributed lock acquired successfully")
#                 # Perform task logic
#             finally:
#                 await redis_service.release_lock(lock_name, lock_identifier)
#             break
#         else:
#             logger.info("Failed to acquire the distributed lock; retrying in 10 seconds...")
#             await asyncio.sleep(10)

@celery_app.task(name='tasks.scheduled_task', bind=True, acks_late=True)
def scheduled_task(self):
    print("🚀 Scheduled task executed successfully!")
    return "Hello from Celery Beat + RedBeat!"
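
# Scheduling sketch (illustrative): scheduled_task is meant to be triggered by Celery Beat
# using the RedBeat scheduler. One common setup, assuming the schedule is configured in
# celery_app.py (the entry name and interval below are placeholders):
#
#     celery_app.conf.beat_schedule = {
#         'scheduled-task-every-minute': {
#             'task': 'tasks.scheduled_task',
#             'schedule': 60.0,  # seconds
#         },
#     }
#
#     # start beat with the RedBeat scheduler:
#     #   celery -A celery_app beat -S redbeat.RedBeatScheduler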