- from celery_app import celery_app
- from fastapi import Request, FastAPI
- import time
- import datetime
- from celery import Celery
- import celery.schedules
- from redbeat import RedBeatSchedulerEntry
- from datetime import timedelta
-
- from services.redis_service import RedisService
- from services.kafka_service import KafkaService
- from services.gewe_service import GeWeService
- from common.log import logger
- import asyncio
- import random
-
-
- @celery_app.task(name='tasks.add_task', bind=True, acks_late=True)
- def add_task(self, x, y):
-     time.sleep(5)
-     logger.info('add')
-     return x + y
-
-
- @celery_app.task(name='tasks.mul_task', bind=True, acks_late=True)
- def mul_task(self, x, y):
-     time.sleep(5)
-     return x * y
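-
-
- # Illustrative usage sketch (not called anywhere in this module): how application code
- # could enqueue the demo tasks above. Assumes the Celery app has a result backend
- # configured so that .get() can retrieve return values; the timeout is an arbitrary choice.
- def _example_enqueue_demo_tasks():
-     result = add_task.delay(2, 3)                      # runs on a worker, returns an AsyncResult
-     print(result.get(timeout=30))                      # -> 5 once the worker finishes
-     mul_task.apply_async(args=(4, 5), countdown=10)    # schedule to run ~10 seconds from now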
-
-
- @celery_app.task(name='tasks.sync_contacts', bind=True, acks_late=True)
- def sync_contacts_task(self, redis_config):
-     async def task():
-         redis_service = RedisService()
-         await redis_service.init(**redis_config)
-         # scan_iter returns an async iterator, not an awaitable, so collect keys with async for
-         return [key async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*')]
-
-     return asyncio.run(task())
-
-
- @celery_app.task(name='tasks.background_worker_task', bind=True, acks_late=True)
- def background_worker_task(self, redis_config, kafka_config, gewe_config):
-     async def task():
-         redis_service = RedisService()
-         await redis_service.init(**redis_config)
-         login_keys = []
-         async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*'):
-             login_keys.append(key)
-         logger.info(login_keys)
-
-     asyncio.run(task())
-
-
- @celery_app.task(name='tasks.scheduled_task_sync_wx_info', bind=True, acks_late=True)
- def scheduled_task_sync_wx_info(self, redis_config, kafka_config, gewe_config):
-     '''
-     Periodically fetch and refresh WeChat account profile info.
-     '''
-     async def task():
-         try:
-             redis_service = RedisService()
-             await redis_service.init(**redis_config)
-             gewe_service = await GeWeService.get_instance(redis_service, gewe_config['api_url'])
-             login_keys = []
-             async for key in redis_service.client.scan_iter(match='__AI_OPS_WX__:LOGININFO:*'):
-                 login_keys.append(key)
-
-             logger.info(login_keys)
-             for k in login_keys:
-                 r = await redis_service.get_hash(k)
-                 app_id = r.get("appId")
-                 token_id = r.get("tokenId")
-                 wxid = r.get("wxid")
-                 status = r.get('status')
-                 if status == '0':
-                     logger.warning(f"WeChat account {wxid} is offline, skipping")
-                     continue
-                 ret, msg, profile = await gewe_service.get_profile_async(token_id, app_id)
-                 if ret != 200:
-                     logger.warning(f"Failed to sync profile for WeChat account {wxid}: {ret}-{msg}")
-                     continue
-
-                 nickname = profile.get("nickName")
-                 head_img_url = profile.get("smallHeadImgUrl")
-                 r.update({"nickName": nickname, "headImgUrl": head_img_url, "modify_at": int(time.time())})
-                 # Redis hashes cannot store None, so replace missing values with empty strings
-                 cleaned_login_info = {field: (value if value is not None else '') for field, value in r.items()}
-                 await redis_service.set_hash(k, cleaned_login_info)
-                 logger.info(f"Synced profile for WeChat account {wxid} (nickname: {nickname})")
-
-         except Exception as e:
-             logger.error(f"Exception while running scheduled_task_sync_wx_info: {e}")
-
-     # Reuse the worker's event loop if possible; create a fresh one if it has been closed.
-     loop = asyncio.get_event_loop()
-     if loop.is_closed():
-         loop = asyncio.new_event_loop()
-         asyncio.set_event_loop(loop)
-
-     loop.run_until_complete(task())
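-
-
- # Illustrative beat configuration (this would normally live in the Celery app/config module,
- # not in tasks.py): one way to run the profile sync every 30 minutes. The interval and the
- # config dicts passed as args are assumptions, not values defined in this file.
- #
- # celery_app.conf.beat_schedule = {
- #     'sync-wx-info-every-30-min': {
- #         'task': 'tasks.scheduled_task_sync_wx_info',
- #         'schedule': timedelta(minutes=30),
- #         'args': (REDIS_CONFIG, KAFKA_CONFIG, GEWE_CONFIG),  # hypothetical config dicts
- #     },
- # }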
-
-
-
- REDIS_KEY_PATTERN = "friend_add_limit:{date}"
- REDIS_LAST_RUN_KEY = "last_run_time:add_friends_task"
-
- @celery_app.task(name='tasks.add_friends_task', bind=True, acks_late=True)
- def add_friends_task(self, redis_config):
-     """
-     Add friends with rate limits: at most 15 per day and at most 8 in any 2-hour window.
-     """
-     async def task():
-         redis_service = RedisService()
-         await redis_service.init(**redis_config)
-         today_str = datetime.datetime.now().strftime("%Y%m%d")
-         redis_key = REDIS_KEY_PATTERN.format(date=today_str)
-
-         # Current counters (missing fields count as 0)
-         total_added = await redis_service.get_hash_field(redis_key, "total") or 0
-         last_2h_added = await redis_service.get_hash_field(redis_key, "last_2h") or 0
-
-         total_added = int(total_added)
-         last_2h_added = int(last_2h_added)
-
-         logger.info(f"Friends added today: {total_added}, added in the last 2 hours: {last_2h_added}")
-
-         if total_added >= 15:
-             logger.warning("Daily friend-add limit reached!")
-             return
-
-         if last_2h_added >= 8:
-             logger.warning("2-hour friend-add limit reached!")
-             return
-
-         max_add = min(15 - total_added, 8 - last_2h_added)
-         if max_add <= 0:
-             return
-
-         # Add at most one friend per run
-         num_to_add = min(max_add, 1)
-         logger.info(f"Adding {num_to_add} friend(s) this run")
-
-         # The actual friend-request call is not implemented yet; assume every planned
-         # add succeeds so the counters below stay consistent.
-         success = num_to_add
-
-         if success > 0:
-             await redis_service.increment_hash_field(redis_key, "total", success)
-             await redis_service.increment_hash_field(redis_key, "last_2h", success)
-
-             # Daily counter lives for 24h; the rolling 2h counter field expires after 2h
-             await redis_service.expire(redis_key, 86400)
-             await redis_service.expire_field(redis_key, "last_2h", 7200)
-
-             logger.info(f"Successfully added {success} friend(s); total today: {total_added + success}")
-
-     # Reuse the worker's event loop if possible; create a fresh one if it has been closed.
-     loop = asyncio.get_event_loop()
-     if loop.is_closed():
-         loop = asyncio.new_event_loop()
-         asyncio.set_event_loop(loop)
-
-     loop.run_until_complete(task())
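-
-
- # Note on the 2-hour window above: per-field TTLs on a Redis hash require Redis 7.4+ (HEXPIRE),
- # so this code assumes RedisService.expire_field is backed by that or an equivalent. If it is
- # not available, one alternative sketch (key name hypothetical) is a separate rolling counter:
- #
- #     window_key = f"{redis_key}:last_2h"
- #     await redis_service.client.incrby(window_key, success)
- #     await redis_service.client.expire(window_key, 7200)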
-
-
- @celery_app.task(name='tasks.random_scheduled_task', bind=True, acks_late=True)
- def random_scheduled_task(self):
-     logger.info(f"Task executed at {datetime.datetime.now()}")
-
-     # Pick a random delay and reschedule this task through RedBeat
-     next_run_in = random.randint(10, 60)
-     logger.info(f"Next execution will be in {next_run_in} seconds")
-
-     entry = RedBeatSchedulerEntry(
-         name='random-task',
-         task='tasks.random_scheduled_task',
-         schedule=timedelta(seconds=next_run_in),
-         app=celery_app
-     )
-     entry.save()
-     return f"Scheduled next run in {next_run_in} seconds"