《MetaGPT智能体开发入门》 Study Notes, Chapter 4: MetaGPT Subscription Mode

What is the subscription mode? Think of paying the post office 120 yuan for a 12-month magazine subscription: every month the courier delivers the magazine to your door, and it is waiting for you when you get home. Generalized, there is a stream of information, and a listener that receives the messages and organizes them. Based on this idea we will build a Weibo trending-topic subscription system: it collects each day's Weibo hot-search ranking (the crawler below grabs the top 50), analyzes it, and summarizes what has been trending recently.
To do this we need the following components:
1. A crawler that scrapes the trending list.
2. An analyzer for the scraped content.
3. MetaGPT's SubscriptionRunner, which provides a way to run a Role: with the SubscriptionRunner class we can trigger a Role on a schedule and push the Role's output to the user (a minimal wiring sketch follows right after this list).
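To make the wiring concrete before diving into the full files, here is a minimal sketch of the SubscriptionRunner pattern. The interval_trigger and print_callback below are hypothetical stand-ins, purely for illustration; the real trigger and callback used in this chapter follow in WeiboSubscriptionAgent.py.

import asyncio
from metagpt.schema import Message
from metagpt.subscription import SubscriptionRunner

# Hypothetical trigger: an async generator that yields one Message per interval
async def interval_trigger(seconds: int = 60):
    while True:
        await asyncio.sleep(seconds)
        yield Message(content="社会")

# Hypothetical callback: just print whatever the Role produced
async def print_callback(msg: Message):
    print(msg.content)

async def demo(role):
    runner = SubscriptionRunner()
    # subscribe() registers a Role, an async iterable of Messages (the trigger), and an async callback
    await runner.subscribe(role, interval_trigger(), print_callback)
    # run() loops forever: on every trigger message it runs the Role and hands the result to the callback
    await runner.run()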

The code below targets MetaGPT 0.8. All four files live under MetaGPT/examples/oss/, and the entry script imports the other modules directly, so run it from that directory (or put the directory on PYTHONPATH).

Entry point: WeiboSubscriptionAgent.py


"""
Filename: MetaGPT/examples/oss/WeiboSubscriptionAgent.py, the entry script that runs WeiboOssWatcher on a schedule
Created Date:
Author:
"""
 
import asyncio
 
# from QQMailSender import QQMailSender
from WeiboOssWatcher import WeiboOssWatcher
from typing import Optional
 
from aiocron import crontab
from pydantic import BaseModel
from pytz import BaseTzInfo
from metagpt.logs import logger
 
from metagpt.environment import Environment
from metagpt.schema import Message
from metagpt.subscription import SubscriptionRunner
 
# Trigger
class OssInfo(BaseModel):
    url: str
    timestamp: float
 
class WeiboHotCronTrigger:
    def __init__(self, spec: str, tz: Optional[BaseTzInfo] = None, topic: str = "社会") -> None:
        self.crontab = crontab(spec, tz=tz)
        self.topic = topic
 
    def __aiter__(self):
        return self
 
    async def __anext__(self):
        # Wait for the next cron tick, then emit the watched topic as a Message
        await self.crontab.next()
        logger.info(self.topic)
        return Message(content=self.topic)
 
async def qqmail_callback(msg: Message):
    # sender = QQMailSender()  # uses the default QQ mailbox and authorization code
    # sender.send_mail('jiahello@qq.com', 'MetaGPT-微博热搜', msg.content)
    print(f'发送完成: {msg.content}')

# Entry point: subscribe the Role to the cron trigger and fan results out to every callback
async def main(spec: str = "*/2 * * * *"):
    callbacks = [qqmail_callback]
 
    if not callbacks:
        async def _print(msg: Message):
            print(msg.content)
 
        callbacks.append(_print)
 
    async def callback(msg):
        await asyncio.gather(*(call(msg) for call in callbacks))
 
    runner = SubscriptionRunner()
    await runner.subscribe(WeiboOssWatcher(), WeiboHotCronTrigger(spec), callback)
    await runner.run()
 
if __name__ == "__main__":
    import fire
    fire.Fire(main)
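Because the entry point is wrapped in fire.Fire(main), the cron spec can be changed from the command line without editing the code. The default "*/2 * * * *" fires every 2 minutes (handy for testing); to run, say, every 30 minutes instead:

python WeiboSubscriptionAgent.py --spec "*/30 * * * *"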

WeiboOssWatcher: the Role that processes what the crawler scraped

"""
Filename: MetaGPT/examples/oss/WeiboOssWatcher.py, a Role that runs two Actions in order: CrawlingWeiboTrendingTopic, SummarizeTrendingTopic
Created Date: 
Author: 
"""
 
from metagpt.llm import LLM
from metagpt.actions import Action
from metagpt.roles import Role
from metagpt.schema import Message
from metagpt.logs import logger
from CrawlingWeiboTrendingTopic import CrawlingWeiboTrendingTopic
from SummarizeTrendingTopic import SummarizeTrendingTopic
 
class WeiboOssWatcher(Role):
    topic: str = ''
 
    def __init__(
            self,
            name="Gab Jia",
            profile="OssWatcherWeibo",
            goal="Generate an insightful Weibo Trending Topic analysis report.",
            constraints="Only analyze based on the provided data of Weibo Trending Topic.",
    ):
        super().__init__(name=name, profile=profile, goal=goal, constraints=constraints)
        # Older MetaGPT versions used self._init_actions; 0.8 uses set_actions
        # self._init_actions([CrawlingWeiboTrendingTopic, SummarizeTrendingTopic])
        self.set_actions([CrawlingWeiboTrendingTopic, SummarizeTrendingTopic])
        # Run the two actions in declaration order: crawl first, then summarize
        self._set_react_mode(react_mode="by_order")
 
    async def _act(self) -> Message:
        logger.info(f"{self._setting}: ready to {self.rc.todo}")
        # With react_mode="by_order", the actions are picked in the order they were declared
        todo = self.rc.todo

        msg = self.get_memories(k=1)[0]  # the most recent message
        print('input:', msg.content)

        if isinstance(todo, SummarizeTrendingTopic):
            # The trigger message (the watched topic) is the older of the two memories
            topic = self.get_memories(k=2)[0].content
            print('topic:', topic)
            result = await todo.run(msg.content, topic)
        else:
            result = await todo.run()

        msg = Message(content=str(result), role=self.profile, cause_by=type(todo))
        self.rc.memory.add(msg)
        return msg
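To sanity-check the Role on its own, without the cron trigger, you can run it once with a topic string. This is a minimal sketch, assuming an LLM is already configured for MetaGPT 0.8, where Role.run wraps a plain string into a Message before the Role reacts to it:

import asyncio
from WeiboOssWatcher import WeiboOssWatcher

async def smoke_test():
    role = WeiboOssWatcher()
    # One full pass: crawl the trending list, then summarize it for the topic "社会"
    result = await role.run("社会")
    print(result.content)

if __name__ == "__main__":
    asyncio.run(smoke_test())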

SummarizeTrendingTopic: analyzing the trending topics

"""
Filename: MetaGPT/examples/oss/SummarizeTrendingTopic.py, the Action that summarizes the Weibo trending topics
Created Date: 
Author:
"""
 
from typing import Any
from metagpt.actions.action import Action
 
ANALYSIS_PROMPT = """
你是热点分析师,根据热点信息和我关注的主题进行个性化的推荐和分析。
我关注的热点主题是{topic}
# For Example

# 热点总结
今天和{topic}相关的热点共有X条,具体内容如下
## 热点标题
## 热点内容

# 以下是给你用于分析的全部热点内容
{content}
"""
 
class SummarizeTrendingTopic(Action):
    """
    Make a personalized recommendation and analysis based on the Weibo trending
    topics and the topic I follow.
    """
    name: str = "SummarizeTrendingTopic"

    async def run(self, content: Any, topic: Any = '社会'):
        return await self._aask(ANALYSIS_PROMPT.format(topic=topic, content=content))
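The action can also be tried on its own with hand-made data, which is handy when tuning ANALYSIS_PROMPT. A minimal sketch, assuming an LLM is configured for MetaGPT; the sample dict is made up and only mirrors the {'title': 'heat'} shape the crawler returns:

import asyncio
from SummarizeTrendingTopic import SummarizeTrendingTopic

sample = {"某地发布暴雨红色预警": "1234567", "高考成绩今日公布": "987654"}
summary = asyncio.run(SummarizeTrendingTopic().run(content=sample, topic="社会"))
print(summary)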


The crawler: CrawlingWeiboTrendingTopic

"""
Filename: MetaGPT/examples/oss/CrawlingWeiboTrendingTopic.py, the Action that crawls the Weibo trending-topic list
Crawler code adapted from: https://zhuanlan.zhihu.com/p/651090185
Created Date: 
Author:
"""
 
import requests
from bs4 import BeautifulSoup
from metagpt.actions import Action
 
class CrawlingWeiboTrendingTopic(Action):
    name: str = "CrawlingWeiboTrendingTopic"
    async def run(self, url: str = "https://s.weibo.com/top/summary/") -> dict:
        # Fill in your own Sina Weibo cookie here; it is needed to pass the Sina passport check
        cookie = ''
        headers = {'Cookie': cookie}

        # Fetch the page and set the response encoding
        response = requests.get(url, headers=headers)
        response.encoding = response.apparent_encoding
        html = response.text

        # Parse the page with BeautifulSoup
        soup = BeautifulSoup(html, 'html.parser')
        # all_news holds the top-50 trending entries as a {'title': 'heat'} dict
        all_news = {}

        # The trending entries sit in 'td' elements with class 'td-02'
        for news in soup.find_all('td', class_='td-02')[1:]:
            parts = news.text.split('\n')
            title = parts[1].strip()
            heat = parts[2].strip()
            if heat == '':
                # Skip entries that carry no heat value
                continue
            if not heat[0].isdigit():
                # Strip the two leading label characters in front of the number
                heat = heat[2:]
            all_news[title] = heat

        return all_news
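The crawler action can be smoke-tested on its own before wiring it into the Role (remember to fill in your own cookie inside run first). A minimal sketch:

import asyncio
from CrawlingWeiboTrendingTopic import CrawlingWeiboTrendingTopic

async def test_crawl():
    news = await CrawlingWeiboTrendingTopic().run()
    # Print the first ten trending entries and their heat values
    for title, heat in list(news.items())[:10]:
        print(title, heat)

asyncio.run(test_crawl())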



Run result:

It ran once and then got blocked, haha.
