# PyBot/media/freebuf.py
# -*- coding: utf-8 -*-
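"""Crawl the Freebuf RSS feed (https://www.freebuf.com/feed) and save the
parsed entries as JSON, logging progress to the console and ./log/spider.log."""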

import json
import logging
import os
import xml.etree.ElementTree as ET

import requests
from requests.exceptions import RequestException

# Configure logging
os.makedirs('./log', exist_ok=True)  # FileHandler does not create the directory itself
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.handlers.clear()  # drop any previously attached handlers
file_handler = logging.FileHandler('./log/spider.log', mode='a', encoding='utf-8')
file_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logger.addHandler(file_handler)
logger.addHandler(console_handler)
logger.propagate = False  # do not propagate records to ancestor loggers

# Crawler request headers (used for testing)
headers = {
    "Content-Type": "application/json",
    "Cache-Control": "no-cache",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.55 Safari/537.36",
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-User": "?1",
    "Sec-Fetch-Dest": "document",
    "Accept-Language": "zh-CN,zh;q=0.9"
}


def fetch_rss(url, headers):
    """Fetch the raw RSS feed content from url, returning None on failure."""
    try:
        # timeout added so a stalled connection cannot hang the crawler indefinitely
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()  # raise on HTTP error status codes
        return response.content
    except RequestException as e:
        logger.error(f"Error while requesting {url}: {e}")
        return None  # None signals that the request failed


def parse_rss(rss_content):
    """Parse RSS XML content into a list of dicts, one per <item> element."""
    items = []
    root = ET.fromstring(rss_content)
    for item in root.findall('.//item'):
        item_dict = {}
        for child in item:
            tag = child.tag
            # Rename the namespaced content:encoded tag to 'body' for easier handling
            if tag.startswith('{http://purl.org/rss/1.0/modules/content/}'):
                tag = 'body'
            item_dict[tag] = child.text
        items.append(item_dict)
    return items
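
# For reference, each parsed item is a flat dict keyed by the RSS child tags.
# Assuming the feed uses standard RSS 2.0 fields (the exact keys depend on the
# feed itself), an entry might look like:
# {'title': '...', 'link': '...', 'description': '...', 'pubDate': '...', 'body': '<full article HTML>'}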


def save_to_json(data, filename):
    """Write data to filename as pretty-printed, UTF-8 JSON."""
    with open(filename, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=4)


def freebuf_main():
    url = "https://www.freebuf.com/feed"
    rss_content = fetch_rss(url, headers)
    if rss_content is None:
        logger.warning("Could not fetch the Freebuf RSS feed; skipping the save step.")
        return
    try:
        items = parse_rss(rss_content)
        # Make sure the output directory exists
        os.makedirs(os.path.dirname('./JSON/freebuf.json'), exist_ok=True)
        # Save the parsed items to a JSON file
        save_to_json(items, './JSON/freebuf.json')
        logger.info("Data saved to ./JSON/freebuf.json")
    except Exception as e:
        logger.error(f"Error while parsing or saving Freebuf RSS content: {e}")


if __name__ == '__main__':
    freebuf_main()