This commit is contained in:
MasonLiu 2024-12-09 23:03:35 +08:00
parent 39e5ca3924
commit 5797ddae06
11 changed files with 2720 additions and 30 deletions

2633
1.txt Normal file

File diff suppressed because one or more lines are too long

27
Core.py
View File

@ -50,14 +50,13 @@ def send_job(time_1):
# 爬取数据
print("正在启动各爬虫并获取资源中...")
seebug_main()
M_4hou_main()
anquanke_main()
# sec_wiki_main()
huawei_main()
doonsec_main()
qianxin_main()
freebuf_main()
xianzhi_main()
M_4hou_main()
# 分析各个数据源的结果
reslt_4hou = Src_4hou(time_1)
@ -75,7 +74,8 @@ def send_job(time_1):
if reslt_4hou:
print("-" * 40)
logger.info("嘶吼资讯递送中:")
SendToFeishu(reslt_4hou, "嘶吼资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_4hou, "嘶吼资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -86,7 +86,8 @@ def send_job(time_1):
if reslt_anquanke:
print("-" * 40)
logger.info("安全客资讯递送中:")
SendToFeishu(reslt_anquanke, "安全客资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_anquanke, "安全客资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -97,7 +98,8 @@ def send_job(time_1):
if reslt_doonsec:
print("-" * 40)
logger.info("洞见微信安全资讯递送中:")
SendToFeishu(reslt_doonsec, "洞见微信安全资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_doonsec, "洞见微信安全资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -108,7 +110,8 @@ def send_job(time_1):
if reslt_xianzhi:
print("-" * 40)
logger.info("先知社区资讯递送中:")
SendToFeishu(reslt_xianzhi, "先知社区资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_xianzhi, "先知社区资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -119,7 +122,8 @@ def send_job(time_1):
if reslt_freebuf:
print("-" * 40)
logger.info("FreeBuf资讯递送中")
SendToFeishu(reslt_freebuf, "FreeBuf资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_freebuf, "FreeBuf资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -130,7 +134,8 @@ def send_job(time_1):
if reslt_qianxin:
print("-" * 40)
logger.info("奇安信攻防社区资讯递送中:")
SendToFeishu(reslt_qianxin, "奇安信攻防社区资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_qianxin, "奇安信攻防社区资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
time.sleep(60)
else:
@ -143,7 +148,8 @@ def send_job(time_1):
webhook_url, timestamp, sign = gen_sign()
print("-" * 40)
logger.info("Seebug社区资讯递送中")
SendToFeishu(reslt_seebug, "Seebug社区资讯递送", webhook_url, timestamp, sign)
result = SendToFeishu(reslt_seebug, "Seebug社区资讯递送", webhook_url, timestamp, sign)
logger.info(result)
print("-" * 40 + "\n")
else:
print("-" * 40)
@ -171,7 +177,8 @@ def main_loop():
except Exception as e:
logger.error(f"发生错误: {e}, 程序已暂停")
# SendToFeishu(f"发生错误: {e}, 程序已退出", "报错信息")
# result = SendToFeishu(f"发生错误: {e}, 程序已退出", "报错信息")
# logger.info(result)
exit()
# 探测rss源状态

View File

@ -97,8 +97,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"作者:{entry[5]}\t文章:{entry[1]}\n"
result += f"链接:{entry[2]}\t上传时间:{entry[4]}\n"
result += f"作者:{entry[5]}\n文章:{entry[1]}\n"
result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -90,8 +90,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"作者:{entry[6]}\t来源:{entry[3]}\t文章:{entry[1]}\n"
result += f"链接:{entry[2]}\t上传时间:{entry[5]}\n"
result += f"作者:{entry[6]}\n来源:{entry[3]}\n文章:{entry[1]}\n"
result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -97,8 +97,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"作者:{entry[5]}\t文章:{entry[1]}\n"
result += f"链接:[点此访问]({entry[2]})\t上传时间:{entry[4]}\n"
result += f"作者:{entry[5]}\n文章:{entry[1]}\n"
result += f"链接:[点此访问]({entry[2]})\n上传时间:{entry[4]}\n"
result += f"简介:{entry[3]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -97,8 +97,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"类型:{entry[5]}\t文章:{entry[1]}\n"
result += f"链接:{entry[2]}\t上传时间:{entry[4]}\n"
result += f"类型:{entry[5]}\n文章:{entry[1]}\n"
result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -88,8 +88,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"来源:{entry[3]}\t文章:{entry[1]}\n"
result += f"链接:{entry[2]}\t上传时间:{entry[5]}\n"
result += f"来源:{entry[3]}\n文章:{entry[1]}\n"
result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
result += f"描述:{entry[4]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -95,8 +95,8 @@ def clear_table():
def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"类型:{entry[3]}\t文章:{entry[1]}"
result += f"链接:{entry[2]}\t上传时间:{entry[5]}\n"
result += f"类型:{entry[3]}\n文章:{entry[1]}"
result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
result += f"{entry[4]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -95,7 +95,7 @@ def get_filtered_articles(entries):
result = ""
for entry in entries:
result += f"文章:{entry[1]}\n"
result += f"链接:{entry[2]}\t上传时间:{entry[3]}\n"
result += f"链接:{entry[2]}\n上传时间:{entry[3]}\n"
result += "-" * 40 + "\n" # 添加分隔线以便区分不同文章
return result

View File

@ -3,6 +3,17 @@ RSS订阅链接来源https://github.com/zhengjim/Chinese-Security-RSS <br>
使用python-json进行格式化然后使用飞书webhook机器人进行发送 <br>
config.yaml可指定大部分可能需要的参数 <br>
### 下一步计划 <br>
- 添加更多RSS订阅源 <br>
- 将所有打印信息转为logging info并存档已完成<br>
- 将logging info转为异步的loguru <br>
- 探查异常中断原因暂未清楚发生原因猜测和4hou获取rss后的代码逻辑有关 <br>
- 存档所有推送文章方便以后查看 <br>
- 添加更多推送方式,如邮件、微信等 <br>
### 日志相关
请查看./log文件夹下内容 <br>
### 使用建议: <br>
Linux系统建议下载screen于后台持续运行本脚本。 <br>
debian/ubuntu/kali: `apt install screen` <br>
@ -30,10 +41,3 @@ centos: `yum install screen` <br>
### 运行结果 <br>
![飞书展示](./imgs/start.jpg) <br>
![后端展示](./imgs/run.jpg) <br>
### 下一步计划
- 探查异常中断原因
- 添加更多RSS订阅源
- 将所有打印信息转为logging info并存档已完成
- 存档所有推送文章方便以后查看
- 添加更多推送方式,如邮件、微信等

46
db/test.py Normal file
View File

@ -0,0 +1,46 @@
import os
import sqlite3
from contextlib import closing
def read_sqlite_and_append_to_file(folder_path, output_file='./1.txt'):
    """
    Read every table of every SQLite database (*.db) found directly in
    *folder_path* and append its rows to *output_file*, one
    "column_name: value" pair per line, with a blank line between rows.

    :param folder_path: path of the folder containing SQLite *.db files
    :param output_file: path of the text file to append to
                        (default './1.txt')
    """
    # Open the output file once instead of re-opening it for every row
    # of every table (the original re-opened it inside the innermost loop).
    with open(output_file, 'a', encoding='utf-8') as out:
        for filename in os.listdir(folder_path):
            if not filename.endswith('.db'):
                continue
            db_path = os.path.join(folder_path, filename)
            # closing() guarantees the connection is released even if a
            # query below raises (the original leaked it on error).
            with closing(sqlite3.connect(db_path)) as conn:
                cursor = conn.cursor()
                # Collect all user table names in this database.
                cursor.execute(
                    "SELECT name FROM sqlite_master WHERE type='table';")
                tables = [row[0] for row in cursor.fetchall()]
                for table_name in tables:
                    # Identifiers cannot be bound as SQL parameters, so
                    # quote them to survive unusual table names.
                    quoted = '"' + table_name.replace('"', '""') + '"'
                    # Column names come from PRAGMA table_info (index 1).
                    cursor.execute(f"PRAGMA table_info({quoted});")
                    column_names = [col[1] for col in cursor.fetchall()]
                    cursor.execute(f"SELECT * FROM {quoted};")
                    for row in cursor.fetchall():
                        # Write each row as "column: value" lines.
                        for col_name, value in zip(column_names, row):
                            out.write(f"{col_name}: {value}\n")
                        out.write("\n")  # blank line separates rows
# Example usage: dump every database under ./db/ into ./1.txt.
# Guarded so that importing this module does not trigger the scan
# (the original ran it unconditionally on import).
if __name__ == "__main__":
    read_sqlite_and_append_to_file('./db/')