Compare commits
No commits in common. "main" and "beta0.1" have entirely different histories.
Core.py — 98 changed lines
@@ -24,7 +24,6 @@ from GotoSend.xianzhi import Src_xianzhi
 from GotoSend.freebuf import Src_freebuf
 from GotoSend.qianxin import Src_qianxin
 from GotoSend.seebug import Src_seebug
-from config.check_config import get_core_config, get_debug_config
 from loguru import logger
 
 # 清除所有已有的日志记录器配置
@@ -36,20 +35,21 @@ logger.add("./log/core.log",
            compression="zip",
            encoding="utf-8")
 # shell终端打印日志
-debug = get_debug_config()
-if debug == "True":
-    logger.add(lambda msg: print(msg),
-               format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
+# logger.add(lambda msg: print(msg),
+#            format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
 
-def signal_handler(sig, frame):
-    logger.info("接收到退出信号,程序即将退出...")
-    sys.exit(0)
+# 加载参数
+with open('./config.yaml', 'r', encoding="utf-8") as file:
+    config = yaml.safe_load(file)
+# sleep_time = int(f"{config['sleep_time']}")
+e_hour = int(f"{config['e_hour']}")
+choice = int(f"{config['circle']}")
+fs_activate = f"{config['fs_activate']}"
+wx_activate = f"{config['wx_activate']}"
+ding_activate = f"{config['ding_activate']}"
+lx_activate = f"{config['lx_activate']}"
 
-# 全局变量
-signal.signal(signal.SIGINT, signal_handler)  # Ctrl+C
-signal.signal(signal.SIGTERM, signal_handler)  # kill命令
 webhook_url_once, timestamp_once, sign_once = gen_sign()
-e_hour, choice, fs_activate, wx_activate, ding_activate, lx_activate, url_web = get_core_config()
 
 def check_avaliable(info_long, info_short, title, webhook_url, timestamp, sign):
     if info_long:  # 发送完整文章相关内容
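For context, main's side of this hunk drives the console sink off a debug switch instead of hard-coding commented-out code. A minimal, self-contained sketch of that pattern (the sink format string is taken from the diff; reading the flag from a `DEBUG` environment variable is an assumption standing in for `get_debug_config()`):

```python
import os
import sys
from loguru import logger

logger.remove()  # drop loguru's default stderr sink
logger.add("./log/core.log",
           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}",
           rotation="100 MB", compression="zip", encoding="utf-8")

# Mirror log records to the terminal only when debugging is switched on.
debug = os.getenv("DEBUG", "True")  # assumption: stand-in for get_debug_config()
if debug == "True":
    logger.add(sys.stdout,
               format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
```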
@@ -121,17 +121,52 @@ def send_job(time_1):
     check_avaliable(result_seebug_long, result_seebug_short, "Seebug社区资讯", webhook_url, timestamp, sign)
 
     if fs_activate == "True":
-        send_result = SendToFeishu(f"[点此访问]({url_web})网站以查看全部文章。", "单次运行结束", webhook_url, timestamp, sign)
+        send_result = SendToFeishu("[点此访问](https://info.masonliu.com)网站以查看全部文章。", "单次运行结束", webhook_url, timestamp, sign)
         logger.info(send_result)
     else:
         pass
     if wx_activate == "True":
-        send_result = SendToWX(f"[点此访问]({url_web})网站以查看全部文章。", "单次运行结束")
+        send_result = SendToWX("[点此访问](https://info.masonliu.com)网站以查看全部文章。", "单次运行结束")
         logger.info(send_result)
     else:
         pass
     logger.info("执行完毕,等待下一次执行...")
 
+def signal_handler(sig, frame):
+    logger.info("接收到退出信号,程序即将退出...")
+    sys.exit(0)
+
+signal.signal(signal.SIGINT, signal_handler)  # Ctrl+C
+signal.signal(signal.SIGTERM, signal_handler)  # kill命令
+
+def main_loop(choice):
+    if choice == 1:
+        while True:
+            try:
+                # 执行任务
+                send_job(e_hour)
+                time.sleep(e_hour * 60 * 60 - 3 * 60)
+
+            except Exception as e:
+                logger.error(f"发生错误: {e}, 程序已暂停")
+                # result = SendToFeishu(f"发生错误: {e}, 程序已退出", "报错信息")
+                # logger.info(result)
+                exit()
+
+    elif choice == 0:
+        # 设置每天的特定时间点执行job函数
+        schedule.every().day.at("09:00").do(send_job, 12)
+        schedule.every().day.at("12:00").do(send_job, 3)
+        schedule.every().day.at("15:00").do(send_job, 3)
+        schedule.every().day.at("18:00").do(send_job, 3)
+        schedule.every().day.at("21:00").do(send_job, 3)
+
+        while True:
+            schedule.run_pending()
+            time.sleep(60)  # 每分钟检查一次是否有任务需要执行
+
 # 探测rss源状态
 def check_rss_status(url):
     try:
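The `main_loop` added here leans on the third-party `schedule` library for the fixed-time mode. A small standalone sketch of the same two modes, with a dummy `send_job` so it runs on its own:

```python
import time
import schedule  # pip install schedule

def send_job(hours_back):
    print(f"collect and push articles from the last {hours_back} hours")

def main_loop(choice, e_hour=4):
    if choice == 1:                      # interval mode: run, then sleep roughly e_hour
        while True:
            send_job(e_hour)
            time.sleep(e_hour * 60 * 60 - 3 * 60)
    elif choice == 0:                    # fixed-time mode: run at set times every day
        schedule.every().day.at("09:00").do(send_job, 12)
        schedule.every().day.at("12:00").do(send_job, 3)
        while True:
            schedule.run_pending()       # fire any job whose time has come
            time.sleep(60)
```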
@@ -167,33 +202,9 @@ def test_rss_source():
 
     return rss_info
 
-def main_loop(choice):
-    if choice == 1:
-        while True:
-            try:
-                # 执行任务
-                send_job(e_hour)
-                time.sleep(e_hour * 60 * 60 - 3 * 60)
-
-            except Exception as e:
-                logger.error(f"发生错误: {e}, 程序已暂停")
-                # result = SendToFeishu(f"发生错误: {e}, 程序已退出", "报错信息")
-                # logger.info(result)
-                exit()
-
-    elif choice == 0:
-        # 设置每天的特定时间点执行job函数
-        schedule.every().day.at("09:00").do(send_job, 12)
-        schedule.every().day.at("12:00").do(send_job, 3)
-        schedule.every().day.at("15:00").do(send_job, 3)
-        schedule.every().day.at("18:00").do(send_job, 3)
-        schedule.every().day.at("21:00").do(send_job, 3)
-
-        while True:
-            schedule.run_pending()
-            time.sleep(60)  # 每分钟检查一次是否有任务需要执行
-
-def send_first_message():
+if __name__ == "__main__":
+    print("程序正在运行当中。")
+    time.sleep(5)  # 添加短暂的延迟
     rss_info = test_rss_source()
     start_info = ""
     start_info += "程序已启动,当前时间为:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "\n"
@@ -218,14 +229,9 @@ def send_first_message():
         logger.info(result)
     else:
         pass
 
-if __name__ == "__main__":
-    logger.info("程序正在运行当中。")
-    time.sleep(5)  # 添加短暂的延迟
-
     # 首次运行先暂停两分钟
     # time.sleep(2 * 60)
 
     # 主程序
-    send_first_message()
     main_loop(choice)
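Both branches register the same SIGINT/SIGTERM handler so `Ctrl+C` and `kill` stop the loop cleanly; as a standalone sketch the shutdown pattern looks like this (logging swapped for `print` to keep it dependency-free):

```python
import signal
import sys
import time

def signal_handler(sig, frame):
    print("exit signal received, shutting down...")
    sys.exit(0)

signal.signal(signal.SIGINT, signal_handler)   # Ctrl+C
signal.signal(signal.SIGTERM, signal_handler)  # kill <pid>

while True:
    time.sleep(1)  # the real program would run send_job / schedule.run_pending() here
```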
@@ -112,16 +112,17 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n作者:{entry[5]}\n"
-            result += f"上传时间:{entry[4]}\n"
+            result += f"作者:{entry[5]}\n文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         if Is_short == True:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
+            result += f"文章:{entry[1]}\n"
             result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**作者**:{entry[5]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[4]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
     record_md(record)
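Across the GotoSend modules the edit is the same: drop the inline Markdown link from the title and emit a separate 链接 line, in both the long and short variants. A generic sketch of that formatting pattern (the dict field names are illustrative, not the project's tuple schema):

```python
def format_articles(entries, is_short):
    """entries: iterable of dicts with title/link/author/time keys (illustrative)."""
    result = ""
    for e in entries:
        if not is_short:
            result += f"作者:{e['author']}\n文章:{e['title']}\n"
            result += f"链接:{e['link']}\n上传时间:{e['time']}\n"
            result += "\n" + "-" * 40 + "\n"   # long form: wide separator
        else:
            result += f"文章:{e['title']}\n链接:{e['link']}\n上传时间:{e['time']}\n"
            result += "\n" + "-" * 3 + "\n"    # short form: compact separator
    return result
```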
Binary files not shown (7 files changed).
@@ -107,17 +107,18 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n作者:{entry[6]}\n来源:{entry[3]}\n"
-            result += f"上传时间:{entry[5]}\n"
+            result += f"作者:{entry[6]}\n来源:{entry[3]}\n文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         elif Is_short == True:
             result += f"文章:{entry[1]}\n"
             result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**作者**:{entry[6]}\n"
         record += f"**来源**:{entry[3]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[5]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
     record_md(record)
@@ -114,18 +114,18 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"作者:{entry[5]}\n"
-            result += f"上传时间:{entry[4]}\n"
+            result += f"作者:{entry[5]}\n文章:{entry[1]}\n"
+            result += f"链接:[点此访问]({entry[2]})\n上传时间:{entry[4]}\n"
             result += f"简介:{entry[3]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         if Is_short == True:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"上传时间:{entry[4]}\n"
+            result += f"文章:{entry[1]}\n"
+            result += f"链接:[点此访问]({entry[2]})\n上传时间:{entry[4]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**作者**:{entry[5]}\n"
+        record += f"**链接**:[点此访问]({entry[2]})\n"
         record += f"**上传时间**:{entry[4]}\n"
         record += f"**简介**:{entry[3]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
@@ -113,16 +113,17 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n类型:{entry[5]}\n"
-            result += f"上传时间:{entry[4]}\n"
+            result += f"类型:{entry[5]}\n文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         elif Is_short == True:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"上传时间:{entry[4]}\n"
+            result += f"文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[4]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**类型**:{entry[5]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[4]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
     record_md(record)
@@ -104,17 +104,18 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n来源:{entry[3]}\n"
-            result += f"上传时间:{entry[5]}\n"
+            result += f"来源:{entry[3]}\n文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += f"描述:{entry[4]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"上传时间:{entry[5]}\n"
+            result += f"文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**来源**:{entry[3]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[5]}\n"
         record += f"**描述**:{entry[4]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
@@ -111,17 +111,18 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n类型:{entry[3]}\n"
-            result += f"上传时间:{entry[5]}\n"
+            result += f"类型:{entry[3]}\n文章:{entry[1]}"
+            result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += f"{entry[4]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         if Is_short == True:
-            result += f"文章:[{entry[1]}]({entry[2]})"
-            result += f"上传时间:{entry[5]}\n"
+            result += f"文章:{entry[1]}"
+            result += f"链接:{entry[2]}\n上传时间:{entry[5]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
         record += f"**类型**:{entry[3]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[5]}\n"
         record += f"{entry[4]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
@@ -1,196 +0,0 @@
-import os
-import json
-import sqlite3
-from datetime import datetime, timedelta
-
-def clear_table():
-    conn = sqlite3.connect('./db/sougou-wx.db')
-    cursor = conn.cursor()
-    cursor.execute('DELETE FROM articles')
-    conn.commit()
-    conn.close()
-
-def create_database():
-    conn = sqlite3.connect('./db/sougou-wx.db')
-    cursor = conn.cursor()
-    cursor.execute('''CREATE TABLE IF NOT EXISTS articles (
-        id INTEGER PRIMARY KEY AUTOINCREMENT,
-        title TEXT,
-        link TEXT,
-        description TEXT,
-        pubDate DATETIME,
-        author TEXT,
-        keyword TEXT,
-        is_sended BOOLEAN
-    )''')
-    conn.commit()
-    conn.close()
-
-def insert_data(data):
-    conn = sqlite3.connect('./db/sougou-wx.db')
-    cursor = conn.cursor()
-    for entry in data:
-        # 检查是否存在相同 title 和 author 的记录
-        cursor.execute('''
-            SELECT 1 FROM articles WHERE title = ? AND author = ?
-        ''', (entry['title'], entry['author']))
-
-        if cursor.fetchone() is None:
-            # 如果没有找到相同记录,则插入新记录
-            cursor.execute('''
-                INSERT INTO articles (title, link, description, pubDate, author, keyword)
-                VALUES (?, ?, ?, ?, ?, ?)
-            ''', (entry['title'], entry['link'], entry['description'], entry['pubDate'], entry['author'], entry['keyword']))
-
-    conn.commit()
-    conn.close()
-
-def get_json():
-    # 检查文件是否存在
-    if not os.path.exists('./JSON/sougou-wx.json'):
-        raise FileNotFoundError(f"sougou-wx.json文件不存在,请检查程序是否运行正常!")
-
-    # 打开并读取JSON文件
-    with open('./JSON/sougou-wx.json', 'r', encoding='utf-8') as file:
-        data = json.load(file)
-
-    # 假设data是一个包含多个关键词的字典
-    total_data = []
-    for keyword, keyword_data in data.items():
-        # 检查关键词对应的数据是否为列表
-        if not isinstance(keyword_data, list):
-            raise ValueError(f"关键词 {keyword} 对应的数据格式错误,请检查common.py是否异常!")
-
-        # 提取所需字段并编号
-        for index, item in enumerate(keyword_data, start=1):
-            entry = {
-                "id": index,
-                "title": item.get("title", ""),
-                "link": item.get("link", ""),
-                "description": item.get("description", ""),
-                "pubDate": item.get("pubDate", ""),
-                "author": item.get("author", ""),
-                "keyword": keyword
-            }
-            total_data.append(entry)
-
-    return total_data
-
-def select_articles():
-    conn = sqlite3.connect('./db/sougou-wx.db')
-    cursor = conn.cursor()
-
-    # 获取当前日期和时间
-    now = datetime.now()
-    two_months_ago = now - timedelta(days=60)  # 假设两个月大约60天
-
-    try:
-        # 查询最近的3条未被标记为True的消息且发布时间不超过两个月
-        cursor.execute('''
-            SELECT * FROM articles
-            WHERE is_sended IS NULL AND pubDate BETWEEN ? AND ?
-            ORDER BY pubDate DESC
-            LIMIT 3
-        ''', (two_months_ago.strftime('%Y-%m-%d %H:%M:%S'), now.strftime('%Y-%m-%d %H:%M:%S')))
-
-        # 查询最近的3条未被标记为True的消息
-        # cursor.execute('''
-        #     SELECT * FROM articles
-        #     WHERE is_sended IS NULL
-        #     ORDER BY pubDate DESC
-        #     LIMIT 3
-        # ''')
-
-        results = cursor.fetchall()
-        # print(results)
-
-        if results:
-            for row in results:
-                article_id = row[0]
-                cursor.execute('''
-                    UPDATE articles
-                    SET is_sended = True
-                    WHERE id = ?
-                ''', (article_id,))
-
-            conn.commit()  # 提交事务
-
-    except Exception as e:
-        conn.rollback()  # 回滚事务
-        print(f"Error: {e}")
-    finally:
-        cursor.close()
-        conn.close()
-
-    return results
-
-def record_md(result, filename="./history/wx_news.md"):
-    # 读取现有内容
-    if os.path.exists(filename):
-        with open(filename, 'r', encoding='utf-8') as file:
-            existing_content = file.read()
-    else:
-        existing_content = ""
-
-    # 将新内容插入到现有内容的开头
-    new_content = result + existing_content
-
-    # 写回文件
-    with open(filename, 'w', encoding='utf-8') as file:
-        file.write(new_content)
-
-def get_filtered_articles(entries, Is_short):
-    result = ""
-    record = ""
-    for entry in entries:
-        if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n描述:{entry[3]}\n"
-            result += f"上传时间:{entry[4]}\n"
-            result += f"作者:{entry[5]}\n"
-            result += f"关键词:{entry[6]}\n"
-            result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
-        if Is_short == True:
-            result += f"文章:[{entry[1]}]({entry[2]})"
-            result += f"上传时间:{entry[4]}\n"
-            result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
-
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n描述:{entry[3]}\n"
-        record += f"**上传时间**:{entry[4]}\n"
-        record += f"**作者**:{entry[5]}\n"
-        record += f"**关键词**:{entry[6]}\n"
-        record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
-    record_md(record)
-    return result
-
-def Src_sougou_wx(Is_short):
-    if not os.path.exists('./db/sougou-wx.db'):
-        # 创建数据库和表
-        create_database()
-
-    # 清空表
-    # clear_table()
-
-    # 获取 JSON 数据
-    sougou_wx_data = get_json()
-
-    # 插入数据到数据库
-    insert_data(sougou_wx_data)
-
-    # 查询指定时间段内的数据
-    filtered_articles = select_articles()
-    # print(filtered_articles)
-
-    if filtered_articles:
-        results = get_filtered_articles(filtered_articles, Is_short)
-        return results
-    else:
-        return False
-
-if __name__ == "__main__":
-    reslts = Src_sougou_wx(False)
-    if reslts != False:
-        print(reslts)
-    else:
-        # 如果为空,则跳过执行
-        print("-" * 40)
-        print("微信公众号数据为空,跳过执行。")
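The heart of the deleted module is a de-duplicating insert followed by a "mark as sent" update around a small sqlite table. A condensed sketch of just that flow, with table and column names taken from the deleted file:

```python
import sqlite3

def insert_articles(db_path, entries):
    """Insert new rows, skipping any whose title+author pair already exists."""
    conn = sqlite3.connect(db_path)
    cur = conn.cursor()
    for e in entries:
        cur.execute("SELECT 1 FROM articles WHERE title = ? AND author = ?",
                    (e["title"], e["author"]))
        if cur.fetchone() is None:
            cur.execute("INSERT INTO articles (title, link, description, pubDate, author, keyword) "
                        "VALUES (?, ?, ?, ?, ?, ?)",
                        (e["title"], e["link"], e["description"],
                         e["pubDate"], e["author"], e["keyword"]))
    conn.commit()
    conn.close()
```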
@@ -110,15 +110,16 @@ def get_filtered_articles(entries, Is_short):
     record = ""
     for entry in entries:
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"上传时间:{entry[3]}\n"
+            result += f"文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[3]}\n"
             result += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
         if Is_short == False:
-            result += f"文章:[{entry[1]}]({entry[2]})\n"
-            result += f"上传时间:{entry[3]}\n"
+            result += f"文章:{entry[1]}\n"
+            result += f"链接:{entry[2]}\n上传时间:{entry[3]}\n"
             result += "\n" + "-" * 3 + "\n"  # 添加分隔线以便区分不同文章
 
-        record += f"#### 文章:[{entry[1]}]({entry[2]})\n"
+        record += f"#### 文章:{entry[1]}\n"
+        record += f"**链接**:{entry[2]}\n"
         record += f"**上传时间**:{entry[3]}\n"
         record += "\n" + "-" * 40 + "\n"  # 添加分隔线以便区分不同文章
     record_md(record)
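Every module archives what it pushed by prepending a Markdown block to a history file (the `record_md` pattern in the deleted module). A minimal version of that helper, with an illustrative default path:

```python
import os

def record_md(new_block, filename="./history/news.md"):
    existing = ""
    if os.path.exists(filename):
        with open(filename, "r", encoding="utf-8") as f:
            existing = f.read()
    # newest records go on top, so the web page shows them first
    with open(filename, "w", encoding="utf-8") as f:
        f.write(new_block + existing)
```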
JSON/xianzhi.json — 1002 changed lines (diff suppressed because it is too large)
README.md — 14 changed lines
@@ -3,11 +3,6 @@ RSS订阅链接来源:https://github.com/zhengjim/Chinese-Security-RSS <br>
 使用python-json进行格式化,然后使用飞书webhook机器人进行发送 <br>
 config.yaml可指定大部分可能需要的参数 <br>
 
-### 项目特色 <br>
-- 模块化:爬虫(获取信息部分)、分析(对获取的json信息进行筛选分析存储)、推送(推送至各渠道)、网页等各模块均可单独运行。 <br>
-- 轻量化:默认使用sqlite以及其他常见的各系统自带的库,用户仅需配置python环境,不会占用过多内存。 <br>
-- 简单化:配置好config后即可一步运行,效率极高。 <br>
-
 ### 日志相关
 请查看./log文件夹下内容 <br>
 
@@ -25,7 +20,7 @@ centos: `yum install screen` <br>
 随后便可直接运行:`python Core.py` <br>
 web运行:`python ./web/app.py` <br>
 随后web网页将会在本地5000端口启动,访问即可,使用反向代理即可以域名映射到外网 <br>
-直接访问web域名即可查看历史推送,访问路径/log即可查看程序运行日志,/weblog查看flask日志 <br>
+直接访问web域名即可查看历史推送,访问路径/log即可查看程序运行日志 <br>
 
 ### 配置 <br>
 首先先在飞书中创建群组,然后再创建WebHook机器人 <br>
@@ -37,13 +32,6 @@ web运行:`python ./web/app.py` <br>
 <center><img src="./imgs/config.jpg" width="50%" alt="配置"/></center><br>
 那么现在,您就可以开始运行使用了。 <br>
 
-### Github访问限制配置 <br>
-若短时间内请求次数过多,可能会被github限制,可参考以下配置 <br>
-- 对于未经身份验证的请求,github 速率限制允许每小时最多 60 个请求 <br>
-- 而通过使用基本身份验证的 API 请求,每小时最多可以发出 5,000 个请求 <br>
-- https://github.com/settings/tokens/new 创建token,时间建议选择无限制。[github-token](./imgs/github-token.png) <br>
-`./config/config.yaml`中可配置github_token <br>
-
 
 ### 运行结果 <br>
 <center><img src="./imgs/start.jpg" width="50%" alt="飞书运行提示"/></center><br>
@@ -9,7 +9,7 @@ import time
 import yaml
 
 def gen_sign():
-    with open('./config/config.yaml', 'r', encoding="utf-8") as file:
+    with open('./config.yaml', 'r', encoding="utf-8") as file:
         config = yaml.safe_load(file)
     secret = f"{config['fs_secret']}"
     # print(secret)
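Only the config path changes here; `gen_sign()` itself builds the signature that Feishu's signed webhooks expect. Per Feishu's documented scheme this is an HMAC-SHA256 keyed with "timestamp\nsecret" over an empty message, base64-encoded — a sketch, assuming `secret` is the `fs_secret` value read from the YAML:

```python
import base64
import hashlib
import hmac
import time

def gen_sign(secret):
    timestamp = str(int(time.time()))
    string_to_sign = f"{timestamp}\n{secret}"
    # Feishu uses the combined string as the HMAC key and signs an empty body
    digest = hmac.new(string_to_sign.encode("utf-8"),
                      digestmod=hashlib.sha256).digest()
    return timestamp, base64.b64encode(digest).decode("utf-8")
```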
@@ -7,7 +7,7 @@ from email.mime.text import MIMEText
 from email.header import Header
 
 # 加载参数
-with open('./config/config.yaml', 'r', encoding="utf-8") as file:
+with open('./config.yaml', 'r', encoding="utf-8") as file:
     config = yaml.safe_load(file)
 mail_host = f"{config['mail_host']}"
 mail_user = f"{config['mail_user']}"
@@ -9,7 +9,7 @@ import hmac
 import time
 import yaml
 
-with open('./config/config.yaml', 'r', encoding="utf-8") as file:
+with open('./config.yaml', 'r', encoding="utf-8") as file:
     config = yaml.safe_load(file)
 webhook_url = f"https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key={config['wx_key']}"
 # print(webhook_url)
Binary files not shown (2 files changed).
UpdateLOG.md — 20 changed lines
@@ -1,27 +1,21 @@
+### 从2024年12月15日开始记录
+
 ### 问题反馈 <br>
 - 准点发送的文章在定点推送模式下可能会被遗漏推送 <br>
 - 钉钉/企业微信/蓝信webhook存在字节长度限制,需要优化程序推送逻辑 <br>
 
-### 下一步计划(待完成) <br>
+### 下一步计划 <br>
 - 添加更多RSS订阅源(持续进行中) <br>
-- 更换筛选模块,由时段筛选改为历史记录筛选以确保不会有资讯漏报 <br>
-- 添加更多推送方式,如邮件、微信等 <br>
-- 添加GitHub等监测源(参考github-cve-monitor) <br>
-- 添加Mysql作为数据库存储 <br>
-
-### 下一步计划(已完成) <br>
 - 将所有打印信息转为logging info并存档(已完成)<br>
 - 将logging info转为异步的loguru(已完成) <br>
-- 探查异常中断原因(已解决,获取rss源时的请求未做超时检测) <br>
+- 探查异常中断原因(已发现,获取rss源时的请求未做超时) <br>
 - 添加超时机制,防止程序异常卡死(已完成) <br>
 - 存档所有推送文章方便以后查看(已完成) <br>
-- 创建Web网页以展示最新推送(info.masonliu.com,已完成) <br>
+- 添加更多推送方式,如邮件、微信等 <br>
+- 创建Web网页以展示最新推送(info.masonliu.com) <br>
 
 ### 更新日志
-#### 从2024年12月15日开始记录
 - 2024年12月15日早:优化了文件结构,修复了日志记录时的小BUG,添加web展示日志功能 <br>
 - 2024年12月15日晚:修复了单次运行结束时的校验错误问题 <br>
 - 2024年12月18日早:添加了短文本推送机制,一定程度上解决了长字节推送问题(解决办法正在思考中) <br>
 - 2024年12月24日晚:上传了测试0.1版本,修复了报错问题 <br>
-- 2024年12月25日早:优化了代码逻辑和表现 <br>
-- 2024年12月25日晚:优化了推送报文格式 <br>
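The open issue about DingTalk/WeCom/Lanxin webhook byte limits is what the short-text mode works around. One simple guard is to clip the payload by encoded length before sending — a sketch only; the 4096-byte budget is an illustrative assumption, not a documented limit of any of these webhooks:

```python
def clip_for_webhook(text, max_bytes=4096):
    """Trim text so its UTF-8 encoding fits a webhook's byte budget."""
    data = text.encode("utf-8")
    if len(data) <= max_bytes:
        return text
    # decode with errors ignored so a multi-byte character cut in half is dropped cleanly
    return data[:max_bytes].decode("utf-8", errors="ignore") + "\n...(truncated)"
```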
@@ -1,19 +1,17 @@
 # 飞书相关配置信息
 fs_activate: True
-fs_key: aa04a02f-d7bf-4279-bd48-44c4f28c8f74 # 此处填写token,记得冒号后空一格,如aa04a02f-d7bf-4279-bd48-44c4f28c8f74
-fs_secret: 4tq65T4jm1MO2IlxvHxBWe # 此处填写签名密钥,记得冒号后空一格,如4tq65T4jm1MO2IlxvHxBWe
+fs_key: # 此处填写token,记得冒号后空一格,如aa04a02f-d7bf-4279-bd48-44c4f28c8f74
+fs_secret: # 此处填写签名密钥,记得冒号后空一格,如4tq65T4jm1MO2IlxvHxBWe
 
 # 企业微信相关配置信息
-wx_activate: False
+wx_activate: True
 wx_key: # 此处填写token,记得冒号后空一格,如9a3dd6ff-75d6-4208-bc4b-77724a5805d6
 
 # 钉钉相关配置信息
 ding_activate: False
-ding_key:
 
 # 蓝信相关配置信息
 lx_activate: False
-lx_key:
 
 # 邮件配置,邮件推送正在完善中
 mail_host: smtp.masonliu.com #设置服务器
@@ -24,9 +22,3 @@ receivers: ['2857911564@qq.com']
 # 结算时间范围
 e_hour: 4 # 程序运行时间间隔
 circle: 1 # 是否启用循环,设置为0后将设置为特定时间点运行
-
-# 网址配置
-url: https://info.masonliu.com/ # 请设置为您自己反代的域名,或者改为 http://127.0.0.1:5000 或者对应IP域名
-
-# 调试模式
-debug: True
Binary file not shown.
@@ -1,63 +0,0 @@
-import yaml
-from loguru import logger
-
-# 清除所有已有的日志记录器配置
-logger.remove()
-
-logger.add("./log/core.log",
-           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}",
-           rotation="100 MB",
-           compression="zip",
-           encoding="utf-8")
-# shell终端打印日志
-logger.add(lambda msg: print(msg),
-           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
-
-def get_core_config():
-    # 加载参数
-    with open('./config/config.yaml', 'r', encoding="utf-8") as file:
-        config = yaml.safe_load(file)
-        logger.debug(f"Loaded config: {config}")  # 输出加载的配置
-
-    choice = int(f"{config['circle']}")
-    e_hour = int(config.get('e_hour', '4'))  # 默认循环时间为4小时
-
-    fs_activate = f"{config['fs_activate']}"
-    if fs_activate == "True":
-        fs_key = config.get('fs_key')
-        fs_secret = config.get('fs_secret')
-        if not fs_key or not fs_secret:
-            logger.error("飞书相关配置不能为空,请检查配置文件./config/config.yaml")
-            exit(5)
-
-    wx_activate = f"{config['wx_activate']}"
-    if wx_activate == "True":
-        wx_key = config.get('wx_key')
-        if not wx_key:
-            logger.error("企业微信相关配置不能为空,请检查配置文件./config/config.yaml")
-            exit(5)
-
-    ding_activate = f"{config['ding_activate']}"
-    if ding_activate == "True":
-        ding_key = config.get('ding_key')
-        if not ding_key:
-            logger.error("钉钉相关配置不能为空,请检查配置文件./config/config.yaml")
-            exit(5)
-
-    lx_activate = f"{config['lx_activate']}"
-    if lx_activate == "True":
-        lx_key = config.get('lx_key')
-        if not lx_key:
-            logger.error("蓝信相关配置不能为空,请检查配置文件./config/config.yaml")
-            exit(5)
-
-    url_web = f"{config['url']}"
-
-    return e_hour, choice, fs_activate, wx_activate, ding_activate, lx_activate, url_web
-
-def get_debug_config():
-    with open('./config/config.yaml', 'r', encoding="utf-8") as file:
-        config = yaml.safe_load(file)
-        debug = f"{config['debug']}"
-
-    return debug
|
|||||||
# github相关配置信息
|
|
||||||
github_token: xxxxxx # 此处填写github-token,在高速率获取github资源时有效防止403封禁
|
|
||||||
translate: False # 是否开启翻译
|
|
||||||
|
|
||||||
# 监控列表
|
|
||||||
tool_list: # 监控已创建的仓库是否更新
|
|
||||||
- https://api.github.com/repos/BeichenDream/Godzilla
|
|
||||||
- https://api.github.com/repos/rebeyond/Behinder
|
|
||||||
- https://api.github.com/repos/AntSwordProject/antSword
|
|
||||||
- https://api.github.com/repos/j1anFen/shiro_attack
|
|
||||||
- https://api.github.com/repos/yhy0/github-cve-monitor
|
|
||||||
- https://api.github.com/repos/gentilkiwi/mimikatz
|
|
||||||
- https://api.github.com/repos/ehang-io/nps
|
|
||||||
- https://api.github.com/repos/chaitin/xray
|
|
||||||
- https://api.github.com/repos/FunnyWolf/pystinger
|
|
||||||
- https://api.github.com/repos/L-codes/Neo-reGeorg
|
|
||||||
- https://api.github.com/repos/shadow1ng/fscan
|
|
||||||
- https://api.github.com/repos/SafeGroceryStore/MDUT
|
|
||||||
- https://api.github.com/repos/EdgeSecurityTeam/Vulnerability
|
|
||||||
- https://api.github.com/repos/wy876/POC
|
|
||||||
- https://api.github.com/Vme18000yuan/FreePOC
|
|
||||||
|
|
||||||
keyword_list: # 监控关键词
|
|
||||||
- sql注入
|
|
||||||
- cnvd
|
|
||||||
- 未授权
|
|
||||||
- 漏洞POC
|
|
||||||
- RCE
|
|
||||||
- 渗透测试
|
|
||||||
- 反序列化
|
|
||||||
- 攻防
|
|
||||||
- webshell
|
|
||||||
- 红队
|
|
||||||
- redteam
|
|
||||||
- 信息收集
|
|
||||||
- 绕过
|
|
||||||
- bypass av
|
|
||||||
|
|
||||||
user_list: # 监控用户
|
|
||||||
- su18
|
|
||||||
- BeichenDream
|
|
||||||
- phith0n
|
|
||||||
- zhzyker
|
|
||||||
- lijiejie
|
|
||||||
- projectdiscovery
|
|
||||||
- HavocFramework
|
|
@@ -1 +0,0 @@
-Sogou-WX: ["中国银行", "APP逆向", "渗透测试"] # 基于搜狗引擎搜索特定关键词的微信公众号文章
github开发文档.md — 143 changed lines
@@ -1,143 +0,0 @@
-### 设计思路
-前情提要:GitHub的API接口为json格式,极其方便于使用python进行提取分析
-api地址:
-获取关键词下更新的最新仓库源:https://api.github.com/search/repositories?q={Keyword}&sort=updated&per_page=30
-- sort=updated:按更新时间排序
-- per_page=30:查询数量,建议设置为30
-- page=1:指定查询页数
-获取指定用户的仓库更新情况:https://api.github.com/users/{user}/repos
-获取指定仓库commit情况:https://api.github.com/repos/{user}/{repo}
-
-### 速率限制
-headers ={ "Authorization": " token OAUTH-TOKEN"}
-OAUTH-TOKEN:github个人账号设置->开发者设置->个人token。创建一个新token时,可以选择具体的权限,创建成功时一定要复制到本地哪里保存,只会让你看见一次,如果忘记的话就需要重新生成。
-
-### 使用技术
-- python-json解析
-- python-sqlite联动
-- python-request爬虫
-- sqlite筛选
-
-### 参考Json源格式
-所需部分:
-- html_url
-- created_at:仓库创建时间
-- updated_at:仓库最近更新时间
-- pushed_at:仓库最近推送时间(参考此元素进行设计)
-- description:仓库描述
-
-{
-    "id": 511095846,
-    "node_id": "R_kgDOHna0Jg",
-    "name": "TestnetProtocol",
-    "full_name": "exorde-labs/TestnetProtocol",
-    "private": false,
-    "owner": {
-        "login": "exorde-labs",
-        "id": 64810085,
-        "node_id": "MDEyOk9yZ2FuaXphdGlvbjY0ODEwMDg1",
-        "avatar_url": "https://avatars.githubusercontent.com/u/64810085?v=4",
-        "gravatar_id": "",
-        "url": "https://api.github.com/users/exorde-labs",
-        "html_url": "https://github.com/exorde-labs",
-        "followers_url": "https://api.github.com/users/exorde-labs/followers",
-        "following_url": "https://api.github.com/users/exorde-labs/following{/other_user}",
-        "gists_url": "https://api.github.com/users/exorde-labs/gists{/gist_id}",
-        "starred_url": "https://api.github.com/users/exorde-labs/starred{/owner}{/repo}",
-        "subscriptions_url": "https://api.github.com/users/exorde-labs/subscriptions",
-        "organizations_url": "https://api.github.com/users/exorde-labs/orgs",
-        "repos_url": "https://api.github.com/users/exorde-labs/repos",
-        "events_url": "https://api.github.com/users/exorde-labs/events{/privacy}",
-        "received_events_url": "https://api.github.com/users/exorde-labs/received_events",
-        "type": "Organization",
-        "user_view_type": "public",
-        "site_admin": false
-    },
-    "html_url": "https://github.com/exorde-labs/TestnetProtocol",
-    "description": null,
-    "fork": false,
-    "url": "https://api.github.com/repos/exorde-labs/TestnetProtocol",
-    "forks_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/forks",
-    "keys_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/keys{/key_id}",
-    "collaborators_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/collaborators{/collaborator}",
-    "teams_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/teams",
-    "hooks_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/hooks",
-    "issue_events_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/issues/events{/number}",
-    "events_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/events",
-    "assignees_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/assignees{/user}",
-    "branches_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/branches{/branch}",
-    "tags_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/tags",
-    "blobs_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/git/blobs{/sha}",
-    "git_tags_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/git/tags{/sha}",
-    "git_refs_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/git/refs{/sha}",
-    "trees_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/git/trees{/sha}",
-    "statuses_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/statuses/{sha}",
-    "languages_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/languages",
-    "stargazers_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/stargazers",
-    "contributors_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/contributors",
-    "subscribers_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/subscribers",
-    "subscription_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/subscription",
-    "commits_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/commits{/sha}",
-    "git_commits_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/git/commits{/sha}",
-    "comments_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/comments{/number}",
-    "issue_comment_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/issues/comments{/number}",
-    "contents_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/contents/{+path}",
-    "compare_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/compare/{base}...{head}",
-    "merges_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/merges",
-    "archive_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/{archive_format}{/ref}",
-    "downloads_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/downloads",
-    "issues_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/issues{/number}",
-    "pulls_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/pulls{/number}",
-    "milestones_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/milestones{/number}",
-    "notifications_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/notifications{?since,all,participating}",
-    "labels_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/labels{/name}",
-    "releases_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/releases{/id}",
-    "deployments_url": "https://api.github.com/repos/exorde-labs/TestnetProtocol/deployments",
-    "created_at": "2022-07-06T10:44:29Z",
-    "updated_at": "2024-12-27T02:20:32Z",
-    "pushed_at": "2024-12-27T02:20:28Z",
-    "git_url": "git://github.com/exorde-labs/TestnetProtocol.git",
-    "ssh_url": "git@github.com:exorde-labs/TestnetProtocol.git",
-    "clone_url": "https://github.com/exorde-labs/TestnetProtocol.git",
-    "svn_url": "https://github.com/exorde-labs/TestnetProtocol",
-    "homepage": null,
-    "size": 1918317,
-    "stargazers_count": 16,
-    "watchers_count": 16,
-    "language": "Solidity",
-    "has_issues": true,
-    "has_projects": true,
-    "has_downloads": true,
-    "has_wiki": true,
-    "has_pages": false,
-    "has_discussions": false,
-    "forks_count": 20,
-    "mirror_url": null,
-    "archived": false,
-    "disabled": false,
-    "open_issues_count": 0,
-    "license": {
-        "key": "mit",
-        "name": "MIT License",
-        "spdx_id": "MIT",
-        "url": "https://api.github.com/licenses/mit",
-        "node_id": "MDc6TGljZW5zZTEz"
-    },
-    "allow_forking": true,
-    "is_template": false,
-    "web_commit_signoff_required": false,
-    "topics": [
-    ],
-    "visibility": "public",
-    "forks": 20,
-    "open_issues": 0,
-    "watchers": 16,
-    "default_branch": "main",
-    "score": 1.0
-}
-
-### 参考代码
-year = datetime.datetime.now().year
-api = "https://api.github.com/search/repositories?q=CVE-{}&sort=updated".format(year)
-json_str = requests.get(api, headers=github_headers, timeout=10).json()
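The deleted design note's 参考代码 boils down to one authenticated search request against the documented endpoint. A runnable sketch with the token header and a timeout; the token value and the fields printed are illustrative:

```python
import datetime
import requests

github_token = "xxxxxx"  # personal access token, as configured in github_config.yaml
github_headers = {"Authorization": f"token {github_token}"}

year = datetime.datetime.now().year
api = f"https://api.github.com/search/repositories?q=CVE-{year}&sort=updated&per_page=30"
resp = requests.get(api, headers=github_headers, timeout=10)
resp.raise_for_status()
for repo in resp.json().get("items", []):
    # html_url / pushed_at / description are the fields the note says it keys on
    print(repo["html_url"], repo["pushed_at"], repo["description"])
```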
@@ -1,10 +0,0 @@
-#### 文章:[钓鱼下载网站传播“游蛇”威胁,恶意安装程序暗藏远控木马](https://www.4hou.com/posts/6MVz)
-**作者**:安天
-**上传时间**:2024-12-25 17:02:19
-
-----------------------------------------
-#### 文章:[钓鱼下载网站传播“游蛇”威胁,恶意安装程序暗藏远控木马](https://www.4hou.com/posts/6MVz)
-**作者**:安天
-**上传时间**:2024-12-25 17:02:19
-
-----------------------------------------
@@ -1,228 +0,0 @@
-#### 文章:[【2024补天白帽黑客年度盛典】Windows服务进程漏洞挖掘](https://forum.butian.net/share/4089)
-**来源**:subject
-**上传时间**:2024-12-25 17:39:57
-**描述**:演讲议题:Windows服务进程漏洞挖掘
-
-----------------------------------------
-#### 文章:[【2024补天白帽黑客年度盛典】大模型越狱攻击与评测](https://forum.butian.net/share/4088)
-**来源**:subject
-**上传时间**:2024-12-25 17:33:43
-**描述**:演讲议题:大模型越狱攻击与评测
-
-----------------------------------------
-#### 文章:[【2024补天白帽黑客年度盛典】当今勒索病毒的攻与防](https://forum.butian.net/share/4087)
-**来源**:subject
-**上传时间**:2024-12-25 17:26:49
-**描述**:演讲议题:当今勒索病毒的攻与防
-
-----------------------------------------
-#### 文章:[网安瞭望台第18期:警惕新型攻击利用Windows Defender绕过终端检测、CVE-2024-50379 漏洞利用工具分享](https://mp.weixin.qq.com/s?__biz=Mzg2NTkwODU3Ng==&mid=2247514556&idx=1&sn=a10e80238c91658489ebe6cc8657315c)
-**作者**:东方隐侠安全团队
-**上传时间**:2024-12-25 20:31:30
-**简介**:网安资讯分享\\x0d\\x0aDAILY NEWS AND KNOWLEDGE
-
-----------------------------------------
-#### 文章:[MDUT-Extend(MDUT-增强版) V1.2.0 Released](https://mp.weixin.qq.com/s?__biz=MzI5NDg0ODkwMQ==&mid=2247486138&idx=1&sn=4f881e7e1cc99466d57aa3d95d980b3b)
-**作者**:格格巫和蓝精灵
-**上传时间**:2024-12-25 20:16:51
-**简介**:None
-
-----------------------------------------
-#### 文章:[CobaltStrike Bof开发(1)](https://mp.weixin.qq.com/s?__biz=Mzg5MDg3OTc0OA==&mid=2247489138&idx=1&sn=3095870df2c9d365db698936abde43b2)
-**作者**:Relay学安全
-**上传时间**:2024-12-25 20:05:52
-**简介**:None
-
-----------------------------------------
-#### 文章:[Sa7mon-S3scanner:一款针对S3 Bucket的错误配置扫描工具](https://mp.weixin.qq.com/s?__biz=MjM5NjA0NjgyMA==&mid=2651310595&idx=4&sn=78fdcc1150147cc6155e1a2e73c31521)
-**作者**:FreeBuf
-**上传时间**:2024-12-25 19:56:23
-**简介**:该工具兼容S3 API,可扫描开放S3 Bucket中潜在的错误配置。
-
-----------------------------------------
-#### 文章:[免杀对抗从0开始(七)](https://mp.weixin.qq.com/s?__biz=Mzk0MzU5NTg1Ng==&mid=2247484849&idx=1&sn=f075965e73b511cfba0e53536232cf34)
-**作者**:泾弦安全
-**上传时间**:2024-12-25 19:50:33
-**简介**:None
-
-----------------------------------------
-#### 文章:[针对护网行动中红队溯源工具 - HuntBack](https://mp.weixin.qq.com/s?__biz=MzIzNTE0Mzc0OA==&mid=2247486015&idx=1&sn=bc5b7dea1d9621678e4cc49a85d736ae)
-**作者**:GSDK安全团队
-**上传时间**:2024-12-25 19:41:48
-**简介**:HuntBack(反击狩猎),用于攻防演练中,防守方对恶意ip进行web指纹扫描与识别。在蓝队职守中,安全设备爆出恶意攻击ip地址,如果对方使用的是自己的服务器,并且搭建了一些安全业务,可使用本工具对目前已知工具进行探测
-
-----------------------------------------
-#### 文章:[MDUT-Extend(MDUT-增强版) V1.2.0 Released](https://mp.weixin.qq.com/s?__biz=MzU0MzkzOTYzOQ==&mid=2247489554&idx=1&sn=d3d5aa81f68c323b815bcabe78f0b46a)
-**作者**:黑伞安全
-**上传时间**:2024-12-25 19:38:38
-**简介**:None
-
-----------------------------------------
-#### 文章:[一款wifi数据抓包破解工具Wireshark](https://mp.weixin.qq.com/s?__biz=MzI1MzQwNjEzNA==&mid=2247484154&idx=1&sn=252b411b617f65ba4513c1dda0fe70aa)
-**作者**:渗透测试知识学习
-**上传时间**:2024-12-25 19:35:56
-**简介**:wife数据包破解
-
-----------------------------------------
-#### 文章:[银狐黑产组织最新免杀样本详细分析](https://mp.weixin.qq.com/s?__biz=MzA4ODEyODA3MQ==&mid=2247489745&idx=1&sn=92cfd13140b08317c1901f6f89c89239)
-**作者**:安全分析与研究
-**上传时间**:2024-12-25 18:30:13
-**简介**:银狐黑产组织最新免杀样本详细分析
-
-----------------------------------------
-#### 文章:[警惕!2024年全球零日漏洞利用呈现七大趋势](https://mp.weixin.qq.com/s?__biz=MzI4NDY2MDMwMw==&mid=2247513353&idx=1&sn=cc572d3391797a15aa66590d70d0ac96)
-**作者**:安全内参
-**上传时间**:2024-12-25 18:14:14
-**简介**:零日漏洞的攻击目标迁移
-
-----------------------------------------
-#### 文章:[Apache Tomcat 漏洞导致服务器易受RCE攻击](https://mp.weixin.qq.com/s?__biz=MzI2NTg4OTc5Nw==&mid=2247521893&idx=1&sn=867f98595849107577a98fcaf043a177)
-**作者**:代码卫士
-**上传时间**:2024-12-25 18:11:51
-**简介**:速修复
-
-----------------------------------------
-#### 文章:[绕过Elastic EDR进行横向移动](https://mp.weixin.qq.com/s?__biz=MzAxMjYyMzkwOA==&mid=2247526433&idx=1&sn=6ee718605b5d67e3f68417bf664c46f8)
-**作者**:Ots安全
-**上传时间**:2024-12-25 18:01:05
-**简介**:None
-
-----------------------------------------
-#### 文章:[探索 NASA CryptoLib 的 SDLS 实现中的漏洞](https://mp.weixin.qq.com/s?__biz=MzAxMjYyMzkwOA==&mid=2247526433&idx=2&sn=9734352beed4645fcdc599733e607e22)
-**作者**:Ots安全
-**上传时间**:2024-12-25 18:01:05
-**简介**:None
-
-----------------------------------------
-#### 文章:[利用 WDAC 武器化:粉碎 EDR 的梦想](https://mp.weixin.qq.com/s?__biz=MzAxMjYyMzkwOA==&mid=2247526433&idx=3&sn=7fcef7477d3365c9f2905137b1be267e)
-**作者**:Ots安全
-**上传时间**:2024-12-25 18:01:05
-**简介**:None
-
-----------------------------------------
-#### 文章:[《英雄无敌》4:修改pe导入表注入DLL扩展回城术功能](https://mp.weixin.qq.com/s?__biz=MjM5NTc2MDYxMw==&mid=2458587767&idx=1&sn=dd1c04637890c14cb9d72fb95bbb0010)
-**作者**:看雪学苑
-**上传时间**:2024-12-25 17:59:58
-**简介**:看雪论坛作者ID:fdark
-
-----------------------------------------
-#### 文章:[【漏洞文字】博斯外贸管理软件 SQL注入](https://mp.weixin.qq.com/s?__biz=MzkyMTY1NDc2OA==&mid=2247487244&idx=1&sn=5011bd862eae6337a04f9e1673c7a184)
-**作者**:小羊安全屋
-**上传时间**:2024-12-25 17:01:56
-**简介**:None
-
-----------------------------------------
Binary files not shown (4 files changed; one removed image, 211 KiB).
@@ -6,6 +6,15 @@ import json
 from requests.exceptions import RequestException
 from loguru import logger
 
+logger.add("./log/spider.log",
+           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}",
+           rotation="10 MB",
+           compression="zip",
+           encoding="utf-8")
+# shell终端打印日志
+# logger.add(lambda msg: print(msg),
+#            format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
+
 # 测试用爬虫请求头
 headers = {
     "Content-Type": "application/json",
@@ -6,6 +6,15 @@ import json
 from requests.exceptions import RequestException
 from loguru import logger
 
+logger.add("./log/spider.log",
+           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}",
+           rotation="10 MB",
+           compression="zip",
+           encoding="utf-8")
+# shell终端打印日志
+# logger.add(lambda msg: print(msg),
+#            format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
+
 # 测试用爬虫请求头
 headers = {
     "Content-Type": "application/json",
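Both spider files gain a dedicated ./log/spider.log sink here; the request side of these spiders follows the guarded pattern (explicit timeout plus RequestException handling) that the changelog credits with fixing the hangs. A minimal sketch of that pattern:

```python
import requests
from requests.exceptions import RequestException
from loguru import logger

def fetch(url, headers=None, timeout=10):
    """GET a page, returning its text or None on any request failure."""
    try:
        resp = requests.get(url, headers=headers, timeout=timeout)
        resp.raise_for_status()
        return resp.text
    except RequestException as e:
        logger.error(f"request failed for {url}: {e}")
        return None
```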
@@ -1,18 +0,0 @@
-# -*- coding: utf-8 -*-
-import os
-import requests
-import xml.etree.ElementTree as ET
-import json
-from requests.exceptions import RequestException
-from loguru import logger
-
-
-
-github_headers = {
-    'Authorization': f"token {github_token}"
-}
-
-# Fetch this year's repositories
-year = datetime.datetime.now().year
-api = "https://api.github.com/search/repositories?q=CVE-{}&sort=updated".format(year)
-json_str = requests.get(api, headers=github_headers, timeout=10).json()
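The deleted module above queried the GitHub search API for repositories matching the current year's CVE identifiers; the excerpt leaves `github_token` and the `datetime` import out of view. A self-contained sketch of the same request, assuming the token comes from a `GITHUB_TOKEN` environment variable (that variable name and the error handling are illustrative, not the original module's):

```python
# Sketch: search GitHub for repositories referencing this year's CVEs.
# Assumes GITHUB_TOKEN is set in the environment; unauthenticated requests
# also work but are rate-limited more aggressively.
import datetime
import os

import requests

github_token = os.environ.get("GITHUB_TOKEN", "")
github_headers = {"Authorization": f"token {github_token}"} if github_token else {}

year = datetime.datetime.now().year
api = f"https://api.github.com/search/repositories?q=CVE-{year}&sort=updated"

try:
    resp = requests.get(api, headers=github_headers, timeout=10)
    resp.raise_for_status()
    # The search API returns {"items": [...]}; print the ten most recent hits.
    for repo in resp.json().get("items", [])[:10]:
        print(repo["full_name"], repo["html_url"])
except requests.exceptions.RequestException as exc:
    print(f"GitHub API request failed: {exc}")
```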
@@ -1,118 +0,0 @@
-import requests
-from bs4 import BeautifulSoup
-import json
-import time
-import os
-import datetime
-from requests.exceptions import RequestException
-from loguru import logger
-
-headers = {
-    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:133.0) Gecko/20100101 Firefox/133.0",
-    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
-    "Accept-Language": "zh-CN,zh;q=0.8,zh-TW;q=0.7,zh-HK;q=0.5,en-US;q=0.3,en;q=0.2",
-    "Accept-Encoding": "gzip, deflate, br",
-    "Upgrade-Insecure-Requests": "1",
-    "Sec-Fetch-Dest": "document",
-    "Sec-Fetch-Mode": "navigate",
-    "Sec-Fetch-Site": "none",
-    "Sec-Fetch-User": "?1",
-    "Priority": "u=0, i",
-    "Te": "trailers",
-    "Connection": "keep-alive"
-}
-
-def fetch_html(url, headers=headers, timeout=10):
-    try:
-        response = requests.get(url, headers=headers, timeout=timeout)
-        response.raise_for_status()
-        return response.text
-    except requests.exceptions.RequestException as e:
-        print(f"请求出错: {e}")
-        return None
-
-def parse_html(html_content):
-    soup = BeautifulSoup(html_content, 'html.parser')
-
-    # Extract all matching <li> tags
-    items = soup.find_all('li', id=lambda x: x and x.startswith('sogou_vr_11002601_box_'))
-
-    results = []
-
-    for item in items:
-        # Extract the title and link
-        title_tag = item.find('h3')
-        if title_tag:
-            a_tag = title_tag.find('a')
-            title = title_tag.get_text(strip=True) if title_tag else "No title found"
-            link = a_tag['href'] if a_tag else "No link found"
-            if link and not link.startswith('http'):
-                link = "https://weixin.sogou.com" + link
-        else:
-            title = "No title found"
-            link = "No link found"
-
-        # Extract the summary
-        summary_tag = item.find('p', class_='txt-info')
-        summary = summary_tag.get_text(strip=True) if summary_tag else "No summary found"
-
-        # Extract the publisher
-        publisher_tag = item.find('span', class_='all-time-y2')
-        publisher = publisher_tag.get_text(strip=True) if publisher_tag else "No publisher found"
-
-        # Extract the timestamp and convert it to a standard time format
-        timestamp_script = item.find('script', string=lambda text: 'document.write(timeConvert' in text)
-        if timestamp_script:
-            timestamp_str = timestamp_script.string.split("'")[1]
-            timestamp = int(timestamp_str)
-            standard_time = datetime.datetime.fromtimestamp(timestamp).strftime('%Y-%m-%d %H:%M:%S')
-        else:
-            standard_time = "No timestamp found"
-
-        results.append({
-            "title": title,
-            "link": link,
-            "description": summary,
-            "author": publisher,
-            "pubDate": standard_time
-        })
-
-    return results
-
-def remove_surrogates(text):
-    """Remove invalid surrogate pairs"""
-    return text.encode('utf-8', 'ignore').decode('utf-8')
-
-def sougou_wx_main(keywords):
-    all_results = {}  # Holds the results for every keyword
-
-    for keyword in keywords:
-        url = f"https://weixin.sogou.com/weixin?type=2&s_from=input&ie=utf8&query={keyword}"
-        html_content = fetch_html(url)
-        # print(html_content)
-
-        if html_content is None:
-            logger.warning(f"无法获取微信公众号-Sogou搜索内容,跳过保存操作。关键词: {keyword}")
-            continue
-
-        results = parse_html(html_content)
-        # Strip invalid surrogate pairs
-        cleaned_results = [{k: remove_surrogates(v) for k, v in item.items()} for item in results]
-        logger.warning(f"关键词【{keyword}】的微信公众号-Sogou搜索内容保存成功。")
-        all_results[keyword] = cleaned_results  # Store the results in the dict, keyed by keyword
-        time.sleep(5)
-
-    # Convert all results to JSON
-    json_results = json.dumps(all_results, ensure_ascii=False, indent=4)
-    # print(json_results)
-
-    # Make sure the output directory exists
-    os.makedirs(os.path.dirname('./JSON/sougou-wx.json'), exist_ok=True)
-
-    # Save the parsed data to a JSON file
-    with open('./JSON/sougou-wx.json', 'w', encoding='utf-8') as f:
-        f.write(json_results)
-
-if __name__ == "__main__":
-    keywords = ["齐鲁银行", "APP逆向", "渗透测试"]
-    sougou_wx_main(keywords)
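The crawler above writes its output to `./JSON/sougou-wx.json` as a dict keyed by search keyword, each value a list of entries with `title`, `link`, `description`, `author`, and `pubDate` fields. A minimal sketch of reading that file back (the path and field names are taken from the code above; the printing logic is illustrative):

```python
# Sketch: load the crawler's JSON output and print one line per result.
import json

with open('./JSON/sougou-wx.json', 'r', encoding='utf-8') as f:
    all_results = json.load(f)

for keyword, entries in all_results.items():
    print(f"== {keyword} ({len(entries)} results) ==")
    for entry in entries:
        print(f"{entry['pubDate']}  {entry['author']}  {entry['title']}")
        print(f"    {entry['link']}")
```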
@@ -5,6 +5,15 @@ import xmltodict
 import json
 from loguru import logger
 
+logger.add("./log/spider.log",
+           format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}",
+           rotation="10 MB",
+           compression="zip",
+           encoding="utf-8")
+# Print logs to the shell terminal
+# logger.add(lambda msg: print(msg),
+#            format="{time:YYYY-MM-DD HH:mm:ss} - {level} - {name}:{function}:{line} - {message}")
+
 # Request headers used for crawler testing
 headers = {
     "Content-Type": "application/atom+xml; charset=utf-8",
38  web/app.py
@@ -1,6 +1,5 @@
 from flask import Flask, jsonify, render_template
 import os
-import logging
 
 app = Flask(__name__)
 
@@ -9,75 +8,58 @@ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
 PARENT_DIR = os.path.dirname(BASE_DIR)  # Parent directory
 SEC_NEWS_PATH = os.path.join(PARENT_DIR, 'history', 'sec_news.md')
 TECH_PASSAGE_PATH = os.path.join(PARENT_DIR, 'history', 'tech_passage.md')
-CORE_LOG_PATH = os.path.join(PARENT_DIR, 'log', 'core.log')
-WEB_LOG_PATH = os.path.join(PARENT_DIR, 'log', 'app.log')
-
-# Configure the logger
-logging.basicConfig(
-    filename=WEB_LOG_PATH,
-    level=logging.INFO,
-    format='%(asctime)s - %(levelname)s - %(message)s'
-)
+CORE_LOG_PATH = os.path.join(PARENT_DIR, 'log', 'core.log')  # Newly added log file path
 
+# Replace output content
 def replace_content(content):
     content = content.replace('####', '###')
-    content = content.replace(r"e:\Self-Tool-Code\PyBot", '.')  # Change: use a raw string to avoid escaping issues
     return content
 
 @app.route('/')
 def index():
-    logging.info("访问主页")
     return render_template('index.html')
 
 @app.route('/get-sec-news')
 def get_sec_news():
-    logging.info(f"尝试打开安全新闻历史推送文件: {SEC_NEWS_PATH}")
+    print(f"尝试打开安全新闻历史推送文件: {SEC_NEWS_PATH}")
     try:
         with open(SEC_NEWS_PATH, 'r', encoding='utf-8') as file:
             content = file.read()
             content = replace_content(content)
             return jsonify({'content': content}), 200
     except FileNotFoundError:
-        logging.error(f"文件缺失: {SEC_NEWS_PATH}")
+        print(f"文件缺失: {SEC_NEWS_PATH}")
         return jsonify({'error': '安全新闻历史推送文件缺失!'}), 404
     except Exception as e:
-        logging.error(f"读取时出错: {SEC_NEWS_PATH}, 原因: {str(e)}")
+        print(f"读取时出错: {SEC_NEWS_PATH}, 原因: {str(e)}")
         return jsonify({'error': str(e)}), 500
 
 @app.route('/get-tech-passage')
 def get_tech_passage():
-    logging.info(f"尝试打开技术文章历史推送文件: {TECH_PASSAGE_PATH}")
+    print(f"尝试打开技术文章历史推送文件: {TECH_PASSAGE_PATH}")
     try:
         with open(TECH_PASSAGE_PATH, 'r', encoding='utf-8') as file:
             content = file.read()
             content = replace_content(content)
             return jsonify({'content': content}), 200
     except FileNotFoundError:
-        logging.error(f"文件缺失: {TECH_PASSAGE_PATH}")
+        print(f"文件缺失: {TECH_PASSAGE_PATH}")
         return jsonify({'error': '技术文章历史推送文件缺失!'}), 404
     except Exception as e:
-        logging.error(f"读取时出错: {TECH_PASSAGE_PATH}, 原因: {str(e)}")
+        print(f"读取时出错: {TECH_PASSAGE_PATH}, 原因: {str(e)}")
         return jsonify({'error': str(e)}), 500
 
 @app.route('/log')
 def get_log():
-    logging.info(f"尝试打开核心日志文件: {CORE_LOG_PATH}")
+    print(f"尝试打开核心日志文件: {CORE_LOG_PATH}")
     # Read the log file content
     with open(CORE_LOG_PATH, 'r', encoding='utf-8') as file:
         log_content = file.read()
     # Pass the log content to the template
     return render_template('log.html', log_content=log_content)
 
-@app.route('/weblog')
-def get_weblog():
-    logging.info(f"尝试打开Web应用日志文件: {WEB_LOG_PATH}")
-    with open(WEB_LOG_PATH, 'r') as file:
-        log_content = file.read()
-    log_content = replace_content(log_content)
-    return render_template('log.html', log_content=log_content)
-
 def run_server():
     app.run(host='0.0.0.0', port=5000)
 
 if __name__ == '__main__':
-    app.run(debug=False)  # Should be set to False in production
+    app.run(debug=True)  # Should be set to False in production
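For a quick local check of the routes above, a small client sketch; it assumes the app is already running on port 5000 as configured in `run_server()`, and the use of the `requests` library here is illustrative rather than part of the project:

```python
# Sketch: exercise the Flask endpoints defined above.
import requests

BASE = "http://127.0.0.1:5000"

for path in ("/get-sec-news", "/get-tech-passage"):
    resp = requests.get(BASE + path, timeout=5)
    print(path, resp.status_code)
    if resp.ok:
        # Both endpoints return JSON of the form {"content": "<markdown>"}.
        print(resp.json()["content"][:200])

# /log renders an HTML page rather than JSON.
print("/log", requests.get(BASE + "/log", timeout=5).status_code)
```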
@@ -99,7 +99,7 @@
             const htmlContent = marked.parse(data.content);
             document.getElementById('markdown-content').innerHTML = htmlContent;
         } else {
-            document.getElementById('markdown-content').innerHTML = '<p>加载历史推送文件时出错!(推送历史记录为空)</p>';
+            document.getElementById('markdown-content').innerHTML = '<p>加载历史推送文件时出错!</p>';
         }
     })
     .catch(error => {
@@ -119,7 +119,7 @@
             const htmlContent = marked.parse(data.content);
             document.getElementById('markdown-content').innerHTML = htmlContent;
         } else {
-            document.getElementById('markdown-content').innerHTML = '<p>加载历史推送文件时出错!(推送历史记录为空)</p>';
+            document.getElementById('markdown-content').innerHTML = '<p>加载历史推送文件时出错!</p>';
         }
     })
     .catch(error => {
34  功能解释.md
@@ -1,34 +0,0 @@
-./Core.py: core program
-./Dev_test.py: development and testing program
-
-- ./config
-    config.yaml: configures each module's parameters and token secrets
-    check_config.py: validates the configuration and returns it to the core program
-
-- ./log
-    app.py: web application runtime log
-    core.py: program runtime log
-
-- ./media
-    Crawls RSS feeds and other information sources and saves the JSON data
-
-- ./JSON
-    Stores the raw JSON data that was fetched
-
-- ./GotoSend
-    Processes the fetched JSON data and stores it in the db files
-
-- ./db
-    Stores the processed data
-
-- ./SendCore
-    Core push programs for each channel
-    FeishuSendBot.py: Feishu push core
-    MailSendBot.py: email push core
-    QiweiSendBot.py: WeCom (enterprise WeChat) push core
-
-- ./history
-    Stores the history of pushed content as markdown files
-
-- ./Web
-    Web application runtime
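check_config.py is described above as validating config.yaml and returning the settings to the core program. A minimal sketch of that pattern, with a function name, key names, and defaults that are illustrative placeholders rather than the project's actual schema:

```python
# Sketch: how a check_config.py-style helper might read config.yaml
# and hand validated settings to Core.py. Key names are hypothetical.
import yaml


def load_and_check_config(path="./config.yaml"):
    with open(path, "r", encoding="utf-8") as f:
        config = yaml.safe_load(f) or {}

    # Hypothetical required keys; fail fast if any are missing.
    required = ("interval_hours", "channels")
    missing = [key for key in required if key not in config]
    if missing:
        raise KeyError(f"config.yaml is missing required keys: {missing}")

    return config


if __name__ == "__main__":
    print(load_and_check_config())
```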