diff --git a/README.md b/README.md
index be4ce08..3174b3a 100644
--- a/README.md
+++ b/README.md
@@ -98,6 +98,17 @@ WX:
 
 ![](docs/images/project-cn.png)
 
+
+## Project Asset Aggregation
+### Dashboard - Overview
+![](docs/images/project-dsh.png)
+### Subdomains
+![](docs/images/project-subdomain.png)
+### Ports
+![](docs/images/project-port.png)
+### Services
+![](docs/images/project-server.png)
+
 ## Tasks
 
 ![](docs/images/task-cn.png)
diff --git a/api/scheduled_tasks.py b/api/scheduled_tasks.py
index dc68464..f154f17 100644
--- a/api/scheduled_tasks.py
+++ b/api/scheduled_tasks.py
@@ -147,7 +147,8 @@ async def get_page_monitoring_time():
     async for db in get_mongo_db():
         result = await db.ScheduledTasks.find_one({"id": "page_monitoring"})
         time = result['hour']
-        return time
+        flag = result['state']
+        return time, flag
 
 
 async def create_page_monitoring_task():
diff --git a/api/sensitive.py b/api/sensitive.py
index 114a57d..fb892f5 100644
--- a/api/sensitive.py
+++ b/api/sensitive.py
@@ -4,7 +4,7 @@
 # @version:
 from datetime import datetime
 
-from bson import ObjectId
+from bson import ObjectId, SON
 from fastapi import APIRouter, Depends
 from pymongo import DESCENDING
 
@@ -305,6 +305,35 @@ async def get_sensitive_result_data2(request_data: dict, db=Depends(get_mongo_db
         return {"message": "error","code":500}
 
 
+@router.post("/sensitive/result/names")
+async def get_sensitive_result_names(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
+    query = await get_search_query("sens", request_data)
+    if query == "":
+        return {"message": "Search condition parsing error", "code": 500}
+
+    pipeline = [
+        {
+            "$match": query
+        },
+        {"$group": {"_id": "$sid", "count": {"$sum": 1}, "color": {"$first": "$color"}}},
+        {"$sort": SON([("count", -1)])},
+        {
+            "$project": {
+                "name": "$_id",
+                "count": 1,
+                "_id": 0,
+                "color": 1
+            }
+        }
+    ]
+    result = await db['SensitiveResult'].aggregate(pipeline).to_list(None)
+    return {
+        "code": 200,
+        "data": {
+            'list': result
+        }
+    }
+
+
 @router.post("/sensitive/result/body")
 async def get_sensitive_result_body_rules(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
diff --git a/core/util.py b/core/util.py
index 5ec96ca..29abecb 100644
--- a/core/util.py
+++ b/core/util.py
@@ -404,7 +404,7 @@ async def get_search_query(name, request_data):
     if query == "" or query is None:
         return ""
     query = query[0]
-    filter_key = {'app':'app','color': 'color', 'status': 'status', 'level': 'level', 'type': 'type', 'project': 'project', 'port': 'port', 'protocol': ['protocol', 'type'], 'icon': 'faviconmmh3', "statuscode":"statuscode"}
+    filter_key = {'app':'app','color': 'color', 'status': 'status', 'level': 'level', 'type': 'type', 'project': 'project', 'port': 'port', 'protocol': ['protocol', 'type'], 'icon': 'faviconmmh3', "statuscode": "statuscode", "sname": "sid"}
     filter = request_data.get("filter", {})
     if filter:
         query["$and"] = []
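For reference, a minimal standalone sketch of the aggregation the new /sensitive/result/names endpoint runs, assuming a reachable MongoDB instance; the connection string, the database name, and the empty $match placeholder are assumptions for illustration, not values taken from this patch.

from bson import SON
from pymongo import MongoClient

# Assumed connection string and database name; adjust to the target deployment.
client = MongoClient("mongodb://127.0.0.1:27017")
collection = client["ScopeSentry"]["SensitiveResult"]

# Mirrors the stages in the new endpoint: group hits by rule id (sid),
# count them, keep one color per rule, sort by hit count, and expose sid as "name".
pipeline = [
    {"$match": {}},  # placeholder for the query produced by get_search_query("sens", ...)
    {"$group": {"_id": "$sid", "count": {"$sum": 1}, "color": {"$first": "$color"}}},
    {"$sort": SON([("count", -1)])},
    {"$project": {"name": "$_id", "count": 1, "color": 1, "_id": 0}},
]

for row in collection.aggregate(pipeline):
    print(row)  # e.g. {'name': '<rule name>', 'count': 12, 'color': 'red'}

The "sname" key added to filter_key in core/util.py maps a "sname" filter in the request onto the same sid field, so name-based filtering reuses this grouping.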
diff --git a/docs/images/project-dsh.png b/docs/images/project-dsh.png
new file mode 100644
index 0000000..4c10e29
Binary files /dev/null and b/docs/images/project-dsh.png differ
diff --git a/docs/images/project-port.png b/docs/images/project-port.png
new file mode 100644
index 0000000..66af2b2
Binary files /dev/null and b/docs/images/project-port.png differ
diff --git a/docs/images/project-server.png b/docs/images/project-server.png
new file mode 100644
index 0000000..472ddc1
Binary files /dev/null and b/docs/images/project-server.png differ
diff --git a/docs/images/project-subdomain.png b/docs/images/project-subdomain.png
new file mode 100644
index 0000000..0315fcf
Binary files /dev/null and b/docs/images/project-subdomain.png differ
diff --git a/main.py b/main.py
index 3f3e088..6028c0a 100644
--- a/main.py
+++ b/main.py
@@ -114,8 +114,9 @@ async def startup_db_client():
             find_page_m = True
     if not find_page_m:
         from api.scheduled_tasks import get_page_monitoring_time, create_page_monitoring_task
-        pat = await get_page_monitoring_time()
-        scheduler.add_job(create_page_monitoring_task, 'interval', hours=pat, id='page_monitoring', jobstore='mongo')
+        pat, flag = await get_page_monitoring_time()
+        if flag:
+            scheduler.add_job(create_page_monitoring_task, 'interval', hours=pat, id='page_monitoring', jobstore='mongo')
 
     asyncio.create_task(subscribe_log_channel())
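For reference, a minimal runnable sketch of the flag-gated scheduling pattern that main.py now follows, using APScheduler's AsyncIOScheduler; the stand-in get_page_monitoring_time, the 24-hour interval, and the omission of the MongoDB jobstore are assumptions made to keep the example self-contained.

import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler


async def create_page_monitoring_task():
    # Stand-in for the real page-monitoring task defined in api/scheduled_tasks.py.
    print("page monitoring tick")


async def get_page_monitoring_time():
    # Stand-in for the MongoDB lookup: the patched function returns both the
    # interval in hours ('hour') and the enabled flag ('state').
    return 24, True


async def main():
    scheduler = AsyncIOScheduler()
    scheduler.start()

    pat, flag = await get_page_monitoring_time()
    if flag:  # only register the job when the scheduled task is enabled
        scheduler.add_job(create_page_monitoring_task, 'interval',
                          hours=pat, id='page_monitoring')

    print([job.id for job in scheduler.get_jobs()])  # ['page_monitoring'] when flag is True
    await asyncio.sleep(0.1)
    scheduler.shutdown(wait=False)


asyncio.run(main())

With this guard, a page-monitoring task whose stored state is disabled is simply never added to the scheduler, instead of being registered unconditionally as before.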