From d0d2122b2fefb55e9207c4a169ed1bf7fb32f0e3 Mon Sep 17 00:00:00 2001
From: "Autumn.home"
Date: Wed, 10 Jul 2024 23:21:15 +0800
Subject: [PATCH] #todo aggregation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 api/asset_info.py          | 44 ++++++++++++++++++++++++----
 api/project_aggregation.py | 60 +++++++++++++++++++++++++++++++-------
 2 files changed, 88 insertions(+), 16 deletions(-)

diff --git a/api/asset_info.py b/api/asset_info.py
index 9bfddb2..cbe76b7 100644
--- a/api/asset_info.py
+++ b/api/asset_info.py
@@ -520,6 +520,40 @@ async def asset_data_statistics_port(request_data: dict, db=Depends(get_mongo_db
         "data": result_list
     }
 
+@router.post("/asset/statistics/title")
+async def asset_data_statistics_title(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
+    request_data.setdefault('filter', {})['type'] = ['https', 'http']
+    query = await get_search_query("asset", request_data)
+    if query == "":
+        return {"message": "Search condition parsing error", "code": 500}
+    pipeline = [
+        {
+            "$match": query  # apply the parsed search condition
+        },
+        {
+            "$facet": {
+                "by_title": [
+                    {"$group": {"_id": "$title", "num_tutorial": {"$sum": 1}}},
+                    {"$match": {"_id": {"$ne": ""}}}
+                ]
+            }
+        }
+    ]
+    result = await db['asset'].aggregate(pipeline).to_list(None)
+    result_list = {"Title": []}
+    title_list = {}
+
+    for r in result:
+        for title_group in r['by_title']:
+            title_list[title_group["_id"]] = title_group["num_tutorial"]
+
+    title_list = dict(sorted(title_list.items(), key=lambda item: -item[1]))
+    for title in title_list:
+        result_list['Title'].append({"value": title, "number": title_list[title]})
+    return {
+        "code": 200,
+        "data": result_list
+    }
 
 @router.post("/asset/statistics/type")
 async def asset_data_statistics_type(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
@@ -704,16 +738,14 @@ async def asset_data_statistics_app(request_data: dict, db=Depends(get_mongo_db)
 @router.post("/data/delete")
 async def delete_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
     try:
-        # Extract the list of IDs from the request_data dictionary
         data_ids = request_data.get("ids", [])
         index = request_data.get("index", "")
-        # Convert the provided rule_ids to ObjectId
-        obj_ids = [ObjectId(data_id) for data_id in data_ids]
-
-        # Delete the SensitiveRule documents based on the provided IDs
+        obj_ids = []
+        for data_id in data_ids:
+            if ObjectId.is_valid(data_id):
+                obj_ids.append(ObjectId(data_id))
         result = await db[index].delete_many({"_id": {"$in": obj_ids}})
 
-        # Check if the deletion was successful
         if result.deleted_count > 0:
             return {"code": 200, "message": "Data deleted successfully"}
         else:
diff --git a/api/project_aggregation.py b/api/project_aggregation.py
index c1f89e0..b6420d1 100644
--- a/api/project_aggregation.py
+++ b/api/project_aggregation.py
@@ -4,7 +4,7 @@
 # @contact : rainy-autumn@outlook.com
 # @time : 2024/7/8 21:02
 # -------------------------------------------
-
+import asyncio
 import time
 import traceback
 
@@ -30,14 +30,14 @@ router = APIRouter()
 async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
     id = request_data.get("id", "")
     result = await db.project.find_one({"_id": ObjectId(id)}, {
-        "_id": 0,
-        "tag": 1,
-        "hour": 1,
-        "scheduledTasks": 1,
-        "AssetCount": 1,
-        "root_domains": 1,
-        "name":1
-    }
+            "_id": 0,
+            "tag": 1,
+            "hour": 1,
+            "scheduledTasks": 1,
+            "AssetCount": 1,
+            "root_domains": 1,
+            "name": 1
+        }
     )
     if result['scheduledTasks']:
         job = scheduler.get_job(id)
@@ -77,7 +77,9 @@ async def get_projects_vul_statistics(request_data: dict, db=Depends(get_mongo_d
 @router.post("/project/vul/data")
 async def get_projects_vul_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
     id = request_data.get("id", "")
-    cursor: AsyncIOMotorCursor = db.vulnerability.find({"project": id}, {"_id": 0, "url": 1, "vulname": 1, "level": 1, "time": 1, "matched": 1}).sort([("time", DESCENDING)])
+    cursor: AsyncIOMotorCursor = db.vulnerability.find({"project": id},
+                                                       {"_id": 0, "url": 1, "vulname": 1, "level": 1, "time": 1,
+                                                        "matched": 1}).sort([("time", DESCENDING)])
     result = await cursor.to_list(length=None)
     return {
         "code": 200,
@@ -85,3 +87,41 @@ async def get_projects_vul_data(request_data: dict, db=Depends(get_mongo_db), _:
             'list': result
         }
     }
+
+
+@router.post("/project/subdomain/data")
+async def get_projects_subdomain_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
+    filter = request_data.get("filter", {})
+    project_id = filter["project"][0]
+    project_query = {}
+    host_filter = ""
+    if "host" in filter:
+        host_filter = filter["host"]
+    project_query["_id"] = ObjectId(project_id)
+    doc = await db.project.find_one(project_query, {"_id": 0, "root_domains": 1})
+    if not doc or "root_domains" not in doc:
+        return {"code": 404, "message": "project root domains not found"}
+    query = await get_search_query("subdomain", request_data)
+    if query == "":
+        return {"message": "Search condition parsing error", "code": 500}
+    results = []
+    for root_domain in doc["root_domains"]:
+        domain_query = {"$and": [query, {"host": {"$regex": f"{root_domain}$"}}]}
+        cursor: AsyncIOMotorCursor = db['subdomain'].find(domain_query, {
+            "_id": 0, "id": {"$toString": "$_id"}, "host": 1, "type": 1, "value": 1, "ip": 1, "time": 1
+        }).sort([("time", -1)])
+        result = await cursor.to_list(length=None)
+        result_list = []
+        for r in result:
+            if r['value'] is None:
+                r['value'] = []
+            if r['ip'] is None:
+                r['ip'] = []
+            result_list.append(r)
+        results.append({"host": root_domain, "type": "", "value": [], "ip": [], "id": generate_random_string(5), "children": result_list})
+    return {
+        "code": 200,
+        "data": {
+            'list': results
+        }
+    }
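
A quick way to exercise the two new routes end to end is sketched below. The
endpoint paths and JSON shapes come from the handlers in this patch; the base
URL, the Authorization header consumed by verify_token, and the project id are
placeholder assumptions, not values defined anywhere in this change.

    # smoke_test.py -- hypothetical local check, not part of the patch
    import requests

    BASE = "http://localhost:8000"                # assumed deployment address
    HEADERS = {"Authorization": "<your-token>"}   # assumed header checked by verify_token

    # /asset/statistics/title forces filter['type'] to ['https', 'http'],
    # so the aggregation only covers web assets.
    r = requests.post(f"{BASE}/asset/statistics/title",
                      json={"search": "", "filter": {}}, headers=HEADERS)
    print(r.json())  # {"code": 200, "data": {"Title": [{"value": ..., "number": ...}, ...]}}

    # /project/subdomain/data expects the project id inside filter["project"].
    r = requests.post(f"{BASE}/project/subdomain/data",
                      json={"search": "", "filter": {"project": ["<project-object-id>"]}},
                      headers=HEADERS)
    print(r.json())  # one tree node per root domain, its subdomains under "children"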
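
The per-root-domain lookup in /project/subdomain/data narrows the parsed search
query with a host regex anchored at the end of the string. The sketch below
shows that composition in isolation; domain_scoped_query is a hypothetical
helper name, and it escapes the domain with re.escape, a slightly more
defensive variant than the handler's f-string, since an unescaped dot would
also match any character.

    import re

    def domain_scoped_query(base_query: dict, root_domain: str) -> dict:
        # Wrap both filters in $and: base_query is left untouched between
        # root domains, and the regex only matches hosts ending in the domain.
        host_filter = {"host": {"$regex": re.escape(root_domain) + "$"}}
        return {"$and": [base_query, host_filter]}

    # domain_scoped_query({"$and": [{"type": "A"}]}, "example.com")
    # -> {"$and": [{"$and": [{"type": "A"}]},
    #              {"host": {"$regex": "example\\.com$"}}]}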