This commit is contained in:
Autumn.home 2024-07-04 23:28:11 +08:00
parent 0eb0f122a2
commit 0618ef2dc7
12 changed files with 257 additions and 233 deletions

View File

@ -6,9 +6,7 @@
# -------------------------------------------
from fastapi import APIRouter, Depends
from motor.motor_asyncio import AsyncIOMotorCursor
from pymongo import DESCENDING
from api.users import verify_token
from core.config import POC_LIST
from core.db import get_mongo_db
from core.util import search_to_mongodb
from loguru import logger

View File

@ -177,14 +177,14 @@ async def do_asset_deduplication():
"filters": [],
"groups": ["url", "status", "msg"]
},
"PageMonitoring": {
"filters": [],
"groups": ["url"]
},
"SensitiveResult": {
"filters": [],
"groups": ["url"]
},#############
# "PageMonitoring": {
# "filters": [],
# "groups": ["url"]
# },
# "SensitiveResult": {
# "filters": [],
# "groups": ["url"]
# },
"SubdoaminTakerResult": {
"filters": [],
"groups": ["input", "value"]
@ -196,7 +196,7 @@ async def do_asset_deduplication():
"asset": {
"filters": [],
"groups": [""]
},################
},
"crawler": {
"filters": [],
"groups": ["url", "body"]
@ -205,14 +205,24 @@ async def do_asset_deduplication():
"filters": [],
"groups": ["host", "type", "ip"]
},
"vulnerability": {
"filters": [],
"groups": ["url", "vulnid", "matched"]
}
# "vulnerability": {
# "filters": [],
# "groups": ["url", "vulnid", "matched"]
# }
}
for r in result:
if result[r]:
await asset_data_dedup(db, r, )
if r in f_g_k:
if r == "asset":
# http资产去重
http_filter = [{"type": {"$ne": "other"}}]
http_group = ["url", "statuscode", "hashes.body_mmh3"]
await asset_data_dedup(db, r, http_filter, http_group)
other_filter = [{"type":"other"}]
other_group = ["host", "ip", "protocol"]
await asset_data_dedup(db, r, other_filter, other_group)
else:
await asset_data_dedup(db, r, f_g_k[r]['filters'], f_g_k[r]['groups'])
async def asset_data_dedup(db, collection_name, filters, groups):

View File

@ -10,31 +10,35 @@ from fastapi import APIRouter, Depends
from motor.motor_asyncio import AsyncIOMotorCursor
from api.users import verify_token
from core.db import get_mongo_db
from core.util import search_to_mongodb
from core.util import search_to_mongodb, get_search_query
from loguru import logger
router = APIRouter()
@router.post("/dirscan/result/data")
async def dirscan_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'project': 'project',
'statuscode': 'status',
'url': 'url',
'redirect': 'msg',
'length': 'length'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("dir", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['DirScanResult'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['DirScanResult'].find(query, {"_id": 0, "id": {"$toString": "$_id"}, "url": 1, "status": 1, "msg":1, "length": 1})
.sort([('_id', -1)])
sort = request_data.get("sort", {})
sort_by = [('_id', -1)]
if sort != {}:
if 'length' in sort:
sort_value = sort['length']
if sort_value is not None:
if sort_value == "ascending":
sort_value = 1
else:
sort_value = -1
sort_by = [('length', sort_value)]
cursor: AsyncIOMotorCursor = ((db['DirScanResult'].find(query, {"_id": 0, "id": {"$toString": "$_id"}, "url": 1,
"status": 1, "msg": 1, "length": 1})
.sort(sort_by)
.skip((page_index - 1) * page_size)
.limit(page_size)))
result = await cursor.to_list(length=None)

View File

@ -124,7 +124,7 @@ async def export_data(request_data: dict, db=Depends(get_mongo_db), _: dict = De
"file_size": ""
})
if result.inserted_id:
background_tasks.add_task(export_data_from_mongodb, quantity, query, file_name, index, db)
background_tasks.add_task(export_data_from_mongodb, quantity, query, file_name, index)
return {"message": "Successfully added data export task", "code": 200}
else:
return {"message": "Failed to export data", "code": 500}
@ -154,7 +154,8 @@ async def fetch_data(db, collection, query, quantity, project_list):
return cursor
async def export_data_from_mongodb(quantity, query, file_name, index, db):
async def export_data_from_mongodb(quantity, query, file_name, index):
async for db in get_mongo_db():
try:
cursor = await fetch_data(db, index, query, quantity, Project_List)
result = await cursor.to_list(length=None)

View File

@ -11,7 +11,6 @@ from pymongo import ASCENDING, DESCENDING
from loguru import logger
from core.redis_handler import refresh_config
from core.util import *
from core.config import POC_LIST
router = APIRouter()
@ -124,7 +123,6 @@ async def update_poc_data(request_data: dict, db=Depends(get_mongo_db), _: dict
result = await db.PocList.update_one({"_id": ObjectId(poc_id)}, update_document)
# Check if the update was successful
if result:
POC_LIST[poc_id] = level
await refresh_config('all', 'poc')
return {"message": "Data updated successfully", "code": 200}
else:
@ -160,7 +158,6 @@ async def add_poc_data(request_data: dict, db=Depends(get_mongo_db), _: dict = D
# Check if the insertion was successful
if result.inserted_id:
POC_LIST[str(result.inserted_id)] = level
await refresh_config('all', 'poc')
return {"message": "Data added successfully", "code": 200}
else:
@ -186,9 +183,6 @@ async def delete_poc_rules(request_data: dict, db=Depends(get_mongo_db), _: dict
# Check if the deletion was successful
if result.deleted_count > 0:
for pid in poc_ids:
if pid in POC_LIST:
del POC_LIST[pid]
return {"code": 200, "message": "Poc deleted successfully"}
else:
return {"code": 404, "message": "Poc not found"}

View File

@ -68,7 +68,7 @@ async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dic
for result in results:
result["AssetCount"] = result.get("AssetCount", 0)
result_list[tag].append(result)
background_tasks.add_task(update_project_count, db=db, id=result["id"])
background_tasks.add_task(update_project_count, id=result["id"])
return {
"code": 200,
@ -79,7 +79,8 @@ async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dic
}
async def update_project_count(db, id):
async def update_project_count(id):
async for db in get_mongo_db():
query = {"project": {"$eq": id}}
total_count = await db['asset'].count_documents(query)
update_document = {
@ -190,7 +191,7 @@ async def delete_project_rules(request_data: dict, db=Depends(get_mongo_db), _:
pro_ids = request_data.get("ids", [])
delA = request_data.get("delA", False)
if delA:
background_tasks.add_task(delete_asset, pro_ids, db, True)
background_tasks.add_task(delete_asset, pro_ids, True)
obj_ids = [ObjectId(poc_id) for poc_id in pro_ids]
result = await db.project.delete_many({"_id": {"$in": obj_ids}})
await db.ProjectTargetData.delete_many({"id": {"$in": pro_ids}})

View File

@ -16,7 +16,7 @@ from core.db import get_mongo_db
from core.redis_handler import refresh_config
from loguru import logger
from core.util import search_to_mongodb
from core.util import search_to_mongodb, get_search_query
router = APIRouter()
@ -223,23 +223,13 @@ async def get_sensitive_result_rules(request_data: dict, db=Depends(get_mongo_db
@router.post("/sensitive/result/data2")
async def get_sensitive_result_rules(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
async def get_sensitive_result_data2(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'url': 'url',
'sname': 'sid',
"body": "body",
"info": "match",
'project': 'project',
'md5': 'md5'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("sens", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['SensitiveResult'].count_documents(query)
pipeline = [
{

View File

@ -181,7 +181,7 @@ async def delete_task(request_data: dict, db=Depends(get_mongo_db), _: dict = De
scheduler.remove_job(task_id)
await db.ScheduledTasks.delete_many({"id": {"$in": task_ids}})
if delA:
background_tasks.add_task(delete_asset, task_ids, db)
background_tasks.add_task(delete_asset, task_ids)
await redis_con.delete(*redis_key)
# Delete the SensitiveRule documents based on the provided IDs
result = await db.task.delete_many({"_id": {"$in": obj_ids}})
@ -558,7 +558,8 @@ async def scheduler_scan_task(id):
await create_scan_task(doc, task_id, targetList, redis)
async def delete_asset(task_ids, db, is_project = False):
async def delete_asset(task_ids, is_project = False):
async for db in get_mongo_db():
key = ["asset", "subdomain", "SubdoaminTakerResult", "UrlScan", "crawler", "SensitiveResult", "DirScanResult", "vulnerability", "PageMonitoring"]
del_query = {"taskId": {"$in": task_ids}}
if is_project:

View File

@ -9,33 +9,20 @@ from fastapi import APIRouter, Depends
from motor.motor_asyncio import AsyncIOMotorCursor
from pymongo import DESCENDING
from api.users import verify_token
from core.config import POC_LIST
from core.db import get_mongo_db
from core.util import search_to_mongodb
from core.util import search_to_mongodb, get_search_query
from loguru import logger
router = APIRouter()
@router.post("/vul/data")
async def get_vul_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
# MongoDB collection for SensitiveRule
# Fuzzy search based on the name field
keyword = {
'url': 'url',
'vulname': 'vulname',
'project': 'project',
'matched': 'matched',
'request': 'request',
'response': 'response',
'level': 'level'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("vul", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
# Get the total count of documents matching the search criteria
total_count = await db.vulnerability.count_documents(query)
if total_count == 0:
@ -60,9 +47,8 @@ async def get_vul_data(request_data: dict, db=Depends(get_mongo_db), _: dict = D
"time": doc["time"],
"request": doc["request"],
"response": doc["response"],
"level": doc['level']
}
if doc["vulnid"] in POC_LIST:
data["level"] = POC_LIST[doc["vulnid"]]
response_data.append(data)
return {
"code": 200,

View File

@ -27,7 +27,6 @@ NODE_TIMEOUT = 50
TOTAL_LOGS = 1000
APP = {}
SensitiveRuleList = {}
POC_LIST = {}
Project_List = {}
def set_timezone(t):
global TIMEZONE

View File

@ -133,7 +133,6 @@ async def create_database():
{"id": "page_monitoring", "name": "Page Monitoring", 'hour': 24, 'type': 'Page Monitoring', 'state': True})
await get_fingerprint(client[DATABASE_NAME])
await get_sens_rule(client[DATABASE_NAME])
await get_pocList(client[DATABASE_NAME])
await get_project(client[DATABASE_NAME])
except Exception as e:
# 处理异常
@ -166,14 +165,6 @@ async def get_sens_rule(client):
}
async def get_pocList(client):
collection = client["PocList"]
cursor = collection.find({}, {"_id": 1, "level": 1})
async for document in cursor:
document['id'] = str(document['_id'])
POC_LIST[document['id']] = document['level']
async def get_project(client):
collection = client["project"]
cursor = collection.find({}, {"_id": 1, "name": 1})

View File

@ -321,6 +321,55 @@ async def search_to_mongodb(expression_raw, keyword):
logger.error(e)
return ""
async def get_search_query(name, request_data):
    """Build a MongoDB query dict for one of the result endpoints.

    Parses the free-text ``search`` expression in *request_data* through
    ``search_to_mongodb`` using the field-name mapping selected by *name*
    ("sens", "dir" or "vul"), then AND-merges any UI filter selections
    (color/status/level): each selected value list becomes an ``$or``
    group and all groups are combined under ``$and``.

    Args:
        name: Result-type key choosing the search-keyword mapping.
        request_data: Request payload; reads ``search`` (str) and
            ``filter`` (dict mapping field name -> list of accepted values).

    Returns:
        dict: The MongoDB filter document, or the sentinel ``""`` when the
        search expression cannot be parsed (callers check for this).
    """
    search_expr = request_data.get("search", "")
    # Per-endpoint mapping from user-facing search keys to stored field names.
    search_key_v = {
        'sens': {
            'url': 'url',
            'sname': 'sid',
            "body": "body",
            "info": "match",
            'project': 'project',
            'md5': 'md5'
        },
        'dir': {
            'project': 'project',
            'statuscode': 'status',
            'url': 'url',
            'redirect': 'msg',
            'length': 'length'
        },
        'vul': {
            'url': 'url',
            'vulname': 'vulname',
            'project': 'project',
            'matched': 'matched',
            'request': 'request',
            'response': 'response',
            'level': 'level'
        }
    }
    query = await search_to_mongodb(search_expr, search_key_v[name])
    if query == "" or query is None:
        return ""
    query = query[0]
    # Only these UI-side filter fields are honoured; anything else is ignored.
    filter_key = ['color', 'status', 'level']
    # Renamed from `filter` so the builtin is not shadowed.
    filters = request_data.get("filter", {})
    and_clauses = []
    for field in filters:
        if field in filter_key:
            value_or = [{field: v} for v in filters[field]]
            if value_or:
                and_clauses.append({"$or": value_or})
    # Build the $and list first and attach it only when non-empty, instead of
    # eagerly creating query["$and"] and popping it again afterwards.
    if and_clauses:
        query["$and"] = and_clauses
    return query
def get_root_domain(url):
# 如果URL不带协议添加一个默认的http协议