This commit is contained in:
Autumn.home 2024-07-06 19:32:03 +08:00
parent e108019446
commit 1fc3449541
5 changed files with 130 additions and 176 deletions

View File

@ -8,10 +8,11 @@ from fastapi import APIRouter, Depends
from motor.motor_asyncio import AsyncIOMotorCursor
from api.users import verify_token
from core.db import get_mongo_db
from core.util import search_to_mongodb
from core.util import search_to_mongodb, get_search_query
from loguru import logger
router = APIRouter()
@router.post("/subdomaintaker/data")
async def get_subdomaintaker_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
@ -20,17 +21,9 @@ async def get_subdomaintaker_data(request_data: dict, db=Depends(get_mongo_db),
page_size = request_data.get("pageSize", 10)
# MongoDB collection for SensitiveRule
# Fuzzy search based on the name field
keyword = {
'domain': 'input',
'value': 'value',
'type': 'cname',
'response': 'response',
'project': 'project',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("subdomainTaker", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
# Get the total count of documents matching the search criteria
total_count = await db.SubdoaminTakerResult.count_documents(query)
if total_count == 0:

View File

@ -58,27 +58,11 @@ async def asset_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Dep
"name": document['name'],
"color": document['color']
}
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['asset'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['asset'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},
@ -235,25 +219,9 @@ async def asset_detail(request_data: dict, db=Depends(get_mongo_db), _: dict = D
@router.post("/asset/statistics")
async def asset_data_statistics(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
cursor: AsyncIOMotorCursor = ((db['asset'].find(query, {
"port": 1,
"protocol": 1,
@ -369,20 +337,11 @@ async def asset_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Dep
@router.post("/url/data")
async def url_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'url': 'output',
'project': 'project',
'input': 'input',
'source': 'source',
"type": "outputtype"
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("url", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['UrlScan'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['UrlScan'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},
@ -452,25 +411,9 @@ async def crawler_data(request_data: dict, db=Depends(get_mongo_db), _: dict = D
@router.post("/asset/statistics2")
async def asset_data_statistics2(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
pipeline = [
{
"$match": query # 添加搜索条件
@ -553,25 +496,9 @@ async def asset_data_statistics2(request_data: dict, db=Depends(get_mongo_db), _
@router.post("/asset/statistics/port")
async def asset_data_statistics_port(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
pipeline = [
{
"$match": query # 添加搜索条件
@ -604,25 +531,9 @@ async def asset_data_statistics_port(request_data: dict, db=Depends(get_mongo_db
@router.post("/asset/statistics/type")
async def asset_data_statistics_type(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
pipeline = [
{
"$match": query # 添加搜索条件
@ -660,25 +571,9 @@ async def asset_data_statistics_type(request_data: dict, db=Depends(get_mongo_db
@router.post("/asset/statistics/icon")
async def asset_data_statistics_icon(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
pipeline = [
{
"$match": query # 添加搜索条件
@ -769,25 +664,9 @@ async def asset_data_statistics_icon(request_data: dict, db=Depends(get_mongo_db
@router.post("/asset/statistics/app")
async def asset_data_statistics_app(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
keyword = {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("asset", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
pipeline = [
{
"$match": query # 添加搜索条件

View File

@ -18,7 +18,7 @@ import pandas as pd
from core.util import *
from pymongo import ASCENDING, DESCENDING, results
from loguru import logger
from openpyxl import Workbook
router = APIRouter()
keywords = {
@ -154,13 +154,32 @@ async def fetch_data(db, collection, query, quantity, project_list):
return cursor
def flatten_dict(d):
    """Flatten a MongoDB document's values into Excel-writable scalars.

    - dict values are stringified with str()
    - the special "webfinger" list is mapped through the module-level APP
      table (presumably fingerprint-id -> app name — confirm against its
      definition) and comma-joined
    - any other list is joined with ", "
    - scalars pass through unchanged

    Returns a new dict; the input document is not modified.
    """
    items = []
    for k, v in d.items():
        if isinstance(v, dict):
            items.append((k, str(v)))
        elif isinstance(v, list):
            if k == "webfinger":
                # Single C-speed join instead of the quadratic
                # `tem += str(APP[w]) + ","` / `.strip(",")` pattern.
                items.append((k, ",".join(str(APP[w]) for w in v)))
            else:
                items.append((k, ', '.join(map(str, v))))
        else:
            items.append((k, v))
    return dict(items)
async def export_data_from_mongodb(quantity, query, file_name, index):
logger.info("导出开始")
async for db in get_mongo_db():
try:
cursor = await fetch_data(db, index, query, quantity, Project_List)
result = await cursor.to_list(length=None)
relative_path = f'file/{file_name}.xlsx'
file_path = os.path.join(os.getcwd(), relative_path)
wb = Workbook()
if index == "asset":
http_columns = {
"timestamp": "时间",
@ -201,27 +220,25 @@ async def export_data_from_mongodb(quantity, query, file_name, index):
"project": "项目",
"type": "类型"
}
other_df = pd.DataFrame()
http_df = pd.DataFrame()
# 创建两个工作表
http_ws = wb.active
http_ws.title = 'HTTP Data'
other_ws = wb.create_sheet(title='Other Data')
# 写入HTTP Data列名
http_ws.append(list(http_columns.values()))
# 写入Other Data列名
other_ws.append(list(other_columns.values()))
# 分别写入数据
for doc in result:
flattened_doc = flatten_dict(doc)
if doc["type"] == "other":
other_df = pd.concat([other_df, pd.DataFrame([doc])], ignore_index=True)
row = [flattened_doc.get(col, "") for col in other_columns.keys()]
other_ws.append(row)
else:
if doc['webfinger'] is not None:
webfinger = []
for webfinger_id in doc['webfinger']:
webfinger.append(APP[webfinger_id])
doc['webfinger'] = webfinger
http_df = pd.concat([http_df, pd.DataFrame([doc])], ignore_index=True)
try:
excel_writer = pd.ExcelWriter(file_path, engine='xlsxwriter')
http_df.rename(columns=http_columns, inplace=True)
http_df.to_excel(excel_writer, sheet_name='HTTP Data', index=False)
other_df.rename(columns=other_columns, inplace=True)
other_df.to_excel(excel_writer, sheet_name='Other Data', index=False)
excel_writer.close()
except IllegalCharacterError as e:
logger.error("导出内容有不可见字符,忽略此错误")
row = [flattened_doc.get(col, "") for col in http_columns.keys()]
http_ws.append(row)
else:
columns = {}
if index == "subdomain":
@ -259,12 +276,19 @@ async def export_data_from_mongodb(quantity, query, file_name, index):
'url': 'URL', 'content': '响应体', 'hash': '响应体Hash', 'diff': 'Diff',
'state': '状态', 'project': '项目', 'time': '时间'
}
try:
df = pd.DataFrame(result)
df.rename(columns=columns, inplace=True)
df.to_excel(file_path, index=False)
except IllegalCharacterError as e:
logger.error("导出内容有不可见字符,忽略此错误")
ws = wb.active
ws.title = index
ws.append(list(columns.values()))
for doc in result:
flattened_doc = flatten_dict(doc)
row = [flattened_doc.get(col, "") for col in columns.keys()]
ws.append(row)
try:
wb.save(file_path)
logger.info(f"Data saved to {file_path} successfully.")
except IllegalCharacterError as e:
logger.error("导出内容有不可见字符,忽略此错误")
file_size = os.path.getsize(file_path) / (1024 * 1024) # kb
update_document = {
"$set": {
@ -282,7 +306,7 @@ async def export_data_from_mongodb(quantity, query, file_name, index):
}
}
await db.export.update_one({"file_name": file_name}, update_document)
logger.info("导出结束")
@router.get("/export/record")
async def get_export_record(db=Depends(get_mongo_db), _: dict = Depends(verify_token)):

View File

@ -1,4 +1,5 @@
import time
import traceback
from bson import ObjectId
from fastapi import APIRouter, Depends, BackgroundTasks
@ -79,6 +80,34 @@ async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dic
}
@router.get("/project/all")
async def get_projects_all(db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Return all projects grouped by tag, shaped as {label, value, children}
    option groups (children carry the project's stringified _id and name)."""
    try:
        # Stage 1: bucket projects by their tag field, collecting each
        # project's _id (as a string) and name into the bucket's children.
        group_stage = {
            "$group": {
                "_id": "$tag",
                "children": {
                    "$push": {"value": {"$toString": "$_id"}, "label": "$name"}
                },
            }
        }
        # Stage 2: reshape each bucket — the tag becomes the group label,
        # the group itself gets an empty (non-selectable) value.
        project_stage = {
            "$project": {
                "_id": 0,
                "label": "$_id",
                "value": {"$literal": ""},
                "children": 1,
            }
        }
        result = await db['project'].aggregate(
            [group_stage, project_stage]
        ).to_list(None)
        return {
            "code": 200,
            "data": {
                'list': result
            }
        }
    except Exception as e:
        logger.error(str(e))
        logger.error(traceback.format_exc())
        return {"message": "error","code":500}
async def update_project_count(id):
async for db in get_mongo_db():
query = {"project": {"$eq": id}}

View File

@ -354,6 +354,34 @@ async def get_search_query(name, request_data):
'type': 'type',
'project': 'project',
'value': 'value'
},
'asset': {
'app': '',
'body': 'responsebody',
'header': 'rawheaders',
'project': 'project',
'title': 'title',
'statuscode': 'statuscode',
'icon': 'faviconmmh3',
'ip': ['host', 'ip'],
'domain': ['host', 'url', 'domain'],
'port': 'port',
'protocol': ['protocol', 'type'],
'banner': 'raw',
},
'subdomainTaker': {
'domain': 'input',
'value': 'value',
'type': 'cname',
'response': 'response',
'project': 'project',
},
'url': {
'url': 'output',
'project': 'project',
'input': 'input',
'source': 'source',
"type": "outputtype"
}
}
keyword = search_key_v[name]
@ -361,7 +389,7 @@ async def get_search_query(name, request_data):
if query == "" or query is None:
return ""
query = query[0]
filter_key = ['color', 'status', 'level', 'type']
filter_key = ['color', 'status', 'level', 'type', 'project']
filter = request_data.get("filter", {})
if filter:
query["$and"] = []
@ -369,7 +397,8 @@ async def get_search_query(name, request_data):
if f in filter_key:
tmp_or = []
for v in filter[f]:
tmp_or.append({f: v})
if v != "":
tmp_or.append({f: v})
if len(tmp_or) != 0:
query["$and"].append({"$or": tmp_or})
if "$and" in query: