Autumn.home 2024-07-06 23:11:27 +08:00
parent 1fc3449541
commit b7790f589d
4 changed files with 32 additions and 32 deletions

View File

@@ -371,19 +371,11 @@ async def url_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depen
 @router.post("/crawler/data")
 async def crawler_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
     try:
-        search_query = request_data.get("search", "")
         page_index = request_data.get("pageIndex", 1)
         page_size = request_data.get("pageSize", 10)
-        keyword = {
-            'url': 'url',
-            'method': 'method',
-            'body': 'body',
-            'project': 'project'
-        }
-        query = await search_to_mongodb(search_query, keyword)
-        if query == "" or query is None:
+        query = await get_search_query("crawler", request_data)
+        if query == "":
             return {"message": "Search condition parsing error", "code": 500}
-        query = query[0]
        total_count = await db['crawler'].count_documents(query)
         cursor: AsyncIOMotorCursor = ((db['crawler'].find(query, {"_id": 0,
                                                                    "id": {"$toString": "$_id"},

View File

@@ -31,21 +31,11 @@ async def get_page_monitoring_data(db, all):
 @router.post("/page/monitoring/result")
 async def page_monitoring_result(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
-    search_query = request_data.get("search", "")
     page_index = request_data.get("pageIndex", 1)
     page_size = request_data.get("pageSize", 10)
-    keyword = {
-        'url': 'url',
-        'project': 'project',
-        'hash': 'hash',
-        'diff': 'diff',
-        'response': 'response'
-    }
-    query = await search_to_mongodb(search_query, keyword)
-    if query == "" or query is None:
+    query = await get_search_query("page", request_data)
+    if query == "":
         return {"message": "Search condition parsing error", "code": 500}
-    query = query[0]
-    # Get the total count of documents matching the search criteria
     query["diff"] = {"$ne": []}
     total_count = await db.PageMonitoring.count_documents(query)
     # Perform pagination query and sort by time
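
Note that the handler still layers a fixed condition on top of whatever get_search_query returns, so only pages with a non-empty diff history are counted. Roughly, assuming an empty search expression:

    # Effective MongoDB filter when the parsed search query is empty:
    query = {}
    query["diff"] = {"$ne": []}   # keep only documents whose diff array is non-empty
    # equivalent to: db.PageMonitoring.count_documents({"diff": {"$ne": []}})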

View File

@@ -84,16 +84,20 @@ async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dic
 async def get_projects_all(db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
     try:
         pipeline = [
-            {"$group": {
-                "_id": "$tag",  # group by the tag field
-                "children": {"$push": {"value": {"$toString": "$_id"}, "label": "$name"}}  # collect each document's _id and name into the children array
-            }},
-            {"$project": {
-                "_id": 0,
-                "label": "$_id",
-                "value": {"$literal": ""},
-                "children": 1
-            }}
+            {
+                "$group": {
+                    "_id": "$tag",  # group by the tag field
+                    "children": {"$push": {"value": {"$toString": "$_id"}, "label": "$name"}}  # collect each document's _id and name into the children array
+                }
+            },
+            {
+                "$project": {
+                    "_id": 0,
+                    "label": "$_id",
+                    "value": {"$literal": ""},
+                    "children": 1
+                }
+            }
         ]
         result = await db['project'].aggregate(pipeline).to_list(None)
         return {
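
The reindented pipeline is behaviorally unchanged: it groups projects by tag and shapes each group for a cascader-style selector (label = tag, value left empty, children = the projects). With made-up data, aggregate(pipeline).to_list(None) would return something like:

    # Hypothetical output (ids and names are invented for illustration):
    [
        {"label": "web", "value": "", "children": [
            {"value": "6631a0c2f1...", "label": "site-a"},
            {"value": "6631a0c2f2...", "label": "site-b"},
        ]},
        {"label": "api", "value": "", "children": [
            {"value": "6631a0c2f3...", "label": "gateway"},
        ]},
    ]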

View File

@@ -321,6 +321,7 @@ async def search_to_mongodb(expression_raw, keyword):
         logger.error(e)
         return ""
 
+
 async def get_search_query(name, request_data):
     search_query = request_data.get("search", "")
     search_key_v = {
@@ -382,6 +383,19 @@ async def get_search_query(name, request_data):
             'input': 'input',
             'source': 'source',
             "type": "outputtype"
+        },
+        'page': {
+            'url': 'url',
+            'project': 'project',
+            'hash': 'hash',
+            'diff': 'diff',
+            'response': 'response'
+        },
+        'crawler': {
+            'url': 'url',
+            'method': 'method',
+            'body': 'body',
+            'project': 'project'
         }
     }
     keyword = search_key_v[name]
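
Only the search_key_v additions are visible in this hunk, so the rest of get_search_query is not shown by the commit. A minimal sketch of what it presumably does, pieced together from the inline logic it replaces in the route handlers (return "" on a parse failure, otherwise unwrap the first element the way the old code did with query[0]):

    # Sketch only: the body below is inferred from the removed handler code, not taken from this diff.
    async def get_search_query(name, request_data):
        search_query = request_data.get("search", "")
        search_key_v = {
            # per-module field mappings; the 'page' and 'crawler' entries below
            # are the ones added in this commit (other modules omitted here)
            'page': {'url': 'url', 'project': 'project', 'hash': 'hash',
                     'diff': 'diff', 'response': 'response'},
            'crawler': {'url': 'url', 'method': 'method', 'body': 'body',
                        'project': 'project'},
        }
        keyword = search_key_v[name]
        query = await search_to_mongodb(search_query, keyword)
        if query == "" or query is None:
            return ""
        return query[0]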