Autumn.home 2024-07-06 23:11:27 +08:00
parent 1fc3449541
commit b7790f589d
4 changed files with 32 additions and 32 deletions


@@ -371,19 +371,11 @@ async def url_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depen
@router.post("/crawler/data")
async def crawler_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'url': 'url',
'method': 'method',
'body': 'body',
'project': 'project'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("crawler", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['crawler'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['crawler'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},


@@ -31,21 +31,11 @@ async def get_page_monitoring_data(db, all):
@router.post("/page/monitoring/result")
async def page_monitoring_result(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
search_query = request_data.get("search", "")
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
keyword = {
'url': 'url',
'project': 'project',
'hash': 'hash',
'diff': 'diff',
'response': 'response'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
query = await get_search_query("page", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
# Get the total count of documents matching the search criteria
query["diff"] = {"$ne": []}
total_count = await db.PageMonitoring.count_documents(query)
# Perform pagination query and sort by time
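
The page monitoring route gets the same treatment, with one extra filter: query["diff"] = {"$ne": []} keeps only records whose diff list is non-empty, i.e. pages that actually changed. The pagination itself is cut off in this view; a minimal Motor sketch of what follows, assuming a time sort field and the usual skip/limit arithmetic (both are assumptions, not shown in this hunk):

    query = await get_search_query("page", request_data)
    if query == "":
        return {"message": "Search condition parsing error", "code": 500}
    query["diff"] = {"$ne": []}  # keep only pages with a non-empty diff
    total_count = await db.PageMonitoring.count_documents(query)
    page_index = request_data.get("pageIndex", 1)
    page_size = request_data.get("pageSize", 10)
    cursor = db.PageMonitoring.find(query, {"_id": 0, "id": {"$toString": "$_id"}}) \
        .sort([("time", -1)]) \
        .skip((page_index - 1) * page_size) \
        .limit(page_size)
    result = await cursor.to_list(length=None)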


@@ -84,16 +84,20 @@ async def get_projects_data(request_data: dict, db=Depends(get_mongo_db), _: dic
async def get_projects_all(db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
pipeline = [
{"$group": {
"_id": "$tag", # 根据 tag 字段分组
"children": {"$push": {"value": {"$toString": "$_id"}, "label": "$name"}} # 将每个文档的 _id 和 name 放入 children 集合中
}},
{"$project": {
"_id": 0,
"label": "$_id",
"value": {"$literal": ""},
"children": 1
}}
{
"$group": {
"_id": "$tag", # 根据 tag 字段分组
"children": {"$push": {"value": {"$toString": "$_id"}, "label": "$name"}} # 将每个文档的 _id 和 name 放入 children 集合中
}
},
{
"$project": {
"_id": 0,
"label": "$_id",
"value": {"$literal": ""},
"children": 1
}
}
]
result = await db['project'].aggregate(pipeline).to_list(None)
return {
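
The expanded pipeline is purely cosmetic; it still groups projects by tag and emits one option per tag with that tag's projects as children. With illustrative documents such as {"name": "site-a", "tag": "web"} and {"name": "api-x", "tag": "api"}, the aggregation result is roughly:

    [
        {"label": "web", "value": "", "children": [{"value": "<site-a _id as string>", "label": "site-a"}]},
        {"label": "api", "value": "", "children": [{"value": "<api-x _id as string>", "label": "api-x"}]}
    ]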


@@ -321,6 +321,7 @@ async def search_to_mongodb(expression_raw, keyword):
logger.error(e)
return ""
async def get_search_query(name, request_data):
search_query = request_data.get("search", "")
search_key_v = {
@@ -382,6 +383,19 @@ async def get_search_query(name, request_data):
'input': 'input',
'source': 'source',
"type": "outputtype"
},
'page': {
'url': 'url',
'project': 'project',
'hash': 'hash',
'diff': 'diff',
'response': 'response'
},
'crawler': {
'url': 'url',
'method': 'method',
'body': 'body',
'project': 'project'
}
}
keyword = search_key_v[name]
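
The remainder of get_search_query falls outside this hunk; judging from the route code it replaces in the first two files, it presumably forwards the selected mapping to search_to_mongodb roughly like this (a sketch, not the actual implementation):

    query = await search_to_mongodb(search_query, keyword)
    if query == "" or query is None:
        return ""
    return query[0]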