update node name

Autumn.home 2024-07-14 18:09:30 +08:00
parent 5a2f5f79e6
commit 9d65e4180a
7 changed files with 83 additions and 40 deletions
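In short, this commit adds node renaming, lets URL results be sorted by length, adds a bulk state toggle for sensitive rules, and removes the in-memory SensitiveRuleList cache from the API layer. The rename itself happens in the node config handler: the node's Redis hash is copied from the old key to the key for the new name, the node is notified through refresh_config, and the old key is deleted. A minimal standalone sketch of that flow, assuming a redis.asyncio client and the refresh_config helper from core.redis_handler (the handler in the diff uses hmset; hset(..., mapping=...) is the non-deprecated equivalent):

# Simplified sketch inferred from the node handler in this commit; the
# "node:<name>" key layout and the refresh_config call mirror the diff,
# while the redis.asyncio client type is an assumption.
import redis.asyncio as redis
from core.redis_handler import refresh_config

async def rename_node(redis_con: redis.Redis, old_name: str, new_name: str) -> None:
    old_key, new_key = f"node:{old_name}", f"node:{new_name}"
    value = await redis_con.hgetall(old_key)              # copy the node's hash...
    if value:
        await redis_con.hset(new_key, mapping=value)      # ...to the key for the new name
    await refresh_config(old_name, 'UpdateNodeName', new_name)  # tell the node about its new name
    await redis_con.delete(old_key)                       # drop the old key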

View File

@@ -48,16 +48,6 @@ async def asset_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Dep
document['id'] = str(document['_id'])
del document['_id']
APP[document['id']] = document['name']
if len(SensitiveRuleList) == 0:
collection = db["SensitiveRule"]
cursor = await collection.find({}, {"_id": 1, "name": 1})
async for document in cursor:
document['id'] = str(document['_id'])
del document['_id']
SensitiveRuleList[document['id']] = {
"name": document['name'],
"color": document['color']
}
page_index = request_data.get("pageIndex", 1)
page_size = request_data.get("pageSize", 10)
query = await get_search_query("asset", request_data)
@@ -105,7 +95,11 @@ async def asset_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Dep
for k in raw_data:
tmp['banner'] += k + ":" + str(raw_data[k]).strip("\n") + "\n"
except:
tmp['banner'] = ""
try:
raw_data = r['raw'].decode('utf-8')
tmp['banner'] = raw_data
except:
tmp['banner'] = ""
tmp['products'] = []
else:
tmp['domain'] = r['url'].replace(f'{r["type"]}://', '')
@@ -342,6 +336,17 @@ async def url_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depen
query = await get_search_query("url", request_data)
if query == "":
return {"message": "Search condition parsing error", "code": 500}
sort = request_data.get("sort", {})
sort_by = [('_id', -1)]
if sort != {}:
if 'length' in sort:
sort_value = sort['length']
if sort_value is not None:
if sort_value == "ascending":
sort_value = 1
else:
sort_value = -1
sort_by = [('length', sort_value)]
total_count = await db['UrlScan'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['UrlScan'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},
@@ -353,9 +358,9 @@ async def url_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depen
"url": "$output",
"time": 1,
})
.sort(sort_by)
.skip((page_index - 1) * page_size)
.limit(page_size))
.sort([("time", DESCENDING)]))
.limit(page_size)))
result = await cursor.to_list(length=None)
return {
"code": 200,

View File

@@ -2,6 +2,7 @@
# @name: node
# @auth: rainy-autumn@outlook.com
# @version:
import time
from datetime import datetime
from fastapi import WebSocket
from fastapi import APIRouter, Depends
@@ -69,6 +70,9 @@ async def node_data_online(_: dict = Depends(verify_token), redis_con=Depends(ge
async def node_config_update(config_data: dict, _: dict = Depends(verify_token), redis_con=Depends(get_redis_pool)):
try:
name = config_data.get("name")
old_name = config_data.get("oldName", "")
if old_name == "":
old_name = name
max_task_num = config_data.get("maxTaskNum")
state = config_data.get("state")
if name is None or max_task_num is None or state is None:
@@ -76,6 +80,24 @@ async def node_config_update(config_data: dict, _: dict = Depends(verify_token),
async with redis_con as redis:
key = f"node:{name}"
if old_name != name:
old_name_key = f"node:{old_name}"
value = await redis_con.hgetall(old_name_key)
await redis_con.hmset(key, value)
await refresh_config(old_name, 'UpdateNodeName', name)
await redis_con.delete(old_name_key)
flag = 0
# while True:
# key_exists = await redis_con.exists(key)
# if flag == 5:
# logger.error("Node name update was not detected")
# break
# if key_exists:
# await redis_con.delete(old_name_key)
# break
# else:
# flag += 1
# time.sleep(4)
redis_state = await redis.hget(key, "state")
if state:
if redis_state == "2":
@@ -85,6 +107,7 @@ async def node_config_update(config_data: dict, _: dict = Depends(verify_token),
await redis.hset(key, "state", "2")
del config_data["name"]
del config_data["state"]
del config_data["oldName"]
for c in config_data:
await redis.hset(key, c, config_data[c])
await refresh_config(name, 'nodeConfig')
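With the change above, a node config update carries the node's previous name alongside the new one; an empty or missing oldName falls back to name, and because the handler deletes the oldName key from the payload before writing the remaining fields to Redis, callers should normally send it. An illustrative payload (values are hypothetical):

# Hypothetical node config update payload; "oldName" lets the handler move the
# Redis hash from node:<oldName> to node:<name> before the rest is applied.
config_data = {
    "name": "node-02",       # new node name
    "oldName": "node-01",    # previous name; empty string falls back to "name"
    "maxTaskNum": 5,
    "state": True,
}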

View File

@@ -25,7 +25,7 @@ async def poc_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depen
# Get the total count of documents matching the search criteria
total_count = await db.PocList.count_documents(query)
# Perform pagination query and sort by time
cursor: AsyncIOMotorCursor = db.PocList.find(query, {"_id": 0, "id": {"$toString": "$_id"}, "name": 1, "level": 1, "time": 1}).sort([("level", DESCENDING), ("time", DESCENDING)]).skip((page_index - 1) * page_size).limit(page_size)
cursor: AsyncIOMotorCursor = db.PocList.find(query, {"_id": 0, "id": {"$toString": "$_id"}, "name": 1, "level": 1, "time": 1}).sort([("time", DESCENDING)]).skip((page_index - 1) * page_size).limit(page_size)
result = await cursor.to_list(length=None)
return {
"code": 200,

View File

@@ -11,7 +11,6 @@ from pymongo import DESCENDING
from api.users import verify_token
from motor.motor_asyncio import AsyncIOMotorCursor
from core.config import SensitiveRuleList
from core.db import get_mongo_db
from core.redis_handler import refresh_config
from loguru import logger
@@ -20,6 +19,7 @@ from core.util import search_to_mongodb, get_search_query
router = APIRouter()
@router.post("/sensitive/data")
async def get_sensitive_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
@@ -78,10 +78,6 @@ async def upgrade_sensitive_rule(request_data: dict, db=Depends(get_mongo_db), _
# Perform the update
result = await db.SensitiveRule.update_one(update_query, update_values)
if result:
SensitiveRuleList[str(rule_id)] = {
"name": name,
"color": color
}
await refresh_config('all', 'sensitive')
return {"code": 200, "message": "SensitiveRule updated successfully"}
else:
@@ -115,10 +111,6 @@ async def add_sensitive_rule(request_data: dict, db=Depends(get_mongo_db), _: di
# Check if the insertion was successful
if result.inserted_id:
SensitiveRuleList[str(result.inserted_id)] = {
"name": name,
"color": color
}
await refresh_config('all', 'sensitive')
return {"code": 200, "message": "SensitiveRule added successfully"}
else:
@@ -130,6 +122,30 @@ async def add_sensitive_rule(request_data: dict, db=Depends(get_mongo_db), _: di
return {"message": "error", "code": 500}
@router.post("/sensitive/update/state")
async def update_state_sensitive_rule(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
rule_ids = request_data.get("ids", [])
state = request_data.get("state")
if state is None:
return {"code": 500, "message": "state not found"}
obj_ids = []
for rule_id in rule_ids:
if rule_id != None and rule_id != "":
obj_ids.append(ObjectId(rule_id))
result = await db.SensitiveRule.update_many({'_id': {'$in': obj_ids}}, {'$set': {'state': state}})
# Check if the deletion was successful
if result.modified_count > 0:
await refresh_config('all', 'sensitive')
return {"code": 200, "message": "SensitiveRules update successfully"}
else:
return {"code": 404, "message": "SensitiveRules not found"}
except Exception as e:
logger.error(str(e))
# Handle exceptions as needed
return {"message": "error", "code": 500}
@router.post("/sensitive/delete")
async def delete_sensitive_rules(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
try:
@@ -146,8 +162,6 @@ async def delete_sensitive_rules(request_data: dict, db=Depends(get_mongo_db), _
# Check if the deletion was successful
if result.deleted_count > 0:
for rule_id in rule_ids:
del SensitiveRuleList[rule_id]
await refresh_config('all', 'sensitive')
return {"code": 200, "message": "SensitiveRules deleted successfully"}
else:

View File

@@ -26,8 +26,9 @@ GET_LOG_NAME = []
NODE_TIMEOUT = 50
TOTAL_LOGS = 1000
APP = {}
SensitiveRuleList = {}
Project_List = {}
def set_timezone(t):
global TIMEZONE
TIMEZONE = t

View File

@@ -58,7 +58,7 @@ async def create_database():
await collection.insert_one(
{"name": "DirscanThread", 'value': '15', 'type': 'system'})
await collection.insert_one(
{"name": "PortscanThread", 'value': '15', 'type': 'system'})
{"name": "PortscanThread", 'value': '5', 'type': 'system'})
await collection.insert_one(
{"name": "CrawlerThread", 'value': '2', 'type': 'system'})
await collection.insert_one(
@@ -132,7 +132,7 @@ async def create_database():
await collection.insert_one(
{"id": "page_monitoring", "name": "Page Monitoring", 'hour': 24, 'type': 'Page Monitoring', 'state': True})
await get_fingerprint(client[DATABASE_NAME])
await get_sens_rule(client[DATABASE_NAME])
# await get_sens_rule(client[DATABASE_NAME])
await get_project(client[DATABASE_NAME])
except Exception as e:
# Handle the exception
@@ -153,16 +153,16 @@ async def get_fingerprint(client):
APP[document['id']] = document['name']
async def get_sens_rule(client):
collection = client["SensitiveRule"]
cursor = collection.find({}, {"_id": 1, "name": 1, "color": 1})
async for document in cursor:
document['id'] = str(document['_id'])
del document['_id']
SensitiveRuleList[document['id']] = {
"name": document['name'],
"color": document['color']
}
# async def get_sens_rule(client):
# collection = client["SensitiveRule"]
# cursor = collection.find({}, {"_id": 1, "name": 1, "color": 1})
# async for document in cursor:
# document['id'] = str(document['_id'])
# del document['_id']
# SensitiveRuleList[document['id']] = {
# "name": document['name'],
# "color": document['color']
# }
async def get_project(client):

View File

@@ -7,7 +7,7 @@ import re
import string
import sys
from loguru import logger
from core.config import TIMEZONE, APP, SensitiveRuleList, Project_List
from core.config import TIMEZONE, APP, Project_List
from datetime import timezone
from datetime import datetime, timedelta
import json
@@ -404,7 +404,7 @@ async def get_search_query(name, request_data):
if query == "" or query is None:
return ""
query = query[0]
filter_key = {'app':'app','color': 'color', 'status': 'status', 'level': 'level', 'type': 'type', 'project': 'project', 'port': 'port', 'protocol': ['protocol', 'type'], 'icon': 'faviconmmh3'}
filter_key = {'app':'app','color': 'color', 'status': 'status', 'level': 'level', 'type': 'type', 'project': 'project', 'port': 'port', 'protocol': ['protocol', 'type'], 'icon': 'faviconmmh3', "statuscode":"statuscode"}
filter = request_data.get("filter", {})
if filter:
query["$and"] = []