add update, fix bug

This commit is contained in:
Autumn.home 2024-07-16 23:30:31 +08:00
parent afeffa6d3f
commit 689debfe87
8 changed files with 297 additions and 222 deletions

View File

@@ -55,27 +55,24 @@ async def asset_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Dep
if query == "":
return {"message": "Search condition parsing error", "code": 500}
total_count = await db['asset'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['asset'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},
"host": 1,
"url": 1,
"ip": 1,
"port": 1,
"protocol": 1,
"type": 1,
"title": 1,
"statuscode": 1,
"rawheaders": 1,
"webfinger": 1,
"technologies": 1,
"raw": 1,
"timestamp": 1,
"iconcontent": 1
})
.skip((page_index - 1) * page_size)
.limit(page_size))
.sort([("timestamp", DESCENDING)]))
result = cursor.to_list(length=None)
cursor = db['asset'].find(query, {"_id": 0,
"id": {"$toString": "$_id"},
"host": 1,
"url": 1,
"ip": 1,
"port": 1,
"protocol": 1,
"type": 1,
"title": 1,
"statuscode": 1,
"rawheaders": 1,
"webfinger": 1,
"technologies": 1,
"raw": 1,
"timestamp": 1,
"iconcontent": 1
}).skip((page_index - 1) * page_size).limit(page_size).sort([("timestamp", DESCENDING)])
result = await cursor.to_list(length=None)
result_list = []
for r in result:
tmp = {}
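
The change above fixes two problems in asset_data: the old call chain wrapped find() in redundant parentheses, and cursor.to_list() was never awaited, so the handler got a coroutine instead of documents. A minimal sketch of the corrected Motor pagination pattern, assuming a local MongoDB and an asset collection (the connection string and demo database name are illustrative, not from the commit):

from motor.motor_asyncio import AsyncIOMotorClient
from pymongo import DESCENDING

async def list_assets(page_index: int, page_size: int):
    db = AsyncIOMotorClient("mongodb://localhost:27017")["demo"]
    # find() returns an AsyncIOMotorCursor synchronously; only to_list() is awaited.
    cursor = (db["asset"]
              .find({}, {"_id": 0, "host": 1, "url": 1, "title": 1})
              .skip((page_index - 1) * page_size)
              .limit(page_size)
              .sort([("timestamp", DESCENDING)]))
    return await cursor.to_list(length=None)

# usage: asyncio.run(list_assets(1, 20))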

View File

@@ -14,7 +14,7 @@ from starlette.responses import FileResponse
from api.users import verify_token
from motor.motor_asyncio import AsyncIOMotorCursor
from core.db import get_mongo_db
from core.db import get_mongo_db, get_project
import pandas as pd
from core.util import *
from pymongo import ASCENDING, DESCENDING, results
@@ -151,7 +151,7 @@ async def fetch_data(db, collection, query, quantity, project_list):
{"$project": {"_id": 0, "vulnid": 0}}
]
cursor = await db[collection].aggregate(pipeline)
cursor = db[collection].aggregate(pipeline)
return cursor
@@ -172,10 +172,21 @@ def flatten_dict(d):
items.append((k, v))
return dict(items)
def clean_string(value):
if isinstance(value, str):
# Filter out illegal characters (ASCII code < 32 or >= 127)
return ''.join(char for char in value if 32 <= ord(char) < 127)
return value
async def export_data_from_mongodb(quantity, query, file_name, index):
logger.info("导出开始")
async for db in get_mongo_db():
try:
global Project_List
if len(Project_List) == 0:
await get_project(db)
cursor = await fetch_data(db, index, query, quantity, Project_List)
result = await cursor.to_list(length=None)
relative_path = f'file/{file_name}.xlsx'
@@ -235,10 +246,10 @@ async def export_data_from_mongodb(quantity, query, file_name, index):
for doc in result:
flattened_doc = flatten_dict(doc)
if doc["type"] == "other":
row = [flattened_doc.get(col, "") for col in other_columns.keys()]
row = [clean_string(flattened_doc.get(col, "")) for col in other_columns.keys()]
other_ws.append(row)
else:
row = [flattened_doc.get(col, "") for col in http_columns.keys()]
row = [clean_string(flattened_doc.get(col, "")) for col in http_columns.keys()]
http_ws.append(row)
else:
columns = {}
@@ -283,7 +294,7 @@ async def export_data_from_mongodb(quantity, query, file_name, index):
for doc in result:
flattened_doc = flatten_dict(doc)
row = [flattened_doc.get(col, "") for col in columns.keys()]
row = [clean_string(flattened_doc.get(col, "")) for col in columns.keys()]
ws.append(row)
try:
wb.save(file_path)
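
The new clean_string() helper strips control characters and non-ASCII bytes before rows are appended, because openpyxl refuses cell values that contain characters such as NUL or ESC. A minimal sketch of the effect, assuming openpyxl is installed (the file name and sample data are illustrative); note that the filter also drops any non-ASCII text, such as Chinese titles, which is the trade-off of this approach:

from openpyxl import Workbook

def clean_string(value):
    # Keep only printable ASCII (codes 32-126), mirroring the commit.
    if isinstance(value, str):
        return ''.join(ch for ch in value if 32 <= ord(ch) < 127)
    return value

wb = Workbook()
ws = wb.active
doc = {"url": "http://example.com", "title": "login\x00page\x1b"}
ws.append([clean_string(v) for v in doc.values()])  # writes "loginpage" instead of raising
wb.save("demo.xlsx")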

View File

@@ -116,23 +116,23 @@ async def get_projects_all(db=Depends(get_mongo_db), _: dict = Depends(verify_to
async def update_project_count():
db = await get_mongo_db()
cursor = db.project.find({}, {"_id": 0, "id": {"$toString": "$_id"}})
results = await cursor.to_list(length=None)
async for db in get_mongo_db():
cursor = db.project.find({}, {"_id": 0, "id": {"$toString": "$_id"}})
results = await cursor.to_list(length=None)
async def update_count(id):
query = {"project": {"$eq": id}}
total_count = await db.asset.count_documents(query)
update_document = {
"$set": {
"AssetCount": total_count
async def update_count(id):
query = {"project": {"$eq": id}}
total_count = await db.asset.count_documents(query)
update_document = {
"$set": {
"AssetCount": total_count
}
}
}
await db.project.update_one({"_id": ObjectId(id)}, update_document)
await db.project.update_one({"_id": ObjectId(id)}, update_document)
fetch_tasks = [update_count(r['id']) for r in results]
fetch_tasks = [update_count(r['id']) for r in results]
await asyncio.gather(*fetch_tasks)
await asyncio.gather(*fetch_tasks)
@router.post("/project/content")
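
update_project_count() above now obtains its database handle with "async for db in get_mongo_db():" (the dependency is an async generator, so the removed "db = await get_mongo_db()" did not yield a usable handle) and refreshes every project's AssetCount concurrently via asyncio.gather. A minimal sketch of that fan-out, with an illustrative connection and demo database (collection and field names mirror the diff):

import asyncio
from bson import ObjectId
from motor.motor_asyncio import AsyncIOMotorClient

async def update_project_counts():
    db = AsyncIOMotorClient("mongodb://localhost:27017")["demo"]
    projects = await db.project.find(
        {}, {"_id": 0, "id": {"$toString": "$_id"}}).to_list(length=None)

    async def update_count(project_id: str):
        total = await db.asset.count_documents({"project": project_id})
        await db.project.update_one({"_id": ObjectId(project_id)},
                                    {"$set": {"AssetCount": total}})

    # One coroutine per project, all scheduled concurrently on the same event loop.
    await asyncio.gather(*(update_count(p["id"]) for p in projects))

# usage: asyncio.run(update_project_counts())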

View File

@@ -256,7 +256,8 @@ async def get_sensitive_result_data2(request_data: dict, db=Depends(get_mongo_db
"time": 1,
"sid": 1,
"match": 1,
"color": 1
"color": 1,
"md5": 1
}
},
{
@@ -267,7 +268,7 @@ async def get_sensitive_result_data2(request_data: dict, db=Depends(get_mongo_db
"_id": "$url",
"time": {"$first": "$time"}, # record the time of the earliest inserted document for the same url
"url": {"$first": "$url"},
"body_id": {"$last": {"$toString": "$_id"}}, # record the _id of the earliest inserted document for the same url
"body_id": {"$last": {"$toString": "$md5"}}, # record the md5 of the earliest inserted document for the same url
"children": {
"$push": {
"id": {"$toString": "$_id"},
@@ -316,7 +317,7 @@ async def get_sensitive_result_body_rules(request_data: dict, db=Depends(get_mon
return {"message": "ID is missing in the request data", "code": 400}
# Query the database for content based on ID
query = {"_id": ObjectId(sensitive_result_id)}
query = {"md5": sensitive_result_id}
doc = await db.SensitiveResult.find_one(query)
if not doc:

View File

@@ -8,7 +8,7 @@ import string
import yaml
VERSION = "1.3"
VERSION = "1.4"
UPDATEURL = "http://update.scope-sentry.top"
REMOTE_REPO_URL = "https://github.com/Autumn-27/ScopeSentry.git"
SECRET_KEY = "ScopeSentry-15847412364125411"

View File

@@ -94,7 +94,7 @@ async def create_database():
# {"name": "DomainDic", 'value': domainDict, 'type': 'domainDict'})
sensitive_data = get_sensitive()
collection = client[DATABASE_NAME]["SensitiveRule"]
if sensitiveList:
if sensitive_data:
await collection.insert_many(sensitive_data)
collection = client[DATABASE_NAME]["ScheduledTasks"]
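
The one-line fix above points the emptiness check at sensitive_data, the variable actually populated by get_sensitive() two lines earlier, and keeps insert_many from being called with an empty list, which it rejects. A minimal sketch, with an illustrative connection and rule document:

from motor.motor_asyncio import AsyncIOMotorClient

async def seed_sensitive_rules(rules: list):
    coll = AsyncIOMotorClient("mongodb://localhost:27017")["demo"]["SensitiveRule"]
    # insert_many() raises if given an empty list, so only insert when rules exist.
    if rules:
        await coll.insert_many(rules)

# usage: asyncio.run(seed_sensitive_rules([{"name": "demo-rule", "regular": "password="}]))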

File diff suppressed because it is too large.

main.py (95 lines changed)
View File

@@ -8,7 +8,7 @@ from starlette.middleware.base import BaseHTTPMiddleware
from starlette.staticfiles import StaticFiles
from core.config import *
from core.default import get_dirDict, get_domainDict
from core.default import get_dirDict, get_domainDict, get_sensitive
set_config()
@@ -35,43 +35,64 @@ from core.apscheduler_handler import scheduler
async def update():
async for db in get_mongo_db():
# The default project has an empty root_domain, which matches all assets
cursor = db.project.find({"root_domains": ""}, {"_id": 1, "root_domains": 1})
async for document in cursor:
logger.info("Update found empty root_domains")
root_domain = []
for root in document["root_domains"]:
if root != "":
root_domain.append(root)
update_document = {
"$set": {
"root_domains": root_domain,
# Check the version
result = await db.config.find_one({"name": "version"})
version = 0
update = False
if result is not None:
version = result["version"]
update = result["update"]
if version < float(VERSION):
update = False
else:
await db.config.insert_one({"name": "version", "version": float(VERSION), "update": False})
version = float(VERSION)
if version <= 1.4 and update is False:
# The default project has an empty root_domain, which matches all assets
cursor = db.project.find({"root_domains": ""}, {"_id": 1, "root_domains": 1})
async for document in cursor:
logger.info("Update found empty root_domains")
root_domain = []
for root in document["root_domains"]:
if root != "":
root_domain.append(root)
update_document = {
"$set": {
"root_domains": root_domain,
}
}
}
await db.project.update_one({"_id": document['_id']}, update_document)
# Change how the directory dictionary is stored
fs = AsyncIOMotorGridFSBucket(db)
result = await db.config.find_one({"name": "DirDic"})
if result:
await db.config.delete_one({"name": "DirDic"})
content = get_dirDict()
if content:
byte_content = content.encode('utf-8')
await fs.upload_from_stream('dirdict', byte_content)
logger.info("Document DirDict uploaded to GridFS.")
else:
logger.error("No dirdict content to upload.")
# Change how the subdomain dictionary is stored
result = await db.config.find_one({"name": "DomainDic"})
if result:
await db.config.delete_one({"name": "DomainDic"})
content = get_domainDict()
if content:
byte_content = content.encode('utf-8')
await fs.upload_from_stream('DomainDic', byte_content)
logger.info("Document DomainDic uploaded to GridFS.")
else:
logger.error("No DomainDic content to upload.")
await db.project.update_one({"_id": document['_id']}, update_document)
# Change how the directory dictionary is stored
fs = AsyncIOMotorGridFSBucket(db)
result = await db.config.find_one({"name": "DirDic"})
if result:
await db.config.delete_one({"name": "DirDic"})
content = get_dirDict()
if content:
byte_content = content.encode('utf-8')
await fs.upload_from_stream('dirdict', byte_content)
logger.info("Document DirDict uploaded to GridFS.")
else:
logger.error("No dirdict content to upload.")
# Change how the subdomain dictionary is stored
result = await db.config.find_one({"name": "DomainDic"})
if result:
await db.config.delete_one({"name": "DomainDic"})
content = get_domainDict()
if content:
byte_content = content.encode('utf-8')
await fs.upload_from_stream('DomainDic', byte_content)
logger.info("Document DomainDic uploaded to GridFS.")
else:
logger.error("No DomainDic content to upload.")
# Update the sensitive info rules
await db.SensitiveRule.delete_many({})
sensitive_data = get_sensitive()
collection = db["SensitiveRule"]
if sensitive_data:
await collection.insert_many(sensitive_data)
await db.config.update_one({"name": "version"}, {"$set": {"update": True, "version": float(VERSION)}})
@app.on_event("startup")
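
The rewritten update() wraps the old migrations in a version gate: a config document named "version" records the schema version and whether the migration has already run, the migrations execute only when version <= 1.4 and the update flag is False, and the flag is set to True afterwards so later startups skip the work. A condensed sketch of that pattern (the migration body is elided; db is a Motor database handle, for example obtained through get_mongo_db() as in the diff):

CURRENT_VERSION = 1.4

async def run_startup_migrations(db):
    doc = await db.config.find_one({"name": "version"})
    if doc is None:
        # First start: record the version with the migration marked as not yet applied.
        await db.config.insert_one({"name": "version", "version": CURRENT_VERSION, "update": False})
        version, applied = CURRENT_VERSION, False
    else:
        version, applied = doc["version"], doc["update"]
        if version < CURRENT_VERSION:
            applied = False  # data written by an older version: run the migration again
    if version <= CURRENT_VERSION and not applied:
        # ... migration body: fix empty root_domains, move dictionaries to GridFS,
        # re-seed the SensitiveRule collection ...
        await db.config.update_one({"name": "version"},
                                    {"$set": {"update": True, "version": CURRENT_VERSION}})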