This commit is contained in:
Autumn.home 2024-06-29 20:34:22 +08:00
parent 4adcc89403
commit 656d74054d
6 changed files with 356 additions and 32 deletions

View File

@ -54,6 +54,7 @@ async def get_subdomaintaker_data(request_data: dict, db=Depends(get_mongo_db),
"value": doc["value"],
"type": doc["cname"],
"response": doc["response"],
"id": str(doc["_id"])
}
response_data.append(data)
return {

View File

@ -558,3 +558,306 @@ async def asset_data_statistics2(request_data: dict, db=Depends(get_mongo_db), _
"code": 200,
"data": result_list
}
@router.post("/asset/statistics/port")
async def asset_data_statistics_port(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Count assets per port matching the search filter, sorted by count descending."""
    search_query = request_data.get("search", "")
    # Mapping from search-DSL keywords to the MongoDB field(s) they target.
    keyword = {
        'app': '',
        'body': 'responsebody',
        'header': 'rawheaders',
        'project': 'project',
        'title': 'title',
        'statuscode': 'statuscode',
        'icon': 'faviconmmh3',
        'ip': ['host', 'ip'],
        'domain': ['host', 'url', 'domain'],
        'port': 'port',
        'protocol': ['protocol', 'type'],
        'banner': 'raw',
    }
    query = await search_to_mongodb(search_query, keyword)
    if query == "" or query is None:
        return {"message": "Search condition parsing error", "code": 500}
    query = query[0]
    pipeline = [
        # Apply the parsed search filter first, then bucket by port.
        {"$match": query},
        {
            "$facet": {
                "by_port": [
                    {"$group": {"_id": "$port", "num_tutorial": {"$sum": 1}}},
                    {"$match": {"_id": {"$ne": None}}},
                ]
            }
        },
    ]
    facets = await db['asset'].aggregate(pipeline).to_list(None)
    counts = {}
    for facet in facets:
        for bucket in facet['by_port']:
            counts[bucket["_id"]] = bucket["num_tutorial"]
    # Highest-frequency ports first (stable for equal counts).
    ordered = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    result_list = {"Port": [{"value": port, "number": num} for port, num in ordered]}
    return {
        "code": 200,
        "data": result_list
    }
@router.post("/asset/statistics/type")
async def asset_data_statistics_type(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Count assets per service, merging the `type` and `protocol` facets, sorted descending."""
    search_query = request_data.get("search", "")
    # Mapping from search-DSL keywords to the MongoDB field(s) they target.
    keyword = {
        'app': '',
        'body': 'responsebody',
        'header': 'rawheaders',
        'project': 'project',
        'title': 'title',
        'statuscode': 'statuscode',
        'icon': 'faviconmmh3',
        'ip': ['host', 'ip'],
        'domain': ['host', 'url', 'domain'],
        'port': 'port',
        'protocol': ['protocol', 'type'],
        'banner': 'raw',
    }
    query = await search_to_mongodb(search_query, keyword)
    if query == "" or query is None:
        return {"message": "Search condition parsing error", "code": 500}
    query = query[0]
    pipeline = [
        # Apply the parsed search filter, then bucket by both fields in one pass.
        {"$match": query},
        {
            "$facet": {
                "by_type": [
                    {"$group": {"_id": "$type", "num_tutorial": {"$sum": 1}}},
                    {"$match": {"_id": {"$ne": None}}},
                ],
                "by_protocol": [
                    {"$group": {"_id": "$protocol", "num_tutorial": {"$sum": 1}}},
                    {"$match": {"_id": {"$ne": None}}},
                ],
            }
        },
    ]
    facets = await db['asset'].aggregate(pipeline).to_list(None)
    counts = {}
    for facet in facets:
        # 'other' is a catch-all type bucket and is intentionally dropped.
        for bucket in facet['by_type']:
            if bucket['_id'] != 'other':
                counts[bucket['_id']] = bucket['num_tutorial']
        # Protocol counts take precedence when a name appears in both facets.
        for bucket in facet['by_protocol']:
            counts[bucket['_id']] = bucket['num_tutorial']
    ordered = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    result_list = {"Service": [{"value": svc, "number": num} for svc, num in ordered]}
    return {
        "code": 200,
        "data": result_list
    }
@router.post("/asset/statistics/icon")
async def asset_data_statistics_icon(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Count assets per favicon hash, keeping one icon payload per hash, sorted descending."""
    search_query = request_data.get("search", "")
    # Mapping from search-DSL keywords to the MongoDB field(s) they target.
    keyword = {
        'app': '',
        'body': 'responsebody',
        'header': 'rawheaders',
        'project': 'project',
        'title': 'title',
        'statuscode': 'statuscode',
        'icon': 'faviconmmh3',
        'ip': ['host', 'ip'],
        'domain': ['host', 'url', 'domain'],
        'port': 'port',
        'protocol': ['protocol', 'type'],
        'banner': 'raw',
    }
    query = await search_to_mongodb(search_query, keyword)
    if query == "" or query is None:
        return {"message": "Search condition parsing error", "code": 500}
    query = query[0]
    pipeline = [
        {"$match": query},  # apply the parsed search filter
        # Project only the two fields the grouping needs.
        {
            "$project": {
                "faviconmmh3": 1,
                "iconcontent": 1
            }
        },
        {
            "$facet": {
                "by_icon": [
                    {
                        "$group": {
                            "_id": "$faviconmmh3",
                            "num_tutorial": {"$sum": 1},
                            "iconcontent": {"$first": "$iconcontent"},
                        }
                    },
                    {"$match": {"_id": {"$ne": ""}}},
                ]
            }
        },
    ]
    facets = await db['asset'].aggregate(pipeline).to_list(None)
    counts = {}
    content_by_hash = {}
    for facet in facets:
        for bucket in facet['by_icon']:
            # Empty hashes are already excluded by the pipeline; re-check defensively.
            if bucket['_id'] != "":
                content_by_hash[bucket['_id']] = bucket['iconcontent']
                counts[bucket['_id']] = bucket['num_tutorial']
    ordered = sorted(counts.items(), key=lambda kv: kv[1], reverse=True)
    result_list = {
        "Icon": [
            {"value": content_by_hash[h], "number": num, "icon_hash": h}
            for h, num in ordered
        ]
    }
    return {
        "code": 200,
        "data": result_list
    }
# @router.post("/asset/statistics/icon2")
# async def asset_data_statistics_icon2(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
# search_query = request_data.get("search", "")
# keyword = {
# 'app': '',
# 'body': 'responsebody',
# 'header': 'rawheaders',
# 'project': 'project',
# 'title': 'title',
# 'statuscode': 'statuscode',
# 'icon': 'faviconmmh3',
# 'ip': ['host', 'ip'],
# 'domain': ['host', 'url', 'domain'],
# 'port': 'port',
# 'protocol': ['protocol', 'type'],
# 'banner': 'raw',
# }
# query = await search_to_mongodb(search_query, keyword)
# if query == "" or query is None:
# return {"message": "Search condition parsing error", "code": 500}
# query = query[0]
# query["faviconmmh3"] = {"$ne": ""}
# cursor = db.asset.find(query, {"_id": 0,
# "faviconmmh3": 1,
# "iconcontent": 1
# })
# results = await cursor.to_list(length=None)
# result_list = {"Icon": []}
# icon_list = {}
# icon_tmp = {}
# for r in results:
# if r['faviconmmh3'] not in icon_list:
# r['faviconmmh3'] = 1
# icon_tmp[r['faviconmmh3']] = r['iconcontent']
# else:
# r['faviconmmh3'] += 1
# icon_list = dict(sorted(icon_list.items(), key=lambda item: -item[1]))
# for ic in icon_list:
# result_list['Icon'].append({"value": icon_tmp[ic], "number": icon_list[ic], "icon_hash": ic})
#
# return {
# "code": 200,
# "data": result_list
# }
@router.post("/asset/statistics/app")
async def asset_data_statistics_app(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Count assets per product, merging technology names with resolved webfinger
    fingerprints, sorted by count descending.

    Webfinger ids are translated to product names via the module-level ``APP``
    mapping; ids with no mapping entry are skipped.
    """
    search_query = request_data.get("search", "")
    # Mapping from search-DSL keywords to the MongoDB field(s) they target.
    keyword = {
        'app': '',
        'body': 'responsebody',
        'header': 'rawheaders',
        'project': 'project',
        'title': 'title',
        'statuscode': 'statuscode',
        'icon': 'faviconmmh3',
        'ip': ['host', 'ip'],
        'domain': ['host', 'url', 'domain'],
        'port': 'port',
        'protocol': ['protocol', 'type'],
        'banner': 'raw',
    }
    query = await search_to_mongodb(search_query, keyword)
    if query == "" or query is None:
        return {"message": "Search condition parsing error", "code": 500}
    query = query[0]
    pipeline = [
        {"$match": query},  # apply the parsed search filter
        {
            "$facet": {
                "by_webfinger": [
                    {"$unwind": "$webfinger"},
                    {"$group": {"_id": "$webfinger", "num_tutorial": {"$sum": 1}}},
                    {"$match": {"_id": {"$ne": None}}},
                ],
                "by_technologies": [
                    {"$unwind": "$technologies"},
                    {"$group": {"_id": "$technologies", "num_tutorial": {"$sum": 1}}},
                    {"$match": {"_id": {"$ne": None}}},
                ],
            }
        },
    ]
    result = await db['asset'].aggregate(pipeline).to_list(None)
    tec_list = {}
    for r in result:
        for technologie in r['by_technologies']:
            tec_list[technologie['_id']] = technologie['num_tutorial']
        for webfinger in r['by_webfinger']:
            # Previously a bare `except: pass` guarded the APP[...] lookup, which
            # also hid unrelated errors; an explicit membership check only skips
            # the expected unknown-fingerprint case.
            product = APP.get(webfinger['_id'])
            if product is None:
                continue
            tec_list[product] = tec_list.get(product, 0) + webfinger['num_tutorial']
    # Highest-frequency products first (stable for equal counts).
    tec_list = dict(sorted(tec_list.items(), key=lambda item: -item[1]))
    result_list = {"Product": [{"value": tec, "number": num} for tec, num in tec_list.items()]}
    return {
        "code": 200,
        "data": result_list
    }
@router.post("/data/delete")
async def delete_data(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token)):
    """Delete documents by id from the collection named in the request.

    ``request_data`` carries ``ids`` (hex ObjectId strings) and ``index`` (the
    target collection name). Returns 200 when at least one document was
    removed, 404 when none matched, 500 on any failure.
    """
    try:
        raw_ids = request_data.get("ids", [])
        collection = request_data.get("index", "")
        # Translate the hex strings into ObjectId instances for the query.
        object_ids = [ObjectId(raw) for raw in raw_ids]
        outcome = await db[collection].delete_many({"_id": {"$in": object_ids}})
        if outcome.deleted_count > 0:
            return {"code": 200, "message": "Data deleted successfully"}
        return {"code": 404, "message": "Data not found"}
    except Exception as e:
        # Boundary handler: log the detail, return a generic error payload.
        logger.error(str(e))
        return {"message": "error", "code": 500}

View File

@ -5,6 +5,7 @@
# @time : 2024/5/14 21:59
# -------------------------------------------
import subprocess
import traceback
from fastapi import APIRouter, Depends
import git
@ -24,17 +25,26 @@ async def get_system_version(redis_con=Depends(get_redis_pool), _: dict = Depend
scan_lversion = ""
scan_msg = ""
async with httpx.AsyncClient() as client:
async with httpx.AsyncClient(verify=False) as client:
try:
r = await client.get(f"https://raw.githubusercontent.com/Autumn-27/ScopeSentry/main/version.json", timeout=5)
r = await client.get(f"https://gitee.com/constL/scope-sentry/raw/main/version.json", timeout=10)
r_json = r.json()
server_lversion = r_json["server"]
server_msg = r_json['server_msg']
scan_lversion = r_json["scan"]
scan_msg = r_json['scan_msg']
except Exception as e:
# 这里可以添加一些日志记录错误信息
logger.error(str(e))
except:
try:
r = await client.get(f"https://raw.githubusercontent.com/Autumn-27/ScopeSentry/main/version.json",
timeout=10)
r_json = r.json()
server_lversion = r_json["server"]
server_msg = r_json['server_msg']
scan_lversion = r_json["scan"]
scan_msg = r_json['scan_msg']
except Exception as e:
logger.error(traceback.format_exc())
logger.error(f"An unexpected error occurred: {e}")
result_list = [{"name": "ScopeSentry-Server", "cversion": VERSION, "lversion": server_lversion, "msg": server_msg}]
try:
@ -43,21 +53,22 @@ async def get_system_version(redis_con=Depends(get_redis_pool), _: dict = Depend
for key in keys:
name = key.split(":")[1]
hash_data = await redis.hgetall(key)
result_list.append({"name": name, "cversion": hash_data["version"], "lversion": scan_lversion, "msg": scan_msg})
result_list.append(
{"name": name, "cversion": hash_data["version"], "lversion": scan_lversion, "msg": scan_msg})
except:
pass
return {
"code": 200,
"data": {
'list': result_list
}
"code": 200,
"data": {
'list': result_list
}
}
@router.get("/system/update")
async def system_update():
await update_server()
await refresh_config("all", 'UpdateSystem')
# @router.get("/system/update")
# async def system_update():
# await update_server()
# await refresh_config("all", 'UpdateSystem')
async def update_server():
@ -65,7 +76,8 @@ async def update_server():
file_path = os.path.join(os.getcwd(), relative_path)
async with httpx.AsyncClient() as client:
try:
r = await client.get(f"https://raw.githubusercontent.com/Autumn-27/ScopeSentry/main/requirements.txt", timeout=5)
r = await client.get(f"https://raw.githubusercontent.com/Autumn-27/ScopeSentry/main/requirements.txt",
timeout=5)
content = r.text
with open(file_path, "w") as f:
f.write(content)
@ -97,4 +109,3 @@ async def update_server():
result = repo.remotes.origin.pull()
for info in result:
print(info)

View File

@ -63,6 +63,15 @@ def set_config():
REDIS_PASSWORD = data['redis']['password']
TOTAL_LOGS = data['logs']['total_logs']
TIMEZONE = data['system']['timezone']
env_db_user = os.environ.get("DATABASE_USER", default='')
if env_db_user != '' and env_db_user != DATABASE_USER:
DATABASE_USER = env_db_user
env_db_password = os.environ.get("DATABASE_PASSWORD", default='')
if env_db_password != '' and env_db_password != DATABASE_PASSWORD:
DATABASE_PASSWORD = env_db_password
env_redis_password = os.environ.get("REDIS_PASSWORD", default='')
if env_redis_password != '' and env_redis_password != REDIS_PASSWORD:
REDIS_PASSWORD = env_redis_password
else:
TIMEZONE = os.environ.get("TIMEZONE", default='Asia/Shanghai')
MONGODB_IP = os.environ.get("MONGODB_IP", default='127.0.0.1')

32
main.py
View File

@ -25,27 +25,26 @@ from starlette.exceptions import HTTPException as StarletteHTTPException
from starlette.websockets import WebSocketDisconnect
from core.redis_handler import subscribe_log_channel
app = FastAPI()
app = FastAPI(timeout=None)
from core.apscheduler_handler import scheduler
async def update():
if float(VERSION) <= 1.2:
async for db in get_mongo_db():
cursor = db.project.find({"root_domains": ""},{"_id":1, "root_domains": 1})
async for document in cursor:
logger.info("Update found empty root_domains")
root_domain = []
for root in document["root_domains"]:
if root != "":
root_domain.append(root)
update_document = {
"$set": {
"root_domains": root_domain,
}
async for db in get_mongo_db():
cursor = db.project.find({"root_domains": ""}, {"_id": 1, "root_domains": 1})
async for document in cursor:
logger.info("Update found empty root_domains")
root_domain = []
for root in document["root_domains"]:
if root != "":
root_domain.append(root)
update_document = {
"$set": {
"root_domains": root_domain,
}
await db.project.update_one({"_id": document['_id']}, update_document)
}
await db.project.update_one({"_id": document['_id']}, update_document)
@app.on_event("startup")
@ -151,7 +150,7 @@ class MongoDBQueryTimeMiddleware(BaseHTTPMiddleware):
return response
SQLTIME = False
SQLTIME = True
if SQLTIME:
app.add_middleware(MongoDBQueryTimeMiddleware)
@ -193,6 +192,7 @@ def banner():
| | __/ |
|_| |___/ '''
print(banner)
print("Server Version:", VERSION)
if __name__ == "__main__":

Binary file not shown.