Autumn.home 2024-06-30 23:40:24 +08:00
parent 656d74054d
commit ac00e0969f
9 changed files with 127627 additions and 101 deletions

View File

@@ -706,7 +706,7 @@ async def asset_data_statistics_icon(request_data: dict, db=Depends(get_mongo_db
"iconcontent": {"$first": "$iconcontent"}
}
},
{"$match": {"_id": {"$ne": ""}}}
{"$match": {"_id": {"$ne": None}}}
]
}
}
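
The change above swaps `""` for `None` in the post-`$group` filter: documents that lack the grouped field fall into a bucket whose `_id` is BSON null (Python `None`), so `{"$ne": ""}` never excluded them. A minimal runnable sketch of the same behavior, assuming a local MongoDB and a hypothetical `asset` collection with an `icon` field:

import asyncio
from motor.motor_asyncio import AsyncIOMotorClient

async def icon_counts():
    db = AsyncIOMotorClient("mongodb://localhost:27017")["demo"]
    pipeline = [
        # Documents missing "icon" all land in one group with _id = None.
        {"$group": {"_id": "$icon", "count": {"$sum": 1}}},
        # {"$ne": ""} would keep that None bucket; {"$ne": None} drops it.
        {"$match": {"_id": {"$ne": None}}},
    ]
    return [doc async for doc in db.asset.aggregate(pipeline)]

print(asyncio.run(icon_counts()))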

View File

@@ -25,14 +25,15 @@ async def dirscan_data(request_data: dict, db=Depends(get_mongo_db), _: dict = D
'project': 'project',
'statuscode': 'status',
'url': 'url',
'redirect': 'msg'
'redirect': 'msg',
'length': 'length'
}
query = await search_to_mongodb(search_query, keyword)
if query == "" or query is None:
return {"message": "Search condition parsing error", "code": 500}
query = query[0]
total_count = await db['DirScanResult'].count_documents(query)
cursor: AsyncIOMotorCursor = ((db['DirScanResult'].find(query, {"_id": 0})
cursor: AsyncIOMotorCursor = ((db['DirScanResult'].find(query, {"_id": 0, "id": {"$toString": "$_id"}, "url": 1, "status": 1, "msg":1, "length": 1})
.sort([('_id', -1)])
.skip((page_index - 1) * page_size)
.limit(page_size)))
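
For reference, the new projection relies on an aggregation expression inside `find` (`$toString` on `_id`), which requires MongoDB 4.4 or newer. A sketch of the full paginated query under that assumption:

from motor.motor_asyncio import AsyncIOMotorCursor

async def page_dirscan(db, query: dict, page_index: int, page_size: int):
    projection = {
        "_id": 0,
        "id": {"$toString": "$_id"},  # expose the ObjectId as a plain string
        "url": 1, "status": 1, "msg": 1, "length": 1,
    }
    cursor: AsyncIOMotorCursor = (db["DirScanResult"].find(query, projection)
                                  .sort([("_id", -1)])
                                  .skip((page_index - 1) * page_size)
                                  .limit(page_size))
    return await cursor.to_list(length=None)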

View File

@@ -3,7 +3,7 @@ import time
from bson import ObjectId
from fastapi import APIRouter, Depends, BackgroundTasks
from api.task import create_scan_task
from api.task import create_scan_task, delete_asset
from api.users import verify_token
from motor.motor_asyncio import AsyncIOMotorCursor
@@ -187,25 +187,25 @@ async def add_project_rule(request_data: dict, db=Depends(get_mongo_db), _: dict
async def delete_project_rules(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token),
background_tasks: BackgroundTasks = BackgroundTasks()):
try:
# Extract the list of IDs from the request_data dictionary
pro_id = request_data.get("id", '')
# Convert the provided rule_ids to ObjectId
obj_id = ObjectId(pro_id)
# Delete the SensitiveRule documents based on the provided IDs
result = await db.project.delete_many({"_id": {"$eq": obj_id}})
await db.ProjectTargetData.delete_many({"id": {"$eq": pro_id}})
pro_ids = request_data.get("ids", [])
delA = request_data.get("delA", False)
if delA:
background_tasks.add_task(delete_asset, pro_ids, db, True)
obj_ids = [ObjectId(poc_id) for poc_id in pro_ids]
result = await db.project.delete_many({"_id": {"$in": obj_ids}})
await db.ProjectTargetData.delete_many({"id": {"$in": pro_ids}})
# Check if the deletion was successful
if result.deleted_count > 0:
job = scheduler.get_job(pro_id)
if job:
scheduler.remove_job(pro_id)
background_tasks.add_task(delete_asset_project_handler, pro_id)
for project_id in Project_List:
if pro_id == Project_List[project_id]:
del Project_List[project_id]
break
for pro_id in pro_ids:
job = scheduler.get_job(pro_id)
if job:
scheduler.remove_job(pro_id)
background_tasks.add_task(delete_asset_project_handler, pro_id)
for project_id in Project_List:
if pro_id == Project_List[project_id]:
del Project_List[project_id]
break
await db.ScheduledTasks.delete_many({"id": {"$in": pro_ids}})
return {"code": 200, "message": "Project deleted successfully"}
else:
return {"code": 404, "message": "Project not found"}
@@ -323,7 +323,8 @@ async def add_asset_project(db, domain, project_id, updata=False):
cursor: AsyncIOMotorCursor = ((db['asset'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1,
"host": 1
"host": 1,
"project": 1,
})))
result = await cursor.to_list(length=None)
logger.debug(f"asset project null number is {len(result)}")
@@ -349,12 +350,13 @@ async def add_asset_project(db, domain, project_id, updata=False):
}
await db['asset'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['asset'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['asset'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_asset_project error:{e}")
@@ -367,7 +369,8 @@ async def add_subdomain_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['subdomain'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"host": 1
"host": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"subdomain project null number is {len(result)}")
@@ -389,12 +392,13 @@ async def add_subdomain_project(db, domain, project_id, updata=False):
}
await db['subdomain'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['subdomain'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['subdomain'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_subdomain_project error:{e}")
@@ -407,7 +411,8 @@ async def add_url_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['UrlScan'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"input": 1
"input": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"url project null number is {len(result)}")
@@ -429,12 +434,13 @@ async def add_url_project(db, domain, project_id, updata=False):
}
await db['UrlScan'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['UrlScan'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['UrlScan'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_url_project error:{e}")
@@ -447,7 +453,8 @@ async def add_crawler_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['crawler'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1
"url": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"crawler project null number is {len(result)}")
@@ -469,12 +476,13 @@ async def add_crawler_project(db, domain, project_id, updata=False):
}
await db['crawler'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['crawler'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['crawler'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_crawler_project error:{e}")
@@ -487,7 +495,8 @@ async def add_sensitive_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['SensitiveResult'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1
"url": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"sensitive project null number is {len(result)}")
@@ -509,12 +518,13 @@ async def add_sensitive_project(db, domain, project_id, updata=False):
}
await db['SensitiveResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['SensitiveResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['SensitiveResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_sensitive_project error:{e}")
@@ -527,7 +537,8 @@ async def add_dir_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['DirScanResult'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1
"url": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"dir project null number is {len(result)}")
@@ -549,12 +560,13 @@ async def add_dir_project(db, domain, project_id, updata=False):
}
await db['DirScanResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['DirScanResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['DirScanResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_dir_project error:{e}")
@@ -567,7 +579,8 @@ async def add_vul_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['vulnerability'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1
"url": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"vul project null number is {len(result)}")
@@ -589,12 +602,13 @@ async def add_vul_project(db, domain, project_id, updata=False):
}
await db['vulnerability'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['vulnerability'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['vulnerability'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_vul_project error:{e}")
@@ -608,7 +622,8 @@ async def add_PageMonitoring_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['PageMonitoring'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"url": 1
"url": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"PageMonitoring project null number is {len(result)}")
@@ -630,12 +645,13 @@ async def add_PageMonitoring_project(db, domain, project_id, updata=False):
}
await db['PageMonitoring'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['PageMonitoring'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['PageMonitoring'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_PageMonitoring_project error:{e}")
@@ -648,7 +664,8 @@ async def add_SubTaker_project(db, domain, project_id, updata=False):
query = {"project": {"$eq": ""}}
cursor: AsyncIOMotorCursor = ((db['SubdoaminTakerResult'].find(query, {
"_id": 0, "id": {"$toString": "$_id"},
"Input": 1
"Input": 1,
"project": 1
})))
result = await cursor.to_list(length=None)
logger.debug(f"SubTaker project null number is {len(result)}")
@@ -670,12 +687,13 @@ async def add_SubTaker_project(db, domain, project_id, updata=False):
}
await db['SubdoaminTakerResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
else:
update_document = {
"$set": {
"project": "",
if r["project"] != "":
update_document = {
"$set": {
"project": "",
}
}
}
await db['SubdoaminTakerResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
await db['SubdoaminTakerResult'].update_one({"_id": ObjectId(r['id'])}, update_document)
except Exception as e:
logger.error(f"add_SubTaker_project error:{e}")

View File

@@ -43,7 +43,7 @@ async def get_system_version(redis_con=Depends(get_redis_pool), _: dict = Depend
scan_lversion = r_json["scan"]
scan_msg = r_json['scan_msg']
except Exception as e:
logger.error(traceback.format_exc())
# logger.error(traceback.format_exc())
logger.error(f"An unexpected error occurred: {e}")
result_list = [{"name": "ScopeSentry-Server", "cversion": VERSION, "lversion": server_lversion, "msg": server_msg}]

View File

@@ -70,6 +70,10 @@ async def get_task_data(request_data: dict, db=Depends(get_mongo_db), _: dict =
async def add_task(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token), redis_con=Depends(get_redis_pool)):
try:
name = request_data.get("name")
cursor = db.task.find({"name": {"$eq": name}}, {"_id": 1})
results = await cursor.to_list(length=None)
if len(results) != 0:
return {"code": 400, "message": "name already exists"}
target = request_data.get("target", "")
node = request_data.get("node")
if name == "" or target == "" or node == []:
@@ -160,10 +164,11 @@ async def task_content(request_data: dict, db=Depends(get_mongo_db), _: dict = D
@router.post("/task/delete")
async def delete_task(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token), redis_con=Depends(get_redis_pool)):
async def delete_task(request_data: dict, db=Depends(get_mongo_db), _: dict = Depends(verify_token), redis_con=Depends(get_redis_pool), background_tasks: BackgroundTasks = BackgroundTasks()):
try:
# Extract the list of IDs from the request_data dictionary
task_ids = request_data.get("ids", [])
delA = request_data.get("delA", False)
# Convert the provided rule_ids to ObjectId
obj_ids = []
@@ -171,6 +176,12 @@ async def delete_task(request_data: dict, db=Depends(get_mongo_db), _: dict = De
for task_id in task_ids:
obj_ids.append(ObjectId(task_id))
redis_key.append("TaskInfo:" + task_id)
job = scheduler.get_job(task_id)
if job:
scheduler.remove_job(task_id)
await db.ScheduledTasks.delete_many({"id": {"$in": task_ids}})
if delA:
background_tasks.add_task(delete_asset, task_ids, db)
await redis_con.delete(*redis_key)
# Delete the SensitiveRule documents based on the provided IDs
result = await db.task.delete_many({"_id": {"$in": obj_ids}})
@@ -514,6 +525,7 @@ async def progress_info(request_data: dict, _: dict = Depends(verify_token), red
}
}
async def scheduler_scan_task(id):
logger.info(f"Scheduler scan {id}")
async for db in get_mongo_db():
@@ -543,4 +555,22 @@ async def scheduler_scan_task(id):
t = t.strip("\n").strip("\r").strip()
if t != "" and t not in targetList:
targetList.append(t)
await create_scan_task(doc, task_id, targetList, redis)
await create_scan_task(doc, task_id, targetList, redis)
async def delete_asset(task_ids, db, is_project = False):
key = ["asset", "subdomain", "SubdoaminTakerResult", "UrlScan", "crawler", "SensitiveResult", "DirScanResult", "vulnerability", "PageMonitoring"]
del_query = {"taskId": {"$in": task_ids}}
if is_project:
del_query = {
"$or": [
{"taskId": {"$in": task_ids}},
{"project": {"$in": task_ids}}
]
}
for k in key:
result = await db[k].delete_many(del_query)
if result.deleted_count > 0:
logger.info("Deleted {} {} documents".format(k, result.deleted_count))
else:
logger.info("Deleted {} None documents".format(k))

View File

@@ -8,7 +8,6 @@ from urllib.parse import quote_plus
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCursor
from core.default import *
from core.config import *
from core.util import string_to_postfix
from loguru import logger
@@ -33,11 +32,11 @@ async def create_database():
serverSelectionTimeoutMS=2000)
break
except Exception as e:
time.sleep(5)
time.sleep(10)
check_flag += 1
if check_flag == 5:
if check_flag == 10:
logger.error(f"Error re creating database: {e}")
exit(0)
exit(1)
# Get the list of database names
database_names = await client.list_database_names()
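
The retry loop now waits 10 s between attempts, tries 10 times, and exits non-zero on failure so a supervisor can notice. A standalone sketch of the same bounded-retry connect, using a `ping` to force server selection (the client itself connects lazily, so constructing it alone never fails):

import sys
import time
from pymongo import MongoClient
from pymongo.errors import PyMongoError

def connect_with_retry(uri: str, attempts: int = 10, delay: int = 10) -> MongoClient:
    for _ in range(attempts):
        try:
            client = MongoClient(uri, serverSelectionTimeoutMS=2000)
            client.admin.command("ping")  # raises if no server is reachable
            return client
        except PyMongoError:
            time.sleep(delay)
    sys.exit(1)  # non-zero: the process manager should treat this as failure
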
@@ -120,22 +119,8 @@ async def create_database():
await collection.insert_many(target_data)
collection = client[DATABASE_NAME]["FingerprintRules"]
fingerprint_rules = get_fingerprint_data()
for rule in fingerprint_rules:
express = string_to_postfix(rule['rule'])
if express == "":
continue
default_rule = {
'name': rule['product'],
'rule': rule['rule'],
'express': express,
'category': rule['category'],
'parent_category': rule['parent_category'],
'company': rule['company'],
'amount': 0,
'state': True
}
await collection.insert_one(default_rule)
fingerprint = get_finger()
await collection.insert_many(fingerprint)
else:
collection = client[DATABASE_NAME]["config"]
result = await collection.find_one({"name": "timezone"})

View File

@@ -7,7 +7,6 @@ import os
from bson import ObjectId
from core.util import *
from loguru import logger
current_directory = os.getcwd()
@@ -16,6 +15,12 @@ dict_directory = "dicts"
combined_directory = os.path.join(current_directory, dict_directory)
def read_json_file(file_path):
with open(file_path, encoding='utf-8') as f:
data = json.load(f)
return data
def get_domainDict():
domainDict = ""
try:
@@ -46,6 +51,13 @@ def get_poc():
return data
def get_finger():
fingerPath = os.path.join(combined_directory, "ScopeSentry.FingerprintRules.json")
data = read_json_file(fingerPath)
for d in data:
d.pop('_id', None)
return data
def get_project_data():
project_path = os.path.join(combined_directory, "ScopeSentry.project.json")
data = read_json_file(project_path)
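
The `_id` pop in `get_finger` matters because the bundled JSON is a collection export: re-inserting documents that still carry their exported `_id` values could collide on repeated initialization. A sketch of the general fixture-loading pattern (hypothetical path argument):

import json

def load_fixture(path: str) -> list[dict]:
    with open(path, encoding="utf-8") as f:
        docs = json.load(f)
    for d in docs:
        # Drop exported _id values so insert_many assigns fresh ObjectIds
        # and repeated imports cannot raise duplicate-key errors.
        d.pop("_id", None)
    return docs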

View File

@@ -13,6 +13,8 @@ from datetime import datetime, timedelta
import json
from urllib.parse import urlparse
from core.db import get_mongo_db
def calculate_md5_from_content(content):
md5 = hashlib.md5()
@@ -206,6 +208,7 @@ def string_to_postfix(expression):
async def search_to_mongodb(expression_raw, keyword):
try:
keyword["task"] = "taskId"
if expression_raw == "":
return [{}]
if len(APP) == 0:
@@ -226,7 +229,7 @@ async def search_to_mongodb(expression_raw, keyword):
key = key.strip()
if key in keyword:
value = value.strip("\"")
if key == 'statuscode':
if key == 'statuscode' or key == 'length':
value = int(value)
if key == 'project':
if value.lower() in Project_List:
@@ -258,7 +261,14 @@ async def search_to_mongodb(expression_raw, keyword):
key = key.strip()
if key in keyword:
value = value.strip("\"")
if key == 'statuscode':
if key == "task":
async for db in get_mongo_db():
query = {"name": {"$eq": value}}
doc = await db.task.find_one(query)
if doc is not None:
taskid = str(doc.get("_id"))
value = taskid
if key == 'statuscode' or key == 'length':
value = int(value)
if key == 'project':
if value.lower() in Project_List:
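
Two search-syntax changes land in this file: `length` joins `statuscode` as an int-coerced field, and a `task="name"` term is now resolved to the stored task's `_id` string before it becomes a `taskId` filter. The lookup, sketched in isolation:

async def resolve_task_value(db, value: str) -> str:
    # Map a human-readable task name onto the taskId stored on results;
    # fall back to the raw value if no task matches.
    doc = await db.task.find_one({"name": {"$eq": value}})
    return str(doc["_id"]) if doc is not None else value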

File diff suppressed because it is too large