Skip to content
This repository has been archived by the owner on Dec 16, 2022. It is now read-only.

Commit

Permalink
Merge pull request #128 from sethsec/dev
Browse files Browse the repository at this point in the history
bugfixes for v1.3
  • Loading branch information
sethsec authored Feb 27, 2020
2 parents 78d91a9 + 00c2a4d commit be44075
Show file tree
Hide file tree
Showing 6 changed files with 22 additions and 165 deletions.
7 changes: 3 additions & 4 deletions celerystalk
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ import csv

from lib.nmap import nmapcommand

build=str(239)
build=str(242)


def print_banner():
Expand Down Expand Up @@ -467,7 +467,7 @@ def main(arguments):
lib.utils.restart_services(config_file)
exit()
if arguments["reset"]:
lib.administrative.backup_all_workspaces(backup_file=arguments["-f"])
backup_result = lib.administrative.backup_all_workspaces(backup_file=arguments["-f"])
print("[!] WARNING: This action destroy the celerystalk database and flush the redis DB.")
answer = raw_input("Are you sure you want to continue? (y\N): ")
print("")
Expand All @@ -482,7 +482,7 @@ def main(arguments):
lib.utils.shutdown_background_jobs()
exit()
if arguments["backup"]:
lib.administrative.backup_all_workspaces(backup_file=arguments["-f"])
backup_result = lib.administrative.backup_all_workspaces(backup_file=arguments["-f"])
if arguments["restore"]:
if arguments["-f"]:
restore_file = arguments["-f"]
Expand Down Expand Up @@ -557,7 +557,6 @@ def main(arguments):
answer = raw_input("[!] Would you like to take screenshots of all paths before generating the report? [Y\\n] ")
if (answer == "Y") or (answer == "y") or (answer == ""):
lib.screenshot.screenshot_command(arguments)
exit()
if arguments["-t"]:
target_list = lib.utils.target_splitter(arguments["-t"])
lib.report.report(workspace,config_file,target_list)
Expand Down
4 changes: 2 additions & 2 deletions lib/administrative.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def backup_all_workspaces(backup_file=None):
for workspace in workspaces:
archive.add(workspace[1], recursive=True)
print("[+] Successfully backed up DB and [{0}] workspaces to [{1}]".format(len(workspaces),backup_filename))

return {'Result':'Success','Workspaces':len(workspaces),'FileName':backup_filename}


def restore_all_workspaces(restore_file):
Expand All @@ -26,7 +26,7 @@ def restore_all_workspaces(restore_file):
answer = raw_input("Are you sure you want to continue? (y\N): ")
print("")
if (answer == "Y") or (answer == "y"):
backup_all_workspaces()
backup_result = backup_all_workspaces()
os.chdir("/")
tar = tarfile.open(restore_file)
tar.extractall()
Expand Down
22 changes: 0 additions & 22 deletions lib/scan.py
Original file line number Diff line number Diff line change
Expand Up @@ -253,13 +253,6 @@ def send_commands_to_celery(populated_command_tuple,output_base_dir,simulation):
# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command, celery_path, task_id).set(task_id=task_id),

# right now, every executed command gets sent to a generic post_process task that can do
            # additional stuff based on the command that just ran.
tasks.post_process.si(cmd_name, populated_command, output_base_dir, workspace, vhost, host_dir,
simulation,
scanned_service_port, scanned_service_name, scanned_service_protocol,
celery_path),
)() # .apply_async()

#task_id_list.append(result.task_id)
Expand Down Expand Up @@ -421,12 +414,6 @@ def process_url(url, workspace, output_dir, arguments,config_file=None):
# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command, celery_path, task_id).set(task_id=task_id),

# right now, every executed command gets sent to a generic post_process task that can do
                # additional stuff based on the command that just ran.
tasks.post_process.si(cmd_name, populated_command, output_dir, workspace, vhost,
host_dir,
simulation, port, scheme, proto, celery_path),
)() # .apply_async()

task_id_list.append(result.task_id)
Expand Down Expand Up @@ -614,11 +601,6 @@ def parse_config_and_send_commands_to_celery(scanned_service_name, scanned_servi
# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command,celery_path,task_id).set(task_id=task_id),

# right now, every executed command gets sent to a generic post_process task that can do
            # additional stuff based on the command that just ran.
tasks.post_process.si(cmd_name, populated_command, output_base_dir, workspace, ip, host_dir, simulation,
scanned_service_port, scanned_service_name, scanned_service_protocol,celery_path),
)() # .apply_async()

task_id_list.append(result.task_id)
Expand Down Expand Up @@ -659,10 +641,6 @@ def create_dns_recon_tasks(domains,simulation,workspace,output_base_dir,out_of_s
# run the command. run_task takes care of marking the task as started and then completed.
# The si tells run_cmd to ignore the data returned from a previous task
tasks.run_cmd.si(cmd_name, populated_command,celery_path,task_id,process_domain_tuple=process_domain_tuple).set(task_id=task_id),

# right now, every executed command gets sent to a generic post_process task that can do
            # additional stuff based on the command that just ran.
#tasks.post_process_domains.s(cmd_name, populated_command, output_base_dir, workspace, domain, simulation,celery_path,workspace_mode),
)() # .apply_async()
task_id_list.append(result.task_id)

Expand Down
2 changes: 1 addition & 1 deletion parsers/generic_urlextract.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ def extract_in_scope_urls_from_task_output(tool_output):

def check_if_page_exists(url):
try:
response = requests.head(url, timeout=5)
response = requests.head(url, timeout=5, verify=False)
status_code = response.status_code
reason = response.reason
except requests.exceptions.ConnectionError:
Expand Down
24 changes: 16 additions & 8 deletions setup/config_default.ini
Original file line number Diff line number Diff line change
Expand Up @@ -121,13 +121,17 @@ wayback : curl -sX GET "http://web.archive.org/cdx/search/cdx?url=ht
cewl : cewl http://[TARGET]:[PORT]/[PATH] -m 6 -w [OUTPUT].txt
nikto : nikto -h http://[TARGET]:[PORT]/[PATH] -output [OUTPUT].txt
wpscan : wpscan --url http://[TARGET]:[PORT]/[PATH] --disable-tls-checks --no-banner -f cli-no-color --enumerate p t tt u | tee [OUTPUT].txt
gobuster_common : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
gobuster_common : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
gobuster_cmn_w_slash: gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_robots_dis : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_robots_slash : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-medium : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-md_slash : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_cgis : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;gobuster_cgis_w_sl : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;wappalyzer_cli : docker run --rm wappalyzer/cli http://[TARGET]:[PORT]/[PATH] | jq . | tee [OUTPUT].txt
;whatweb-4 : whatweb http://[TARGET]:[PORT]/[PATH] -a4 --colour=never | sed s/],/]\\n/g | tee [OUTPUT].txt
;sqlmap : sqlmap -u http://[TARGET]:[PORT]/[PATH] --batch --crawl=5 --level 1 --risk 1 -f -a --smart -v0 --disable-coloring | tee [OUTPUT].txt
;gobuster_robots-dis : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-medium : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_cgis : gobuster dir -u http://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;curl_robots : curl http://[TARGET]:[PORT]/[PATH]/robots.txt --user-agent 'Googlebot/2.1 (+http://www.google.com/bot.html)' --connect-timeout 30 --max-time 180 | tee [OUTPUT].txt
;nmap_https_vanquish : nmap [TARGET] -p [PORT] -Pn --script='http* and not (broadcast or dos or external or http-slowloris* or fuzzer)' -oN [output].txt --host-timeout 40m --script-timeout 20m
;nmap_http_vuln : nmap -sC -sV -Pn -v -p [PORT] --script=http-vuln* --script-args http-vuln-cve2014-3704.cmd="uname -a",http-vuln-cve2014-3704.uri="/drupal" [TARGET] -oN [OUTPUT].txt --host-timeout 40m --script-timeout 20m
Expand All @@ -142,13 +146,17 @@ wayback : curl -sX GET "http://web.archive.org/cdx/search/cdx?url=ht
cewl : cewl https://[TARGET]:[PORT]/[PATH] -m 6 -w [OUTPUT].txt
nikto : nikto -h https://[TARGET]:[PORT]/[PATH] -ssl -output [OUTPUT].txt
wpscan : wpscan --url https://[TARGET]:[PORT]/[PATH] --disable-tls-checks --no-banner -f cli-no-color --enumerate p t tt u | tee [OUTPUT].txt
gobuster_common : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
gobuster_common : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
gobuster_cm_w_slash : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/common.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-medium : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-md_slash : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_robots1 : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_robots_slash : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_cgis : gobuster dir -u https://[TARGET]:[PORT]/ -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;gobuster_cgis_slash : gobuster dir -u https://[TARGET]:[PORT]/ -f -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;wappalyzer_cli : docker run --rm wappalyzer/cli https://[TARGET]:[PORT]/[PATH] | jq . | tee [OUTPUT].txt
;whatweb-4 : whatweb https://[TARGET]:[PORT]/[PATH] -a4 --colour=never | sed s/],/]\\n/g | tee [OUTPUT].txt
;sqlmap : sqlmap -u https://[TARGET]:[PORT]/[PATH] --batch --crawl=5 --level 1 --risk 1 -f -a --smart -v0 --disable-coloring | tee [OUTPUT].txt
;gobuster_robots_dis : gobuster dir -u https://[TARGET]:[PORT]/[PATH] -f -k -w /usr/share/seclists/Discovery/Web-Content/RobotsDisallowed-Top1000.txt -s '200,204,302,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_2.3-medium : gobuster dir -u https://[TARGET]:[PORT]/ -f -k -w /usr/share/wordlists/dirbuster/directory-list-lowercase-2.3-medium.txt -s '200,204,307,403,500' -e -n -q | tee [OUTPUT].txt
;gobuster_cgis : gobuster dir -u https://[TARGET]:[PORT]/ -f -k -w /usr/share/seclists/Discovery/Web_Content/cgis.txt -s '200,204,301,307,403,500' -e -n -q > '[OUTPUT].txt
;curl_robots : curl https://[TARGET]:[PORT]/[PATH]/robots.txt --user-agent 'Googlebot/2.1 (+http://www.google.com/bot.html)' --connect-timeout 30 --max-time 180 | tee [OUTPUT].txt
;nmap_https_vuln : nmap [TARGET] -p [PORT] -sC -sV -Pn --script=http-vuln* --script-args http-vuln-cve2014-3704.cmd="uname -a",http-vuln-cve2014-3704.uri="/drupal" [TARGET] -oN [OUTPUT].txt --host-timeout 40m --script-timeout 20m
;nmap_https_vanquish : nmap [TARGET] -p [PORT] --script='(http* or ssl*) and not (broadcast or dos or external or http-slowloris* or fuzzer)' -oN [output].txt --host-timeout 20m --script-timeout 20m
Expand Down
Loading

0 comments on commit be44075

Please sign in to comment.