Updated command-line options (help text):

-tc TRUSTED_CERTIFICATES, --trusted-certificates TRUSTED_CERTIFICATES
                        (crawler option) trust this CA_BUNDLE file (.pem) or directory with certificates
-h, --help              show this help message and exit
-c, --crawl             use the crawler to scan the entire domain
-vp, --verify-payload   use a JavaScript engine to verify whether the payload was executed (otherwise false positives may occur)
-av ANGULAR_VERSION, --angular-version ANGULAR_VERSION
                        manually pass the Angular version (e.g. 1.4.2) if the automatic check doesn't work
-vrl VULNERABLE_REQUESTS_LOG, --vulnerable-requests-log VULNERABLE_REQUESTS_LOG
                        log all vulnerable requests to this file (e.g. /var/logs/acstis.log or urls.log)
-siv, --stop-if-vulnerable
                        (crawler option) stop scanning if a vulnerability was found
-pmm, --protocol-must-match
                        (crawler option) only scan pages with the same protocol as the startpoint (e.g. only https)
-sos, --scan-other-subdomains
                        (crawler option) also scan pages on a different subdomain than the startpoint
-soh, --scan-other-hostnames
                        (crawler option) also scan pages on a different hostname than the startpoint
-sot, --scan-other-tlds
                        (crawler option) also scan pages on a different TLD than the startpoint
-md MAX_DEPTH, --max-depth MAX_DEPTH
                        (crawler option) the maximum search depth (default is unlimited)
-mt MAX_THREADS, --max-threads MAX_THREADS
                        (crawler option) the maximum number of simultaneous threads to use (default is 8)
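For reference, an invocation that crawls a target, stops at the first finding, and writes every vulnerable request to a log file might look like the line below; the -d/--domain target flag is an assumption and does not appear in this diff, so check the tool's own usage output.

python acstis.py -d "https://example.com/" -c -siv -vrl vulnerable-urls.log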
optional.add_argument("-c", "--crawl", help="use the crawler to scan all the entire domain", action="store_true")
52
52
optional.add_argument("-vp", "--verify-payload", help="use a javascript engine to verify if the payload was executed (otherwise false positives may occur)", action="store_true")
53
53
optional.add_argument("-av", "--angular-version", help="manually pass the angular version (e.g. 1.4.2) if the automatic check doesn't work", type=str, default=None)
54
+
optional.add_argument("-vrl", "--vulnerable-requests-log", help="log all vulnerable requests to this file (e.g. /var/logs/acstis.log or urls.log)", type=str, default=None)
54
55
optional.add_argument("-siv", "--stop-if-vulnerable", help="(crawler option) stop scanning if a vulnerability was found", action="store_true")
55
56
optional.add_argument("-pmm", "--protocol-must-match", help="(crawler option) only scan pages with the same protocol as the startpoint (e.g. only https)", action="store_true")
56
57
optional.add_argument("-sos", "--scan-other-subdomains", help="(crawler option) also scan pages that have another subdomain than the startpoint", action="store_true")
The same option is added to extended.py (1 addition, 0 deletions):
@@ -82,6 +82,7 @@ def require_arguments():
     optional.add_argument("-c", "--crawl", help="use the crawler to scan the entire domain", action="store_true")
     optional.add_argument("-vp", "--verify-payload", help="use a JavaScript engine to verify whether the payload was executed (otherwise false positives may occur)", action="store_true")
     optional.add_argument("-av", "--angular-version", help="manually pass the Angular version (e.g. 1.4.2) if the automatic check doesn't work", type=str, default=None)
+    optional.add_argument("-vrl", "--vulnerable-requests-log", help="log all vulnerable requests to this file (e.g. /var/logs/acstis.log or urls.log)", type=str, default=None)
     optional.add_argument("-siv", "--stop-if-vulnerable", help="(crawler option) stop scanning if a vulnerability was found", action="store_true")
     optional.add_argument("-pmm", "--protocol-must-match", help="(crawler option) only scan pages with the same protocol as the startpoint (e.g. only https)", action="store_true")
     optional.add_argument("-sos", "--scan-other-subdomains", help="(crawler option) also scan pages on a different subdomain than the startpoint", action="store_true")