The following 19 code examples, extracted from Python open-source projects, illustrate how to use httplib.INTERNAL_SERVER_ERROR.
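Before the project examples, a minimal sketch (not taken from any of the projects below) may help orient the reader: the constant is simply the integer 500, defined alongside the other status codes in Python 2's httplib (http.client / http.HTTPStatus in Python 3), and it is typically compared against a response's status to decide whether a request hit a server-side error. The host used here is a placeholder.

import httplib  # Python 2 standard library; Python 3 moved this to http.client

# httplib.INTERNAL_SERVER_ERROR is just the integer status code 500.
assert httplib.INTERNAL_SERVER_ERROR == 500

# 'example.com' is a placeholder host for illustration only.
conn = httplib.HTTPConnection('example.com')
conn.request('GET', '/')
resp = conn.getresponse()
if resp.status >= httplib.INTERNAL_SERVER_ERROR:
    # Any 5xx code indicates a server-side failure and is usually worth retrying.
    print('server error: %d %s' % (resp.status, httplib.responses[resp.status]))
conn.close()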
def _get(url, **kwargs):
    next_delay_sec = 1
    for i in xrange(TRY_LIMIT):
        if i > 0:
            # Retry server error status codes.
            LOGGER.info('Encountered server error; retrying after %d second(s).',
                        next_delay_sec)
            time.sleep(next_delay_sec)
            next_delay_sec *= 2

        p = urlparse.urlparse(url)
        c = GetConnectionClass(protocol=p.scheme)(p.netloc)
        c.request('GET', url, **kwargs)
        resp = c.getresponse()
        LOGGER.debug('GET [%s] #%d/%d (%d)', url, i+1, TRY_LIMIT, resp.status)
        if resp.status < httplib.INTERNAL_SERVER_ERROR:
            return resp
def __do_batch_create(self, project, create_list):
    """Create the new descriptors as a batch request."""
    create_method = (self.__stackdriver.stub.projects()
                     .metricDescriptors().create)

    def create_invocation(descriptor):
        name = descriptor['name']
        logging.info('batch CREATE %s', name)
        return create_method(
            name='projects/{0}'.format(project), body=descriptor)

    get_descriptor_name = lambda descriptor: descriptor['name']
    processor = BatchProcessor(
        project, self.__stackdriver,
        create_list, create_invocation, get_descriptor_name)
    processor.process()

    response_code = (httplib.OK if processor.num_ok == len(create_list)
                     else httplib.INTERNAL_SERVER_ERROR)
    headers, body = processor.make_response(
        None, False, 'Created', 'Added Descriptor')
    return response_code, headers, body
def __init__(self):
    self.api_key_valid = True
    self.response_code = httplib.INTERNAL_SERVER_ERROR
    self.response_size = report_request.NOT_SET
    self.request_size = report_request.NOT_SET
    self.http_method = None
    self.url = None
def test_should_include_detail_in_error_text_when_needed(self):
    detail = u'details, details, details'
    resp = sc_messages.AllocateQuotaResponse(
        allocateErrors=[
            sc_messages.QuotaError(
                code=sc_messages.QuotaError.CodeValueValuesEnum.OUT_OF_RANGE,
                description=detail)
        ]
    )
    code, got = quota_request.convert_response(resp, self.PROJECT_ID)
    expect(code).to(equal(httplib.INTERNAL_SERVER_ERROR))
    assert got.endswith(detail)
def test_send_project_invalid_server_res(self, mock_cs):
    mock_cs.side_effect = [None]

    api = API.apiCalls.ApiCalls(
        client_id="",
        client_secret="",
        base_URL="",
        username="",
        password=""
    )

    session_response = Foo()
    setattr(session_response, "status_code", httplib.INTERNAL_SERVER_ERROR)
    setattr(session_response, "text", "Server unavailable")

    session_post = MagicMock(side_effect=[session_response])
    session = Foo()
    setattr(session, "post", session_post)
    api.session = session

    api.get_link = lambda x, y, targ_dict="": None
    proj = API.apiCalls.Project("project1", "projectDescription", "1")

    with self.assertRaises(API.apiCalls.ProjectError) as err:
        api.send_project(proj)

    self.assertTrue(str(session_response.status_code) + " " +
                    session_response.text in str(err.exception))
def showHttpErrorCodes():
    """
    Shows all HTTP error codes raised till now
    """

    if kb.httpErrorCodes:
        warnMsg = "HTTP error codes detected during run:\n"
        warnMsg += ", ".join("%d (%s) - %d times" % (code, httplib.responses[code] \
            if code in httplib.responses else '?', count) \
            for code, count in kb.httpErrorCodes.items())
        logger.warn(warnMsg)

        if any((str(_).startswith('4') or str(_).startswith('5')) and _ != httplib.INTERNAL_SERVER_ERROR and _ != kb.originalCode for _ in kb.httpErrorCodes.keys()):
            msg = "too many 4xx and/or 5xx HTTP error codes "
            msg += "could mean that some kind of protection is involved (e.g. WAF)"
            logger.debug(msg)
def should_retry(self, error, retries_attempted):
    """Return true if the http client should retry the request.

    :param error: the caught error.
    :type error: Exception

    :param retries_attempted: the number of retries which has been attempted before.
    :type retries_attempted: int

    :return: true if the http client should retry the request.
    :rtype: bool
    """
    # stop retrying when the maximum number of retries is reached
    if retries_attempted >= self.max_error_retry:
        return False

    # always retry on IOError
    if isinstance(error, IOError):
        _logger.debug('Retry for IOError.')
        return True

    # Only retry on a subset of service exceptions
    if isinstance(error, BceServerError):
        if error.status_code == httplib.INTERNAL_SERVER_ERROR:
            _logger.debug('Retry for internal server error.')
            return True
        if error.status_code == httplib.SERVICE_UNAVAILABLE:
            _logger.debug('Retry for service unavailable.')
            return True
        if error.code == BceServerError.REQUEST_EXPIRED:
            _logger.debug('Retry for request expired.')
            return True

    return False
def scan_page(url, data=None):
    retval, usable = False, False
    url, data = re.sub(r"=(&|\Z)", "=1\g<1>", url) if url else url, re.sub(r"=(&|\Z)", "=1\g<1>", data) if data else data
    try:
        for phase in (GET, POST):
            original, current = None, url if phase is GET else (data or "")
            for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", current):
                vulnerable, usable = False, True
                #print "* scanning %s parameter '%s'" % (phase, match.group("parameter"))
                original = original or (_retrieve_content(current, data) if phase is GET else _retrieve_content(url, current))
                tampered = current.replace(match.group(0), "%s%s" % (match.group(0), urllib.quote("".join(random.sample(TAMPER_SQL_CHAR_POOL, len(TAMPER_SQL_CHAR_POOL))))))
                content = _retrieve_content(tampered, data) if phase is GET else _retrieve_content(url, tampered)
                for (dbms, regex) in ((dbms, regex) for dbms in DBMS_ERRORS for regex in DBMS_ERRORS[dbms]):
                    if not vulnerable and re.search(regex, content[HTML], re.I) and not re.search(regex, original[HTML], re.I):
                        #print " (i) %s parameter '%s' appears to be error SQLi vulnerable (%s)" % (phase, match.group("parameter"), dbms)
                        print url
                        retval = vulnerable = True
                vulnerable = False
                for prefix, boolean, suffix, inline_comment in itertools.product(PREFIXES, BOOLEAN_TESTS, SUFFIXES, (False, True)):
                    if not vulnerable:
                        template = ("%s%s%s" % (prefix, boolean, suffix)).replace(" " if inline_comment else "/**/", "/**/")
                        payloads = dict((_, current.replace(match.group(0), "%s%s" % (match.group(0), urllib.quote(template % (RANDINT if _ else RANDINT + 1, RANDINT), safe='%')))) for _ in (True, False))
                        contents = dict((_, _retrieve_content(payloads[_], data) if phase is GET else _retrieve_content(url, payloads[_])) for _ in (False, True))
                        if all(_[HTTPCODE] and _[HTTPCODE] < httplib.INTERNAL_SERVER_ERROR for _ in (original, contents[True], contents[False])):
                            if any(original[_] == contents[True][_] != contents[False][_] for _ in (HTTPCODE, TITLE)):
                                vulnerable = True
                            else:
                                ratios = dict((_, difflib.SequenceMatcher(None, original[TEXT], contents[_][TEXT]).quick_ratio()) for _ in (False, True))
                                vulnerable = all(ratios.values()) and min(ratios.values()) < FUZZY_THRESHOLD < max(ratios.values()) and abs(ratios[True] - ratios[False]) > FUZZY_THRESHOLD / 10
                        if vulnerable:
                            #print " (i) %s parameter '%s' appears to be blind SQLi vulnerable (e.g.: '%s')" % (phase, match.group("parameter"), payloads[True])
                            print url
                            retval = True
    except KeyboardInterrupt:
        pass
    return retval
def scan_page(url, data=None):
    retval, usable = False, False
    url, data = re.sub(r"=(&|\Z)", "=1\g<1>", url) if url else url, re.sub(r"=(&|\Z)", "=1\g<1>", data) if data else data
    # tmpurl = re.sub(r"=(&|\Z)", "=1\g<1>", url)
    try:
        for phase in (GET, POST):
            original, current = None, url if phase is GET else (data or "")
            for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", current):
                vulnerable, usable = False, True
                print "* scanning %s parameter '%s'" % (phase, match.group("parameter"))
                original = original or (_retrieve_content(current, data) if phase is GET else _retrieve_content(url, current))
                tampered = current.replace(match.group(0), "%s%s" % (match.group(0), urllib.quote("".join(random.sample(TAMPER_SQL_CHAR_POOL, len(TAMPER_SQL_CHAR_POOL))))))
                content = _retrieve_content(tampered, data) if phase is GET else _retrieve_content(url, tampered)
                for (dbms, regex) in ((dbms, regex) for dbms in DBMS_ERRORS for regex in DBMS_ERRORS[dbms]):
                    if not vulnerable and re.search(regex, content[HTML], re.I) and not re.search(regex, original[HTML], re.I):
                        print " (i) %s parameter '%s' appears to be error SQLi vulnerable (%s)" % (phase, match.group("parameter"), dbms)
                        retval = vulnerable = True
                vulnerable = False
                for prefix, boolean, suffix, inline_comment in itertools.product(PREFIXES, BOOLEAN_TESTS, SUFFIXES, (False, True)):
                    if not vulnerable:
                        template = ("%s%s%s" % (prefix, boolean, suffix)).replace(" " if inline_comment else "/**/", "/**/")
                        payloads = dict((_, current.replace(match.group(0), "%s%s" % (match.group(0), urllib.quote(template % (RANDINT if _ else RANDINT + 1, RANDINT), safe='%')))) for _ in (True, False))
                        contents = dict((_, _retrieve_content(payloads[_], data) if phase is GET else _retrieve_content(url, payloads[_])) for _ in (False, True))
                        if all(_[HTTPCODE] and _[HTTPCODE] < httplib.INTERNAL_SERVER_ERROR for _ in (original, contents[True], contents[False])):
                            if any(original[_] == contents[True][_] != contents[False][_] for _ in (HTTPCODE, TITLE)):
                                vulnerable = True
                            else:
                                ratios = dict((_, difflib.SequenceMatcher(None, original[TEXT], contents[_][TEXT]).quick_ratio()) for _ in (False, True))
                                vulnerable = all(ratios.values()) and min(ratios.values()) < FUZZY_THRESHOLD < max(ratios.values()) and abs(ratios[True] - ratios[False]) > FUZZY_THRESHOLD / 10
                        if vulnerable:
                            print " (i) %s parameter '%s' appears to be blind SQLi vulnerable (e.g.: '%s')" % (phase, match.group("parameter"), payloads[True])
                            retval = True
        if not usable:
            print " (x) no usable GET/POST parameters found"
    except KeyboardInterrupt:
        print "\r (x) Ctrl-C pressed"
    return retval
def _call_api(self, method, uri, **kwargs):
    def _set_token_in_request():
        if 'params' in kwargs:
            kwargs['params']['access_token'] = self._access_token
        elif 'data' in kwargs:
            kwargs['data']['access_token'] = self._access_token
        else:
            kwargs['params'] = dict(access_token=self._access_token)

    _set_token_in_request()
    try:
        response = NetatmoClient._invoke(method, '%s%s' % (NetatmoClient.API_BASE_URL, uri), **kwargs)
    except InvalidStatusCode, i:
        if NetatmoClient._is_token_expired(i):
            try:
                self.request_refresh_token()
                _set_token_in_request()
                response = NetatmoClient._invoke(method, '%s%s' % (NetatmoClient.API_BASE_URL, uri), **kwargs)
            except InvalidStatusCode, other:
                if other.status_code / 100 == 4:
                    self._access_token = None
                    self._refresh_token = None
                raise
        else:
            raise

    if kwargs.get('raw_api_call', False):
        return response
    else:
        result = response.json()
        _logger.debug('%s - %s', uri, json.dumps(result))
        if result['status'] != "ok":
            raise InvalidStatusCode(httplib.INTERNAL_SERVER_ERROR, result)
        else:
            return result.get('body')
def process_web_request(self, request, path, params, fragment):
    """Implements CommandHandler."""
    options = dict(get_global_options())
    options.update(params)
    type_map, processor = self.__do_clear(options)
    response_code = (httplib.OK if processor.num_ok == len(type_map)
                     else httplib.INTERNAL_SERVER_ERROR)
    headers, body = processor.make_response(
        request, self.accepts_content_type(request, 'text/html'),
        'Deleted', 'Cleared Time Series')
    request.respond(response_code, headers, body)
def check_resp_status_and_retry(resp, image_id, url):
    # Note(Jesse): This branch sorts errors into those that are permanent,
    # those that are ephemeral, and those that are unexpected.
    if resp.status in (httplib.BAD_REQUEST,                      # 400
                       httplib.UNAUTHORIZED,                     # 401
                       httplib.PAYMENT_REQUIRED,                 # 402
                       httplib.FORBIDDEN,                        # 403
                       httplib.METHOD_NOT_ALLOWED,               # 405
                       httplib.NOT_ACCEPTABLE,                   # 406
                       httplib.PROXY_AUTHENTICATION_REQUIRED,    # 407
                       httplib.CONFLICT,                         # 409
                       httplib.GONE,                             # 410
                       httplib.LENGTH_REQUIRED,                  # 411
                       httplib.PRECONDITION_FAILED,              # 412
                       httplib.REQUEST_ENTITY_TOO_LARGE,         # 413
                       httplib.REQUEST_URI_TOO_LONG,             # 414
                       httplib.UNSUPPORTED_MEDIA_TYPE,           # 415
                       httplib.REQUESTED_RANGE_NOT_SATISFIABLE,  # 416
                       httplib.EXPECTATION_FAILED,               # 417
                       httplib.UNPROCESSABLE_ENTITY,             # 422
                       httplib.LOCKED,                           # 423
                       httplib.FAILED_DEPENDENCY,                # 424
                       httplib.UPGRADE_REQUIRED,                 # 426
                       httplib.NOT_IMPLEMENTED,                  # 501
                       httplib.HTTP_VERSION_NOT_SUPPORTED,       # 505
                       httplib.NOT_EXTENDED,                     # 510
                       ):
        raise PluginError("Got Permanent Error response [%i] while "
                          "uploading image [%s] to glance [%s]"
                          % (resp.status, image_id, url))
    # Nova service would process the exception
    elif resp.status == httplib.NOT_FOUND:  # 404
        exc = XenAPI.Failure('ImageNotFound')
        raise exc
    # NOTE(nikhil): Only a sub-set of the 500 errors are retryable. We
    # optimistically retry on 500 errors below.
    elif resp.status in (httplib.REQUEST_TIMEOUT,         # 408
                         httplib.INTERNAL_SERVER_ERROR,   # 500
                         httplib.BAD_GATEWAY,             # 502
                         httplib.SERVICE_UNAVAILABLE,     # 503
                         httplib.GATEWAY_TIMEOUT,         # 504
                         httplib.INSUFFICIENT_STORAGE,    # 507
                         ):
        raise RetryableError("Got Ephemeral Error response [%i] while "
                             "uploading image [%s] to glance [%s]"
                             % (resp.status, image_id, url))
    else:
        # Note(Jesse): Assume unexpected errors are retryable. If you are
        # seeing this error message, the error should probably be added
        # to either the ephemeral or permanent error list.
        raise RetryableError("Got Unexpected Error response [%i] while "
                             "uploading image [%s] to glance [%s]"
                             % (resp.status, image_id, url))
def do_GET(self):
    path, query = self.path.split('?', 1) if '?' in self.path else (self.path, "")
    code, content, params, cursor = httplib.OK, HTML_PREFIX, dict((match.group("parameter"), urllib.unquote(','.join(re.findall(r"(?:\A|[?&])%s=([^&]+)" % match.group("parameter"), query)))) for match in re.finditer(r"((\A|[?&])(?P<parameter>[\w\[\]]+)=)([^&]+)", query)), connection.cursor()
    try:
        if path == '/':
            if "id" in params:
                cursor.execute("SELECT id, username, name, surname FROM users WHERE id=" + params["id"])
                content += "<div><span>Result(s):</span></div><table><thead><th>id</th><th>username</th><th>name</th><th>surname</th></thead>%s</table>%s" % ("".join("<tr>%s</tr>" % "".join("<td>%s</td>" % ("-" if _ is None else _) for _ in row) for row in cursor.fetchall()), HTML_POSTFIX)
            elif "v" in params:
                content += re.sub(r"(v<b>)[^<]+(</b>)", r"\g<1>%s\g<2>" % params["v"], HTML_POSTFIX)
            elif "object" in params:
                content = str(pickle.loads(params["object"]))
            elif "path" in params:
                content = (open(os.path.abspath(params["path"]), "rb") if not "://" in params["path"] else urllib.urlopen(params["path"])).read()
            elif "domain" in params:
                content = subprocess.check_output("nslookup " + params["domain"], shell=True, stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
            elif "xml" in params:
                content = lxml.etree.tostring(lxml.etree.parse(cStringIO.StringIO(params["xml"]), lxml.etree.XMLParser(no_network=False)), pretty_print=True)
            elif "name" in params:
                found = lxml.etree.parse(cStringIO.StringIO(USERS_XML)).xpath(".//user[name/text()='%s']" % params["name"])
                content += "<b>Surname:</b> %s%s" % (found[-1].find("surname").text if found else "-", HTML_POSTFIX)
            elif "size" in params:
                start, _ = time.time(), "<br>".join("#" * int(params["size"]) for _ in range(int(params["size"])))
                content += "<b>Time required</b> (to 'resize image' to %dx%d): %.6f seconds%s" % (int(params["size"]), int(params["size"]), time.time() - start, HTML_POSTFIX)
            elif "comment" in params or query == "comment=":
                if "comment" in params:
                    cursor.execute("INSERT INTO comments VALUES(NULL, '%s', '%s')" % (params["comment"], time.ctime()))
                    content += "Thank you for leaving the comment. Please click here <a href=\"/?comment=\">here</a> to see all comments%s" % HTML_POSTFIX
                else:
                    cursor.execute("SELECT id, comment, time FROM comments")
                    content += "<div><span>Comment(s):</span></div><table><thead><th>id</th><th>comment</th><th>time</th></thead>%s</table>%s" % ("".join("<tr>%s</tr>" % "".join("<td>%s</td>" % ("-" if _ is None else _) for _ in row) for row in cursor.fetchall()), HTML_POSTFIX)
            elif "include" in params:
                backup, sys.stdout, program, envs = sys.stdout, cStringIO.StringIO(), (open(params["include"], "rb") if not "://" in params["include"] else urllib.urlopen(params["include"])).read(), {"DOCUMENT_ROOT": os.getcwd(), "HTTP_USER_AGENT": self.headers.get("User-Agent"), "REMOTE_ADDR": self.client_address[0], "REMOTE_PORT": self.client_address[1], "PATH": path, "QUERY_STRING": query}
                exec(program) in envs
                content += sys.stdout.getvalue()
                sys.stdout = backup
            elif "redir" in params:
                content = content.replace("<head>", "<head><meta http-equiv=\"refresh\" content=\"0; url=%s\"/>" % params["redir"])
            if HTML_PREFIX in content and HTML_POSTFIX not in content:
                content += "<div><span>Attacks:</span></div>\n<ul>%s\n</ul>\n" % ("".join("\n<li%s>%s - <a href=\"%s\">vulnerable</a>|<a href=\"%s\">exploit</a>|<a href=\"%s\" target=\"_blank\">info</a></li>" % (" class=\"disabled\" title=\"module 'python-lxml' not installed\"" if ("lxml.etree" not in sys.modules and any(_ in case[0].upper() for _ in ("XML", "XPATH"))) else "", case[0], case[1], case[2], case[3]) for case in CASES)).replace("<a href=\"None\">vulnerable</a>|", "<b>-</b>|")
        elif path == "/users.json":
            content = "%s%s%s" % ("" if not "callback" in params else "%s(" % params["callback"], json.dumps(dict((_.findtext("username"), _.findtext("surname")) for _ in xml.etree.ElementTree.fromstring(USERS_XML).findall("user"))), "" if not "callback" in params else ")")
        elif path == "/login":
            cursor.execute("SELECT * FROM users WHERE username='" + re.sub(r"[^\w]", "", params.get("username", "")) + "' AND password='" + params.get("password", "") + "'")
            content += "Welcome <b>%s</b><meta http-equiv=\"Set-Cookie\" content=\"SESSIONID=%s; path=/\"><meta http-equiv=\"refresh\" content=\"1; url=/\"/>" % (re.sub(r"[^\w]", "", params.get("username", "")), "".join(random.sample(string.letters + string.digits, 20))) if cursor.fetchall() else "The username and/or password is incorrect<meta http-equiv=\"Set-Cookie\" content=\"SESSIONID=; path=/; expires=Thu, 01 Jan 1970 00:00:00 GMT\">"
        else:
            code = httplib.NOT_FOUND
    except Exception, ex:
        content = ex.output if isinstance(ex, subprocess.CalledProcessError) else traceback.format_exc()
        code = httplib.INTERNAL_SERVER_ERROR
    finally:
        self.send_response(code)
        self.send_header("Connection", "close")
        self.send_header("X-XSS-Protection", "0")
        self.send_header("Content-Type", "%s%s" % ("text/html" if content.startswith("<!DOCTYPE html>") else "text/plain", "; charset=%s" % params.get("charset", "utf8")))
        self.end_headers()
        self.wfile.write("%s%s" % (content, HTML_POSTFIX if HTML_PREFIX in content and GITHUB not in content else ""))
        self.wfile.flush()
        self.wfile.close()
def __do_batch_update(self, project, update_list, original_type_map):
    """Orchestrate updates of existing descriptors.

    Args:
      project: [string] The project we're updating in.
      update_list: [list of descriptors] The new descriptor definitions.
      original_type_map: [type to descriptor] The original definitions
         in case we need to restore them.
    """
    get_descriptor_name = lambda descriptor: descriptor['name']

    delete_errors = []
    create_errors = []
    restore_errors = []
    failed_list = []
    create_list = []
    success_list = []
    restore_list = []
    not_updated_list = []
    lost_list = []

    if update_list:
        self.__do_batch_update_delete_helper(
            project, update_list, create_list, failed_list, delete_errors)
    if create_list:
        self.__do_batch_update_create_helper(
            project, create_list, success_list, restore_list, create_errors)

    restore_list = [original_type_map[elem['type']] for elem in restore_list]
    if restore_list:
        # If we successfully restore, we left it in the original unupdated state.
        # If we failed to restore, then we've lost the descriptor entirely.
        self.__do_batch_update_create_helper(
            project, restore_list, not_updated_list, lost_list, restore_errors)

    response_code = (httplib.OK
                     if len(failed_list) + len(create_errors) == 0
                     else httplib.INTERNAL_SERVER_ERROR)
    bodies = []
    for elem in success_list:
        bodies.append('Updated {0} to {1}'.format(elem['type'], elem))
    for index, elem in enumerate(failed_list):
        bodies.append('Failed to update {0} to {1}: {2}'.format(
            elem['type'], elem, delete_errors[index]))
    for index, elem in enumerate(restore_list):
        bodies.append('Failed to update {0} to {1}: {2}'.format(
            elem['type'], elem, create_errors[index]))
    for index, elem in enumerate(lost_list):
        bodies.append('Lost {0}. It used to be {1}: {2}'.format(
            elem['type'], elem, restore_errors[index]))

    return response_code, {'Content-Type': 'text/plain'}, '\n'.join(bodies)