add comments to connect.py

pull/3/head
wang 3 months ago
parent 574649a2f0
commit 9f3934a717

@@ -5,6 +5,7 @@ Copyright (c) 2006-2024 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
# Import the required standard libraries
import binascii
import inspect
import logging
@@ -18,6 +19,7 @@ import sys
import time
import traceback
# Try to import the websocket library; if it is missing, define a simple stand-in exception class
try:
import websocket
from websocket import WebSocketException
@@ -25,6 +27,7 @@ except ImportError:
class WebSocketException(Exception):
pass
# Import sqlmap's own libraries and utility functions
from lib.core.agent import agent
from lib.core.common import asciifyUrl
from lib.core.common import calculateDeltaSeconds
@@ -146,13 +149,18 @@ from thirdparty.socks.socks import ProxyError
class Connect(object):
"""
This class defines methods used to perform HTTP requests
"""
@staticmethod
def _getPageProxy(**kwargs):
"""
Proxy method for handling page requests.
Checks the recursion depth, then calls getPage.
"""
try:
if (len(inspect.stack()) > sys.getrecursionlimit() // 2): # Note: https://github.com/sqlmapproject/sqlmap/issues/4525
# Check whether the call-stack depth exceeds the limit
if (len(inspect.stack()) > sys.getrecursionlimit() // 2):
warnMsg = "unable to connect to the target URL"
raise SqlmapConnectionException(warnMsg)
except (TypeError, UnicodeError):
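Aside: a minimal standalone sketch of the recursion-depth guard above (illustrative, not sqlmap code). inspect.stack() materializes every live frame, so comparing its length against half of sys.getrecursionlimit() fails fast before the interpreter itself would raise RecursionError; see the sqlmap issue #4525 referenced in the removed comment.

    import inspect
    import sys

    def guarded(func, *args, **kwargs):
        # Refuse to go deeper once the call stack exceeds half the interpreter
        # limit, mirroring the check in Connect._getPageProxy
        if len(inspect.stack()) > sys.getrecursionlimit() // 2:
            raise RuntimeError("call stack too deep; aborting instead of recursing")
        return func(*args, **kwargs)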
@@ -165,9 +173,15 @@ class Connect(object):
@staticmethod
def _retryProxy(**kwargs):
"""
Retry proxy method.
Handles the retry logic when a request fails.
"""
# Get the current thread's data
threadData = getCurrentThreadData()
threadData.retriesCount += 1
# If a proxy list is configured and the retry count has reached the limit, switch to another proxy
if conf.proxyList and threadData.retriesCount >= conf.retries and not kb.locks.handlers.locked():
warnMsg = "changing proxy"
logger.warning(warnMsg)
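The comment above describes rotate-on-failure: once the retry budget for the current proxy is spent, a different proxy from the configured list is selected. A standalone sketch of that pattern with hypothetical names (ProxyRotator is not sqlmap code):

    import itertools

    class ProxyRotator:
        def __init__(self, proxies, max_retries=3):
            self._pool = itertools.cycle(proxies)  # endless round-robin over the list
            self._max_retries = max_retries
            self.retries = 0
            self.current = next(self._pool)

        def record_failure(self):
            # After max_retries consecutive failures, move on to the next proxy
            self.retries += 1
            if self.retries >= self._max_retries:
                self.current = next(self._pool)
                self.retries = 0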
@@ -177,9 +191,8 @@ class Connect(object):
setHTTPHandlers()
# Handle time-based testing mode
if kb.testMode and kb.previousMethod == PAYLOAD.METHOD.TIME:
# timed based payloads can cause web server unresponsiveness
# if the injectable piece of code is some kind of JOIN-like query
warnMsg = "most likely web server instance hasn't recovered yet "
warnMsg += "from previous timed based payload. If the problem "
warnMsg += "persists please wait for a few minutes and rerun "
@@ -188,6 +201,7 @@
warnMsg += "lower the value of option '--time-sec' (e.g. '--time-sec=2')"
singleTimeWarnMessage(warnMsg)
# Handle the case where the original page is empty
elif kb.originalPage is None:
if conf.tor:
warnMsg = "please make sure that you have "
@@ -214,20 +228,28 @@
singleTimeWarnMessage(warnMsg)
# Handle the multi-threaded case
elif conf.threads > 1:
warnMsg = "if the problem persists please try to lower "
warnMsg += "the number of used threads (option '--threads')"
singleTimeWarnMessage(warnMsg)
# Retry the request
kwargs['retrying'] = True
return Connect._getPageProxy(**kwargs)
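_retryProxy re-enters _getPageProxy with retrying=True, which is what later stops getPage from resetting threadData.retriesCount on the retry attempt (see the "if not retrying" branch further down this diff). A hedged sketch of that flag-controlled re-entry, with a hypothetical do_request transport:

    def fetch(url, retrying=False, retries_left=3):
        if not retrying:
            retries_left = 3           # fresh request: reset the retry budget
        try:
            return do_request(url)     # hypothetical transport call
        except IOError:
            if retries_left <= 0:
                raise
            # re-enter with the flag set so the remaining budget is preserved
            return fetch(url, retrying=True, retries_left=retries_left - 1)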
@staticmethod
def _connReadProxy(conn):
"""
Proxy method for reading the connection response.
Handles compressed and oversized responses.
"""
retVal = b""
# If not in DNS mode and a connection exists
if not kb.dnsMode and conn:
headers = conn.info()
# Handle a compressed response
if kb.pageCompress and headers and hasattr(headers, "getheader") and (headers.getheader(HTTP_HEADER.CONTENT_ENCODING, "").lower() in ("gzip", "deflate") or "text" not in headers.getheader(HTTP_HEADER.CONTENT_TYPE, "").lower()):
retVal = conn.read(MAX_CONNECTION_TOTAL_SIZE)
if len(retVal) == MAX_CONNECTION_TOTAL_SIZE:
@@ -236,6 +258,7 @@ class Connect(object):
kb.pageCompress = False
raise SqlmapCompressionException
else:
# Read a large response in chunks
while True:
if not conn:
break
@@ -254,11 +277,13 @@
retVal += part
break
# Check whether the total response size exceeds the limit
if len(retVal) > MAX_CONNECTION_TOTAL_SIZE:
warnMsg = "too large response detected. Automatically trimming it"
singleTimeWarnMessage(warnMsg)
break
# Handle the special response amplification factor
if conf.yuge:
retVal = YUGE_FACTOR * retVal
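Taken together, _connReadProxy reads the body in bounded chunks and trims once a total cap is reached instead of buffering an arbitrarily large response. A simplified standalone sketch of that capped read loop (the constants are illustrative, not sqlmap's):

    MAX_TOTAL_SIZE = 25 * 1024 * 1024   # illustrative cap
    CHUNK_SIZE = 64 * 1024

    def read_capped(conn):
        data = b""
        while True:
            part = conn.read(CHUNK_SIZE)
            if not part:                # stream exhausted
                break
            data += part
            if len(data) > MAX_TOTAL_SIZE:
                # too large: keep what was read so far and stop, as warned above
                break
        return data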
@@ -267,13 +292,14 @@
@staticmethod
def getPage(**kwargs):
"""
This method connects to the target URL or proxy and returns
the target URL page content
"""
# In offline mode, return immediately
if conf.offline:
return None, None, None
# Get the request parameters
url = kwargs.get("url", None) or conf.url
get = kwargs.get("get", None)
post = kwargs.get("post", None)
@@ -297,16 +323,19 @@
finalCode = kwargs.get("finalCode", False)
chunked = kwargs.get("chunked", False) or conf.chunked
# Handle the request delay
if isinstance(conf.delay, (int, float)) and conf.delay > 0:
time.sleep(conf.delay)
start = time.time()
# Get the current thread's data
threadData = getCurrentThreadData()
with kb.locks.request:
kb.requestCounter += 1
threadData.lastRequestUID = kb.requestCounter
# Handle the proxy change frequency
if conf.proxyFreq:
if kb.requestCounter % conf.proxyFreq == 0:
conf.proxy = None
@@ -316,6 +345,7 @@
setHTTPHandlers()
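The conf.proxyFreq branch above forces a proxy change on every N-th request by clearing the current proxy before the handlers are rebuilt. A minimal sketch of that modulo-counter idea (names hypothetical):

    class ProxyScheduler:
        def __init__(self, proxy_freq, pick_new_proxy):
            self.proxy_freq = proxy_freq
            self.pick_new_proxy = pick_new_proxy
            self.counter = 0

        def on_request(self):
            self.counter += 1
            # Every proxy_freq-th request, discard the proxy so a fresh one is chosen
            if self.proxy_freq and self.counter % self.proxy_freq == 0:
                self.pick_new_proxy()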
# Handle test mode
if conf.dummy or conf.murphyRate and randomInt() % conf.murphyRate == 0:
if conf.murphyRate:
time.sleep(randomInt() % (MAX_MURPHY_SLEEP_TIME + 1))
@@ -327,6 +357,7 @@
return page, headers, code
# Handle cookies
if conf.liveCookies:
with kb.locks.liveCookies:
if not checkFile(conf.liveCookies, raiseOnError=False) or os.path.getsize(conf.liveCookies) == 0:
@@ -351,6 +382,7 @@
cookie = openFile(conf.liveCookies).read().strip()
cookie = re.sub(r"(?i)\ACookie:\s*", "", cookie)
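The live-cookie handling re-reads the cookie value from a file on each request and tolerates a pasted "Cookie:" header line by stripping the prefix. A standalone approximation (sqlmap's version also prompts and waits until the file is filled):

    import os
    import re

    def load_live_cookie(path):
        # Treat a missing or empty file as "no cookie yet", as the check above does
        if not os.path.isfile(path) or os.path.getsize(path) == 0:
            return None
        with open(path) as f:
            cookie = f.read().strip()
        # Accept either a bare value or a full "Cookie: ..." header line
        return re.sub(r"(?i)\ACookie:\s*", "", cookie)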
# Handle multipart requests
if multipart:
post = multipart
else:
@@ -361,20 +393,20 @@
post = _urllib.parse.unquote(post)
post = chunkSplitPostData(post)
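chunkSplitPostData rewrites the unquoted POST body for Transfer-Encoding: chunked. A simplified sketch of the wire format it must produce: hex chunk length, CRLF, payload, CRLF, terminated by a zero-length chunk (sqlmap randomizes chunk sizes; fixed sizing is used here for brevity):

    def chunk_encode(body, size=8):
        out = b""
        for i in range(0, len(body), size):
            piece = body[i:i + size]
            out += b"%x\r\n%s\r\n" % (len(piece), piece)  # <hex len>CRLF<data>CRLF
        return out + b"0\r\n\r\n"                          # zero-length terminator

    # chunk_encode(b"id=1&name=test") -> b"8\r\nid=1&nam\r\n6\r\ne=test\r\n0\r\n\r\n"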
# Handle WebSocket requests
webSocket = url.lower().startswith("ws")
if not _urllib.parse.urlsplit(url).netloc:
url = _urllib.parse.urljoin(conf.url, url)
# flag to know if we are dealing with the same target host
# Check whether this is the same target host
target = checkSameHost(url, conf.url)
if not retrying:
# Reset the number of connection retries
# Reset the connection retry counter
threadData.retriesCount = 0
# fix for known issue when urllib2 just skips the other part of provided
# url splitted with space char while urlencoding it in the later phase
# Fix spaces in the URL
url = url.replace(" ", "%20")
if "://" not in url:
@@ -396,8 +428,7 @@
raise404 = raise404 and not kb.ignoreNotFound
# support for non-latin (e.g. cyrillic) URLs as urllib/urllib2 doesn't
# support those by default
# Support URLs with non-Latin characters
url = asciifyUrl(url)
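asciifyUrl exists because urllib rejects non-ASCII URLs (e.g. Cyrillic hostnames or paths). A rough standalone equivalent, assuming IDNA for the host and percent-encoding for path and query; this approximates the idea and is not sqlmap's implementation:

    from urllib import parse

    def asciify(url):
        parts = parse.urlsplit(url)
        host = parts.hostname.encode("idna").decode()  # punycode the hostname
        if parts.port:
            host = "%s:%d" % (host, parts.port)
        path = parse.quote(parts.path, safe="/%")      # percent-encode non-ASCII
        query = parse.quote(parts.query, safe="=&%")
        return parse.urlunsplit((parts.scheme, host, path, query, parts.fragment))

    # asciify("http://пример.рф/стр?q=1")
    #   -> "http://xn--e1afmkfd.xn--p1ai/%D1%81%D1%82%D1%80?q=1"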
try:
@@ -440,7 +471,7 @@
requestMsg += " %s" % _http_client.HTTPConnection._http_vsn_str
# Prepare HTTP headers
# Prepare the HTTP headers
headers = forgeHeaders({HTTP_HEADER.COOKIE: cookie, HTTP_HEADER.USER_AGENT: ua, HTTP_HEADER.REFERER: referer, HTTP_HEADER.HOST: getHeader(dict(conf.httpHeaders), HTTP_HEADER.HOST) or getHostHeader(url)}, base=None if target else {})
if HTTP_HEADER.COOKIE in headers:
@@ -624,11 +655,11 @@
if not kb.proxyAuthHeader and getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION):
kb.proxyAuthHeader = getRequestHeader(req, HTTP_HEADER.PROXY_AUTHORIZATION)
# Return response object
# Return the response object
if response:
return conn, None, None
# Get HTTP response
# Get the HTTP response
if hasattr(conn, "redurl"):
page = (threadData.lastRedirectMsg[1] if kb.choices.redirect == REDIRECTION.NO else Connect._connReadProxy(conn)) if not skipRead else None
skipLogTraffic = kb.choices.redirect == REDIRECTION.NO
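When a redirect was declined (REDIRECTION.NO), the page shown is the stored redirect message rather than a fresh read of the followed target. sqlmap uses its own redirect handler; declining redirects with stock urllib looks roughly like this sketch:

    import urllib.error
    import urllib.request

    class NoRedirect(urllib.request.HTTPRedirectHandler):
        def redirect_request(self, req, fp, code, msg, headers, newurl):
            # Returning None declines the redirect; urllib then raises an
            # HTTPError that still carries the 3xx status and headers
            return None

    opener = urllib.request.build_opener(NoRedirect)
    try:
        opener.open("http://example.com/old")   # hypothetical redirecting URL
    except urllib.error.HTTPError as err:
        target = err.headers.get("Location")    # where we would have been sent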
