sqlmap/src/sqlmap-master/lib/core/bigarray.py


#!/usr/bin/env python
"""
Copyright (c) 2006-2024 sqlmap developers (https://sqlmap.org/)
See the file 'LICENSE' for copying permission
"""
# Try to import the faster cPickle module; fall back to pickle if unavailable
try:
    import cPickle as pickle
except:
    import pickle

import itertools
import os
import sys
import tempfile
import zlib
# Python 2/3 compatible xrange
from lib.core.compat import xrange
# Temporary file name prefix
from lib.core.enums import MKSTEMP_PREFIX
# Exception used for system-level errors (e.g. problems with temporary files)
from lib.core.exception import SqlmapSystemException
# Chunk size (in bytes) and zlib compression level used for disk caching
from lib.core.settings import BIGARRAY_CHUNK_SIZE
from lib.core.settings import BIGARRAY_COMPRESS_LEVEL

# Baseline size of a bare object(); defaults to 16 bytes where
# sys.getsizeof() is unavailable
try:
    DEFAULT_SIZE_OF = sys.getsizeof(object())
except TypeError:
    DEFAULT_SIZE_OF = 16

def _size_of(instance):
    """
    Returns total size of a given instance / object (in bytes)
    """

    retval = sys.getsizeof(instance, DEFAULT_SIZE_OF)

    # For dictionaries, recursively add the sizes of all keys and values
    if isinstance(instance, dict):
        retval += sum(_size_of(_) for _ in itertools.chain.from_iterable(instance.items()))
    # For other iterables, recursively add the sizes of all contained items
    elif hasattr(instance, "__iter__"):
        retval += sum(_size_of(_) for _ in instance if _ != instance)

    return retval
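
# Illustrative note: the recursion means that, e.g., _size_of(["ab", "cd"])
# amounts to sys.getsizeof() of the list itself plus _size_of() of each of
# the two contained strings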

class Cache(object):
    """
    Auxiliary class used for storing cached chunks
    """

    # Holds the chunk index, the unpickled chunk data and a "dirty" flag
    # marking whether the data was modified and needs to be re-dumped
    def __init__(self, index, data, dirty):
        self.index = index
        self.data = data
        self.dirty = dirty

class BigArray(list):
    """
    List-like class used for storing large amounts of data (disk cached)

    >>> _ = BigArray(xrange(100000))
    >>> _[20] = 0
    >>> _[99999]
    99999
    >>> _ += [0]
    >>> _[100000]
    0
    >>> _ = _ + [1]
    >>> _[-1]
    1
    >>> len([_ for _ in BigArray(xrange(100000))])
    100000
    """

    def __init__(self, items=None):
        self.chunks = [[]]
        self.chunk_length = sys.maxsize
        self.cache = None
        self.filenames = set()
        self._os_remove = os.remove
        self._size_counter = 0

        for item in (items or []):
            self.append(item)

    def __add__(self, value):
        retval = BigArray(self)

        for _ in value:
            retval.append(_)

        return retval

    def __iadd__(self, value):
        for _ in value:
            self.append(_)

        return self

    def append(self, value):
        self.chunks[-1].append(value)

        # Until the first chunk grows past BIGARRAY_CHUNK_SIZE bytes, measure
        # its in-memory size; once the limit is reached, freeze the per-chunk
        # item count for the lifetime of this array
        if self.chunk_length == sys.maxsize:
            self._size_counter += _size_of(value)
            if self._size_counter >= BIGARRAY_CHUNK_SIZE:
                self.chunk_length = len(self.chunks[-1])
                self._size_counter = None

        # Once the current chunk is full, dump it (compressed) to a temporary
        # file and start a new in-memory chunk
        if len(self.chunks[-1]) >= self.chunk_length:
            filename = self._dump(self.chunks[-1])
            self.chunks[-1] = filename
            self.chunks.append([])
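
    # Illustrative note: if the first chunk fills up after, say, 4096 items,
    # chunk_length stays 4096 from then on, so item y always lives in chunk
    # y // 4096 at offset y % 4096 (see __getitem__ / __setitem__ below)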

    def extend(self, value):
        for _ in value:
            self.append(_)

    # Pop the last element, reloading the previous chunk from its temporary
    # file when the in-memory tail chunk is exhausted
    def pop(self):
        if len(self.chunks[-1]) < 1:
            self.chunks.pop()
            try:
                with open(self.chunks[-1], "rb") as f:
                    self.chunks[-1] = pickle.loads(zlib.decompress(f.read()))
            except IOError as ex:
                errMsg = "exception occurred while retrieving data "
                errMsg += "from a temporary file ('%s')" % ex
                raise SqlmapSystemException(errMsg)

        return self.chunks[-1].pop()

    # Return the index of the first occurrence of value (like list.index())
    def index(self, value):
        for index in xrange(len(self)):
            if self[index] == value:
                return index

        raise ValueError("%s is not in list" % value)

    # Dump a chunk to a compressed temporary file, returning the file name
    def _dump(self, chunk):
        try:
            handle, filename = tempfile.mkstemp(prefix=MKSTEMP_PREFIX.BIG_ARRAY)
            self.filenames.add(filename)
            os.close(handle)

            with open(filename, "w+b") as f:
                f.write(zlib.compress(pickle.dumps(chunk, pickle.HIGHEST_PROTOCOL), BIGARRAY_COMPRESS_LEVEL))

            return filename
        except (OSError, IOError) as ex:
            errMsg = "exception occurred while storing data "
            errMsg += "to a temporary file ('%s'). Please " % ex
            errMsg += "make sure that there is enough disk space left. If problem persists, "
            errMsg += "try to set environment variable 'TEMP' to a location "
            errMsg += "writeable by the current user"
            raise SqlmapSystemException(errMsg)
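
    # Note: a chunk dumped by _dump() is restored with the inverse operation,
    # pickle.loads(zlib.decompress(f.read())), as done in pop() and
    # _checkcache()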

    # Ensure that the chunk at the given index is loaded into the cache,
    # flushing a dirty cached chunk back to disk first
    def _checkcache(self, index):
        if (self.cache and self.cache.index != index and self.cache.dirty):
            filename = self._dump(self.cache.data)
            self.chunks[self.cache.index] = filename

        if not (self.cache and self.cache.index == index):
            try:
                with open(self.chunks[index], "rb") as f:
                    self.cache = Cache(index, pickle.loads(zlib.decompress(f.read())), False)
            except Exception as ex:
                errMsg = "exception occurred while retrieving data "
                errMsg += "from a temporary file ('%s')" % ex
                raise SqlmapSystemException(errMsg)
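
    # Note: this amounts to a simple write-back cache - at most one dumped
    # chunk is kept decompressed in memory at a time, and its modifications
    # are flushed to disk only when a different chunk has to be loaded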

    # Support for pickling/unpickling the BigArray itself
    def __getstate__(self):
        return self.chunks, self.filenames

    def __setstate__(self, state):
        self.__init__()
        self.chunks, self.filenames = state

    def __getitem__(self, y):
        # Support negative indexing
        while y < 0:
            y += len(self)

        index = y // self.chunk_length
        offset = y % self.chunk_length
        chunk = self.chunks[index]

        # In-memory chunks are accessed directly, dumped ones via the cache
        if isinstance(chunk, list):
            return chunk[offset]
        else:
            self._checkcache(index)
            return self.cache.data[offset]

    def __setitem__(self, y, value):
        index = y // self.chunk_length
        offset = y % self.chunk_length
        chunk = self.chunks[index]

        if isinstance(chunk, list):
            chunk[offset] = value
        else:
            self._checkcache(index)
            self.cache.data[offset] = value
            self.cache.dirty = True

    def __repr__(self):
        return "%s%s" % ("..." if len(self.chunks) > 1 else "", self.chunks[-1].__repr__())

    def __iter__(self):
        for i in xrange(len(self)):
            try:
                yield self[i]
            except IndexError:
                break

    def __len__(self):
        # All chunks except the last one hold exactly chunk_length items
        return len(self.chunks[-1]) if len(self.chunks) == 1 else (len(self.chunks) - 1) * self.chunk_length + len(self.chunks[-1])
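
# Minimal usage sketch (illustrative; assumes execution from the sqlmap root
# directory so that the lib.core imports above resolve):
if __name__ == "__main__":
    array = BigArray(xrange(100000))
    array.append("foo")
    assert array[-1] == "foo"
    assert array.pop() == "foo"
    assert len(array) == 100000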