v11.11
This commit is contained in:
parent
74caa39ed9
commit
51e9a59f2c
|
@ -11,9 +11,7 @@ Licenses
|
|||
* Goldleaf is licensed under [GPLv3](https://github.com/XorTroll/Goldleaf/blob/master/LICENSE)
|
||||
* Sys-Netcheat is licensed under [GPLv3](https://github.com/jakibaki/sys-netcheat/blob/master/LICENSE)
|
||||
* Hekate is licensed under [GPLv2](https://github.com/CTCaer/hekate/blob/master/LICENSE)
|
||||
* Checkpoint is licensed under [GPLv3](https://github.com/BernardoGiordano/Checkpoint/blob/master/LICENSE)
|
||||
* Noexes is licensed under [GPLv3](https://github.com/mdbell/Noexes/blob/master/LICENSE)
|
||||
* Ldn_mitm is licensed under [GPLv2](https://github.com/spacemeowx2/ldn_mitm/blob/master/LICENSE)
|
||||
* Lockpick is licensed under [GPLv2](https://github.com/shchmue/Lockpick/blob/master/LICENSE)
|
||||
|
||||
All patches made to the original software respect their original license.
|
||||
|
|
|
@ -8,9 +8,7 @@ sys-ftpd | FTPD sys-module by jakibaki
|
|||
sys-netcheat | netcheat sys-module by jakibaki
|
||||
edizon | EdiZon
|
||||
appstore | AppstoreNX
|
||||
Checkpoint | Save File Manager
|
||||
KosmosUpdater | A homebrew for directly updating Kosmos
|
||||
hbmenu | The homebrew menu
|
||||
kosmos_toolkit | Homebrew for modifying CFW settings
|
||||
ldn_mitm | This allows **online** ad-hoc / lan-play for games such as Smash and Mario Kart 8!
|
||||
lockpick | This allows dumping all keys used by the Switch
|
||||
|
|
Binary file not shown.
File diff suppressed because it is too large
Load diff
3186
Modules/edizon/EdiZon/editor/01007E3006DDA000.json
Normal file
3186
Modules/edizon/EdiZon/editor/01007E3006DDA000.json
Normal file
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
85
Modules/edizon/EdiZon/editor/scripts/kirbysa.py
Normal file
85
Modules/edizon/EdiZon/editor/scripts/kirbysa.py
Normal file
|
@ -0,0 +1,85 @@
|
|||
# kirbysa.py - by Ac_K
# EdiZon editor script for Kirby Star Allies save files.

import edizon

# Raw save file bytes handed over by the EdiZon host.
saveFileBuffer = edizon.getSaveFileBuffer()
# Index (0-2) of the save slot currently being edited.
save_slot_id = 0
# The three 1 MiB save slots live back-to-back after a 1 MiB header.
# NOTE(review): assumes the buffer is at least 4 MiB — confirm against the game's save layout.
save_slots = [saveFileBuffer[0x100000:0x200000], saveFileBuffer[0x200000:0x300000], saveFileBuffer[0x300000:0x400000]]
|
||||
|
||||
def find_offset(section_name, item_name):
    """Locate *item_name* inside section *section_name* of the active slot.

    Returns the offset of the item relative to the start of the slot.
    Raises ValueError when either marker string is not present.
    """
    slot = save_slots[save_slot_id]
    base = slot.index(section_name.encode())
    # Only search for the item within a 4 KiB window after the section tag.
    window = slot[base:base + 0x1000]
    return base + window.index(item_name.encode())
|
||||
|
||||
def check_slot_exist(index):
    """Return True when save slot *index* (0-2) carries a valid "meta" tag."""
    slot_bases = {0: 0x100000, 1: 0x200000, 2: 0x300000}
    if index not in slot_bases:
        return False
    # A populated slot starts with a header whose bytes 2..5 spell "meta".
    start = slot_bases[index] + 2
    return saveFileBuffer[start:start + 4].decode() == "meta"
|
||||
|
||||
def getDummyValue():
    """Report the active save slot to EdiZon as a 1-based number."""
    return 1 + save_slot_id
|
||||
|
||||
def setDummyValue(value):
    """Select the save slot from a 1-based value; fall back to slot 0 if absent."""
    global save_slot_id
    requested = value - 1
    save_slot_id = requested if check_slot_exist(requested) else 0
|
||||
|
||||
def getValueFromSaveFile():
    """Read a little-endian integer from the active slot.

    EdiZon supplies the arguments: string args are the section tag, the item
    tag and a hex padding; the first int arg is the value size in bytes.
    """
    str_args = edizon.getStrArgs()
    section, item = str_args[0], str_args[1]
    padding = int(str_args[2], 16)
    size = edizon.getIntArgs()[0]

    start = find_offset(section, item) + padding
    slot = save_slots[save_slot_id]

    # Assemble the value byte by byte, least-significant first.
    result = 0
    for shift in range(size):
        result |= slot[start + shift] << (8 * shift)
    return result
|
||||
|
||||
def setValueInSaveFile(value):
    """Write *value* little-endian into the active slot.

    Mirrors getValueFromSaveFile: section/item/padding come from the string
    args, the byte count from the first int arg.
    """
    global save_slots

    str_args = edizon.getStrArgs()
    section, item = str_args[0], str_args[1]
    padding = int(str_args[2], 16)
    size = edizon.getIntArgs()[0]

    start = find_offset(section, item) + padding
    # NOTE(review): assumes the slot buffer is mutable (bytearray) — confirm.
    for shift in range(size):
        save_slots[save_slot_id][start + shift] = (value >> (8 * shift)) & 0xFF
|
||||
|
||||
def getModifiedSaveFile():
    """Reassemble the full save: untouched header plus the three (edited) slots."""
    header = saveFileBuffer[:0x100000]
    return header + save_slots[0] + save_slots[1] + save_slots[2]
|
434
Modules/edizon/EdiZon/editor/scripts/lib/python3.5/byml.py
Normal file
434
Modules/edizon/EdiZon/editor/scripts/lib/python3.5/byml.py
Normal file
|
@ -0,0 +1,434 @@
|
|||
# Copyright 2018 leoetlino <leo@leolam.fr>
|
||||
# Licensed under GPLv2+
|
||||
from enum import IntEnum
|
||||
from sortedcontainers import SortedDict # type: ignore
|
||||
from collections import OrderedDict # used to fix a pyNX issue who don't appear on PC!
|
||||
import struct
|
||||
import typing
|
||||
import io
|
||||
|
||||
class NodeType(IntEnum):
    """Node type tags as stored in a BYML binary document."""
    STRING = 0xa0
    ARRAY = 0xc0
    HASH = 0xc1
    STRING_TABLE = 0xc2
    BOOL = 0xd0
    INT = 0xd1
    FLOAT = 0xd2
    UINT = 0xd3
    INT64 = 0xd4
    UINT64 = 0xd5
    DOUBLE = 0xd6
    NULL = 0xff
|
||||
|
||||
_NUL_CHAR = b'\x00'
|
||||
|
||||
def _get_unpack_endian_character(big_endian: bool):
    """Return the struct byte-order prefix matching the document endianness."""
    if big_endian:
        return '>'
    return '<'
|
||||
|
||||
def _align_up(value: int, size: int) -> int:
    """Round *value* up to the next multiple of *size* (ceiling division)."""
    return -(-value // size) * size
|
||||
|
||||
def _is_container_type(node_type: int) -> bool:
    """True for node types that hold children (arrays and hashes)."""
    return node_type in (NodeType.ARRAY, NodeType.HASH)
|
||||
|
||||
def _is_value_type(node_type: NodeType) -> bool:
    """True for node types whose value is stored inline in the 4-byte slot."""
    if node_type in (NodeType.STRING, NodeType.NULL):
        return True
    return NodeType.BOOL <= node_type <= NodeType.UINT
|
||||
|
||||
# Nintendo uses uint nodes for some crc32 hashes. The problem is that they seem to be using
# uints randomly and the signed getters in their byml library will not look at uint nodes.
# So uints will be represented by specific classes in order to not lose type information.
# And while we are at it, let's use classes for other types to avoid unintended type conversions.
class Int(int):
    """Signed 32-bit integer node value."""
    pass
class Float(float):
    """32-bit float node value."""
    pass
class UInt(int):
    """Unsigned 32-bit integer node value."""
    pass
class Int64(int):
    """Signed 64-bit integer node value."""
    pass
class UInt64(int):
    """Unsigned 64-bit integer node value."""
    pass
class Double(float):
    """64-bit float node value."""
    pass
|
||||
|
||||
class Byml:
    """A simple BYMLv2 parser that handles both big endian and little endian documents."""

    def __init__(self, data: bytes) -> None:
        """Validate the header and pre-parse the hash-key and string tables.

        Raises ValueError on a bad magic or an unsupported version.
        """
        self._data = data

        # The magic doubles as a byte-order mark: b'BY' = big endian, b'YB' = little endian.
        magic = self._data[0:2]
        if magic == b'BY':
            self._be = True
        elif magic == b'YB':
            self._be = False
        else:
            raise ValueError("Invalid magic: %s (expected 'BY' or 'YB')" % magic)

        version = self._read_u16(2)
        if not (1 <= version <= 3):
            raise ValueError("Invalid version: %u (expected 1-3)" % version)
        if version == 1 and self._be:
            raise ValueError("Invalid version: %u-wiiu (expected 1-3)" % version)

        self._hash_key_table_offset = self._read_u32(4)
        self._string_table_offset = self._read_u32(8)

        # Either table may be absent (offset 0) when the document has no keys/strings.
        if self._hash_key_table_offset != 0:
            self._hash_key_table = self._parse_string_table(self._hash_key_table_offset)
        if self._string_table_offset != 0:
            self._string_table = self._parse_string_table(self._string_table_offset)

    def parse(self) -> typing.Union[list, dict, None]:
        """Parse the BYML and get the root node with all children."""
        root_node_offset = self._read_u32(12)
        if root_node_offset == 0:
            return None

        node_type = self._data[root_node_offset]
        if not _is_container_type(node_type):
            raise ValueError("Invalid root node: expected array or dict, got type 0x%x" % node_type)
        # Container parsers re-read the u32 at the given offset, so we pass 12
        # (the header field holding the root offset) rather than the offset itself.
        return self._parse_node(node_type, 12)

    def _parse_string_table(self, offset) -> typing.List[str]:
        """Read a 0xc2 string-table node into a list of strings."""
        if self._data[offset] != NodeType.STRING_TABLE:
            raise ValueError("Invalid node type: 0x%x (expected 0xc2)" % self._data[offset])

        array = list()
        size = self._read_u24(offset + 1)
        for i in range(size):
            # Each entry is a u32 offset relative to the table start.
            string_offset = offset + self._read_u32(offset + 4 + 4*i)
            array.append(self._read_string(string_offset))
        return array

    def _parse_node(self, node_type: int, offset: int):
        """Dispatch on *node_type*; *offset* points at the node's 4-byte value slot."""
        if node_type == NodeType.STRING:
            return self._parse_string_node(self._read_u32(offset))
        if node_type == NodeType.ARRAY:
            return self._parse_array_node(self._read_u32(offset))
        if node_type == NodeType.HASH:
            return self._parse_hash_node(self._read_u32(offset))
        if node_type == NodeType.BOOL:
            return self._parse_bool_node(offset)
        if node_type == NodeType.INT:
            return self._parse_int_node(offset)
        if node_type == NodeType.FLOAT:
            return self._parse_float_node(offset)
        if node_type == NodeType.UINT:
            return self._parse_uint_node(offset)
        # 64-bit and double nodes are too large for the inline slot; the slot
        # instead holds an offset to the actual value.
        if node_type == NodeType.INT64:
            return self._parse_int64_node(self._read_u32(offset))
        if node_type == NodeType.UINT64:
            return self._parse_uint64_node(self._read_u32(offset))
        if node_type == NodeType.DOUBLE:
            return self._parse_double_node(self._read_u32(offset))
        if node_type == NodeType.NULL:
            return None
        raise ValueError("Unknown node type: 0x%x" % node_type)

    def _parse_string_node(self, index: int) -> str:
        # String nodes store an index into the shared string table.
        return self._string_table[index]

    def _parse_array_node(self, offset: int) -> list:
        """Parse an array node: a type byte per element, then 4-byte value slots."""
        size = self._read_u24(offset + 1)
        array = list()
        # Value slots start after the (4-byte aligned) run of element type bytes.
        value_array_offset = offset + _align_up(size, 4) + 4
        for i in range(size):
            node_type = self._data[offset + 4 + i]
            array.append(self._parse_node(node_type, value_array_offset + 4*i))
        return array

    def _parse_hash_node(self, offset: int) -> dict:
        """Parse a hash node: 8-byte entries of (key index u24, type u8, value u32)."""
        size = self._read_u24(offset + 1)
        result = OrderedDict()
        for i in range(size):
            entry_offset = offset + 4 + 8*i
            string_index = self._read_u24(entry_offset + 0)
            name = self._hash_key_table[string_index]

            node_type = self._data[entry_offset + 3]
            result[name] = self._parse_node(node_type, entry_offset + 4)

        return result

    def _parse_bool_node(self, offset: int) -> bool:
        return self._parse_uint_node(offset) != 0

    def _parse_int_node(self, offset: int) -> Int:
        return Int(struct.unpack_from(_get_unpack_endian_character(self._be) + 'i', self._data, offset)[0])

    def _parse_float_node(self, offset: int) -> Float:
        return Float(struct.unpack_from(_get_unpack_endian_character(self._be) + 'f', self._data, offset)[0])

    def _parse_uint_node(self, offset: int) -> UInt:
        return UInt(self._read_u32(offset))

    def _parse_int64_node(self, offset: int) -> Int64:
        return Int64(struct.unpack_from(_get_unpack_endian_character(self._be) + 'q', self._data, offset)[0])

    def _parse_uint64_node(self, offset: int) -> UInt64:
        return UInt64(struct.unpack_from(_get_unpack_endian_character(self._be) + 'Q', self._data, offset)[0])

    def _parse_double_node(self, offset: int) -> Double:
        return Double(struct.unpack_from(_get_unpack_endian_character(self._be) + 'd', self._data, offset)[0])

    def _read_u16(self, offset: int) -> int:
        return struct.unpack_from(_get_unpack_endian_character(self._be) + 'H', self._data, offset)[0]

    def _read_u24(self, offset: int) -> int:
        # struct has no 3-byte format, so pad to 4 bytes on the significant side.
        if self._be:
            return struct.unpack('>I', _NUL_CHAR + self._data[offset:offset+3])[0]
        return struct.unpack('<I', self._data[offset:offset+3] + _NUL_CHAR)[0]

    def _read_u32(self, offset: int) -> int:
        return struct.unpack_from(_get_unpack_endian_character(self._be) + 'I', self._data, offset)[0]

    def _read_string(self, offset: int) -> str:
        # Strings are NUL-terminated UTF-8.
        end = self._data.find(_NUL_CHAR, offset)
        return self._data[offset:end].decode('utf-8')
|
||||
|
||||
class _PlaceholderOffsetWriter:
    """Remembers a stream position where a u32 offset will be patched in later."""

    def __init__(self, stream, parent) -> None:
        self._stream = stream
        self._offset = stream.tell()  # position reserved for the offset value
        self._parent = parent         # supplies endian-aware _u32 packing

    def write_placeholder(self) -> None:
        """Reserve four bytes with a recognizable 0xFFFFFFFF filler."""
        self._stream.write(self._parent._u32(0xffffffff))

    def write_offset(self, offset: int, base: int = 0) -> None:
        """Patch the reserved slot with ``offset - base``, preserving the cursor."""
        saved = self._stream.tell()
        self._stream.seek(self._offset)
        self._stream.write(self._parent._u32(offset - base))
        self._stream.seek(saved)

    def write_current_offset(self, base: int = 0) -> None:
        """Patch the reserved slot with the stream's current write position."""
        self.write_offset(self._stream.tell(), base)
|
||||
|
||||
_NodeToOffsetMap = typing.Dict[typing.Tuple[NodeType, typing.Any], int]
|
||||
def _freeze_object(o):
    """Recursively convert *o* into a hashable equivalent (for node-dedup keys).

    dicts become frozensets of (key, frozen value) pairs, lists become tuples,
    and every other value passes through unchanged.
    """
    if isinstance(o, dict):
        return frozenset((k, _freeze_object(v)) for k, v in o.items())
    if isinstance(o, list):
        return tuple(_freeze_object(v) for v in o)
    return o
|
||||
|
||||
class Writer:
    """BYMLv2 writer."""

    def __init__(self, data, be=False, version=2) -> None:
        """Prepare serialization state for *data* (a list or dict tree).

        Raises ValueError for non-container roots or unsupported versions.
        """
        self._data = data
        self._be = be
        self._version = version

        if not isinstance(data, list) and not isinstance(data, dict):
            raise ValueError("Data should be a dict or a list")

        if not (1 <= version <= 3):
            raise ValueError("Invalid version: %u (expected 1-3)" % version)
        if version == 1 and be:
            raise ValueError("Invalid version: %u-wiiu (expected 1-3)" % version)

        # Collect every hash key and every string value ahead of time so the
        # two shared tables can be emitted before any node referencing them.
        self._hash_key_table = SortedDict()
        self._string_table = SortedDict()
        self._make_string_table(self._data, self._hash_key_table, self._string_table)
        # Nintendo seems to sort entries in alphabetical order.
        self._sort_string_table(self._hash_key_table)
        self._sort_string_table(self._string_table)

    def write(self, stream) -> None:
        """Serialize the document to *stream* (a seekable binary stream)."""
        # Header
        stream.write(b'BY' if self._be else b'YB')
        stream.write(self._u16(self._version))
        # Table offsets are unknown until the tables are written, so reserve
        # placeholder u32 slots and patch them afterwards.
        if self._hash_key_table:
            hash_key_table_offset_writer = self._write_placeholder_offset(stream)
        else:
            stream.write(self._u32(0))
        if self._string_table:
            string_table_offset_writer = self._write_placeholder_offset(stream)
        else:
            stream.write(self._u32(0))
        root_node_offset_writer = self._write_placeholder_offset(stream)

        # Hash key table
        if self._hash_key_table:
            hash_key_table_offset_writer.write_current_offset()
            self._write_string_table(stream, self._hash_key_table)
            stream.seek(_align_up(stream.tell(), 4))

        # String table
        if self._string_table:
            string_table_offset_writer.write_current_offset()
            self._write_string_table(stream, self._string_table)
            stream.seek(_align_up(stream.tell(), 4))

        # Root node
        root_node_offset_writer.write_current_offset()
        # Nintendo attempts to minimize document size by reusing nodes where possible.
        # Let us do so too.
        node_to_offset_map = OrderedDict() # : _NodeToOffsetMap = dict()
        self._write_nonvalue_node(stream, self._data, node_to_offset_map)

        stream.seek(_align_up(stream.tell(), 4))

    def _make_string_table(self, data, hash_key_table, string_table):
        """Recursively register all dict keys and string values in the tables."""
        if isinstance(data, str) and data not in string_table:
            string_table[data] = 0xffffffff
        elif isinstance(data, list):
            for item in data:
                self._make_string_table(item, hash_key_table, string_table)
        elif isinstance(data, dict):
            for (key, value) in data.items():
                if key not in hash_key_table:
                    hash_key_table[key] = 0xffffffff
                self._make_string_table(value, hash_key_table, string_table)

    def _sort_string_table(self, table):
        # Replace the placeholder values with each key's final sorted index.
        for (i, key) in enumerate(table.keys()):
            table[key] = i

    def _write_string_table(self, stream, table):
        """Emit a 0xc2 string-table node: offsets first, then the NUL-terminated strings."""
        base = stream.tell()
        stream.write(self._u8(NodeType.STRING_TABLE))
        stream.write(self._u24(len(table)))
        offset_writers = []#: typing.List[_PlaceholderOffsetWriter] = []

        for i in range(len(table)):
            offset_writers.append(self._write_placeholder_offset(stream))
        # One extra offset marks the end of the last string.
        last_offset_writer = self._write_placeholder_offset(stream)

        for (string, offset_writer) in zip(table.keys(), offset_writers):
            offset_writer.write_current_offset(base)
            stream.write(bytes(string, "utf8"))
            stream.write(_NUL_CHAR)
        last_offset_writer.write_current_offset(base)

    def _write_nonvalue_node(self, stream, data, node_to_offset_map) -> None:
        """Emit a container or big-value node; children are written after the
        container body, with previously-emitted identical nodes reused."""
        nonvalue_nodes = []#: typing.List[typing.Tuple[typing.Any, _PlaceholderOffsetWriter]] = []

        if isinstance(data, list):
            stream.write(self._u8(NodeType.ARRAY))
            stream.write(self._u24(len(data)))
            # Element type bytes come first, padded to 4-byte alignment.
            for item in data:
                stream.write(self._u8(self._to_byml_type(item)))
            stream.seek(_align_up(stream.tell(), 4))
            # Then one 4-byte slot per element: inline value or patched offset.
            for item in data:
                if _is_value_type(self._to_byml_type(item)):
                    stream.write(self._to_byml_value(item))
                else:
                    nonvalue_nodes.append((item, self._write_placeholder_offset(stream)))
        elif isinstance(data, dict):
            stream.write(self._u8(NodeType.HASH))
            stream.write(self._u24(len(data)))
            for (key, value) in data.items():
                stream.write(self._u24(self._hash_key_table[key]))
                node_type = self._to_byml_type(value)
                stream.write(self._u8(node_type))
                if _is_value_type(node_type):
                    stream.write(self._to_byml_value(value))
                else:
                    nonvalue_nodes.append((value, self._write_placeholder_offset(stream)))
        elif isinstance(data, UInt64):
            stream.write(self._u64(data))
        elif isinstance(data, Int64):
            stream.write(self._s64(data))
        elif isinstance(data, Double):
            stream.write(self._f64(data))
        elif isinstance(data, int) or isinstance(data, float):
            raise ValueError("Implicit conversions from int/float are not supported -- "
                             "please use Int/Float/UInt/Int64/UInt64/Double")
        else:
            raise ValueError("Invalid non-value type")

        # Now write the deferred children, deduplicating structurally equal nodes.
        for (data, offset_writer) in nonvalue_nodes:
            node = (self._to_byml_type(data), _freeze_object(data))
            if node in node_to_offset_map:
                offset_writer.write_offset(node_to_offset_map[node])
            else:
                offset_writer.write_current_offset()
                node_to_offset_map[node] = stream.tell()
                self._write_nonvalue_node(stream, data, node_to_offset_map)

    def _to_byml_type(self, data) -> NodeType:
        """Map a Python value to its BYML node type tag."""
        if isinstance(data, str):
            return NodeType.STRING
        if isinstance(data, list):
            return NodeType.ARRAY
        if isinstance(data, dict):
            return NodeType.HASH
        # bool must be tested before the wrapper int subclasses.
        if isinstance(data, bool):
            return NodeType.BOOL
        if isinstance(data, Int):
            return NodeType.INT
        if isinstance(data, Float):
            return NodeType.FLOAT
        if isinstance(data, UInt):
            return NodeType.UINT
        if isinstance(data, Int64):
            return NodeType.INT64
        if isinstance(data, UInt64):
            return NodeType.UINT64
        if isinstance(data, Double):
            return NodeType.DOUBLE
        if data is None:
            return NodeType.NULL
        if isinstance(data, int) or isinstance(data, float):
            raise ValueError("Implicit conversions from int/float are not supported -- "
                             "please use Int/Float/UInt/Int64/UInt64/Double")
        raise ValueError("Invalid value type")

    def _to_byml_value(self, value) -> bytes:
        """Pack an inline (4-byte) node value."""
        if isinstance(value, str):
            return self._u32(self._string_table[value])
        if isinstance(value, bool):
            return self._u32(1 if value != 0 else 0)
        if isinstance(value, Int):
            return self._s32(value)
        if isinstance(value, UInt):
            return self._u32(value)
        if isinstance(value, Float):
            return self._f32(value)
        if value is None:
            return self._u32(0)
        raise ValueError("Invalid value type")

    def _write_placeholder_offset(self, stream) -> _PlaceholderOffsetWriter:
        # Reserve a u32 slot and return the writer used to patch it later.
        p = _PlaceholderOffsetWriter(stream, self)
        p.write_placeholder()
        return p

    # Endian-aware packing helpers.
    def _u8(self, value) -> bytes:
        return struct.pack('B', value)
    def _u16(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'H', value)
    def _s16(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'h', value)
    def _u24(self, value) -> bytes:
        # Pack as u32, then drop the most significant byte.
        b = struct.pack(_get_unpack_endian_character(self._be) + 'I', value)
        return b[1:] if self._be else b[:-1]
    def _u32(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'I', value)
    def _s32(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'i', value)
    def _u64(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'Q', value)
    def _s64(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'q', value)
    def _f32(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'f', value)
    def _f64(self, value) -> bytes:
        return struct.pack(_get_unpack_endian_character(self._be) + 'd', value)
|
||||
|
||||
# Helper method by Ac_K
|
||||
|
||||
def bymlbuffer_to_object(input_buffer):
    """Deserialize a BYML byte buffer into plain Python objects."""
    return Byml(input_buffer).parse()
|
||||
|
||||
def object_to_bymlbuffer(input_buffer, endian, version):
    """Serialize a list/dict tree into an in-memory BYML stream.

    Returns the io.BytesIO object holding the serialized document.
    """
    output = io.BytesIO()
    writer = Writer(input_buffer, be=endian, version=version)
    writer.write(output)
    return output
|
|
@ -0,0 +1,74 @@
|
|||
"""Sorted Containers -- Sorted List, Sorted Dict, Sorted Set
|
||||
|
||||
Sorted Containers is an Apache2 licensed containers library, written in
|
||||
pure-Python, and fast as C-extensions.
|
||||
|
||||
Python's standard library is great until you need a sorted collections
|
||||
type. Many will attest that you can get really far without one, but the moment
|
||||
you **really need** a sorted list, dict, or set, you're faced with a dozen
|
||||
different implementations, most using C-extensions without great documentation
|
||||
and benchmarking.
|
||||
|
||||
In Python, we can do better. And we can do it in pure-Python!
|
||||
|
||||
::
|
||||
|
||||
>>> from sortedcontainers import SortedList
|
||||
>>> sl = SortedList(['e', 'a', 'c', 'd', 'b'])
|
||||
>>> sl
|
||||
SortedList(['a', 'b', 'c', 'd', 'e'])
|
||||
>>> sl *= 1000000
|
||||
>>> sl.count('c')
|
||||
1000000
|
||||
>>> sl[-3:]
|
||||
['e', 'e', 'e']
|
||||
>>> from sortedcontainers import SortedDict
|
||||
>>> sd = SortedDict({'c': 3, 'a': 1, 'b': 2})
|
||||
>>> sd
|
||||
SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> sd.popitem(index=-1)
|
||||
('c', 3)
|
||||
>>> from sortedcontainers import SortedSet
|
||||
>>> ss = SortedSet('abracadabra')
|
||||
>>> ss
|
||||
SortedSet(['a', 'b', 'c', 'd', 'r'])
|
||||
>>> ss.bisect_left('c')
|
||||
2
|
||||
|
||||
Sorted Containers takes all of the work out of Python sorted types - making
|
||||
your deployment and use of Python easy. There's no need to install a C compiler
|
||||
or pre-build and distribute custom extensions. Performance is a feature and
|
||||
testing has 100% coverage with unit tests and hours of stress.
|
||||
|
||||
:copyright: (c) 2014-2018 by Grant Jenks.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
from .sortedlist import SortedList, SortedKeyList, SortedListWithKey
|
||||
from .sortedset import SortedSet
|
||||
from .sorteddict import (
|
||||
SortedDict,
|
||||
SortedKeysView,
|
||||
SortedItemsView,
|
||||
SortedValuesView,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'SortedList',
|
||||
'SortedKeyList',
|
||||
'SortedListWithKey',
|
||||
'SortedDict',
|
||||
'SortedKeysView',
|
||||
'SortedItemsView',
|
||||
'SortedValuesView',
|
||||
'SortedSet',
|
||||
]
|
||||
|
||||
__title__ = 'sortedcontainers'
|
||||
__version__ = '2.1.0'
|
||||
__build__ = 0x020100
|
||||
__author__ = 'Grant Jenks'
|
||||
__license__ = 'Apache 2.0'
|
||||
__copyright__ = '2014-2018, Grant Jenks'
|
|
@ -0,0 +1,800 @@
|
|||
"""Sorted Dict
|
||||
==============
|
||||
|
||||
:doc:`Sorted Containers<index>` is an Apache2 licensed Python sorted
|
||||
collections library, written in pure-Python, and fast as C-extensions. The
|
||||
:doc:`introduction<introduction>` is the best way to get started.
|
||||
|
||||
Sorted dict implementations:
|
||||
|
||||
.. currentmodule:: sortedcontainers
|
||||
|
||||
* :class:`SortedDict`
|
||||
* :class:`SortedKeysView`
|
||||
* :class:`SortedItemsView`
|
||||
* :class:`SortedValuesView`
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from .sortedlist import SortedList, recursive_repr
|
||||
from .sortedset import SortedSet
|
||||
|
||||
###############################################################################
|
||||
# BEGIN Python 2/3 Shims
|
||||
###############################################################################
|
||||
|
||||
try:
|
||||
from collections.abc import ItemsView, KeysView, ValuesView, Sequence
|
||||
except ImportError:
|
||||
from collections import ItemsView, KeysView, ValuesView, Sequence
|
||||
|
||||
###############################################################################
|
||||
# END Python 2/3 Shims
|
||||
###############################################################################
|
||||
|
||||
|
||||
class SortedDict(dict):
|
||||
"""Sorted dict is a sorted mutable mapping.
|
||||
|
||||
Sorted dict keys are maintained in sorted order. The design of sorted dict
|
||||
is simple: sorted dict inherits from dict to store items and maintains a
|
||||
sorted list of keys.
|
||||
|
||||
Sorted dict keys must be hashable and comparable. The hash and total
|
||||
ordering of keys must not change while they are stored in the sorted dict.
|
||||
|
||||
Mutable mapping methods:
|
||||
|
||||
* :func:`SortedDict.__getitem__` (inherited from dict)
|
||||
* :func:`SortedDict.__setitem__`
|
||||
* :func:`SortedDict.__delitem__`
|
||||
* :func:`SortedDict.__iter__`
|
||||
* :func:`SortedDict.__len__` (inherited from dict)
|
||||
|
||||
Methods for adding items:
|
||||
|
||||
* :func:`SortedDict.setdefault`
|
||||
* :func:`SortedDict.update`
|
||||
|
||||
Methods for removing items:
|
||||
|
||||
* :func:`SortedDict.clear`
|
||||
* :func:`SortedDict.pop`
|
||||
* :func:`SortedDict.popitem`
|
||||
|
||||
Methods for looking up items:
|
||||
|
||||
* :func:`SortedDict.__contains__` (inherited from dict)
|
||||
* :func:`SortedDict.get` (inherited from dict)
|
||||
* :func:`SortedDict.peekitem`
|
||||
|
||||
Methods for views:
|
||||
|
||||
* :func:`SortedDict.keys`
|
||||
* :func:`SortedDict.items`
|
||||
* :func:`SortedDict.values`
|
||||
|
||||
Methods for miscellany:
|
||||
|
||||
* :func:`SortedDict.copy`
|
||||
* :func:`SortedDict.fromkeys`
|
||||
* :func:`SortedDict.__reversed__`
|
||||
* :func:`SortedDict.__eq__` (inherited from dict)
|
||||
* :func:`SortedDict.__ne__` (inherited from dict)
|
||||
* :func:`SortedDict.__repr__`
|
||||
* :func:`SortedDict._check`
|
||||
|
||||
Sorted list methods available (applies to keys):
|
||||
|
||||
* :func:`SortedList.bisect_left`
|
||||
* :func:`SortedList.bisect_right`
|
||||
* :func:`SortedList.count`
|
||||
* :func:`SortedList.index`
|
||||
* :func:`SortedList.irange`
|
||||
* :func:`SortedList.islice`
|
||||
* :func:`SortedList._reset`
|
||||
|
||||
Additional sorted list methods available, if key-function used:
|
||||
|
||||
* :func:`SortedKeyList.bisect_key_left`
|
||||
* :func:`SortedKeyList.bisect_key_right`
|
||||
* :func:`SortedKeyList.irange_key`
|
||||
|
||||
Sorted dicts may only be compared for equality and inequality.
|
||||
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize sorted dict instance.
|
||||
|
||||
Optional key-function argument defines a callable that, like the `key`
|
||||
argument to the built-in `sorted` function, extracts a comparison key
|
||||
from each dictionary key. If no function is specified, the default
|
||||
compares the dictionary keys directly. The key-function argument must
|
||||
be provided as a positional argument and must come before all other
|
||||
arguments.
|
||||
|
||||
Optional iterable argument provides an initial sequence of pairs to
|
||||
initialize the sorted dict. Each pair in the sequence defines the key
|
||||
and corresponding value. If a key is seen more than once, the last
|
||||
value associated with it is stored in the new sorted dict.
|
||||
|
||||
Optional mapping argument provides an initial mapping of items to
|
||||
initialize the sorted dict.
|
||||
|
||||
If keyword arguments are given, the keywords themselves, with their
|
||||
associated values, are added as items to the dictionary. If a key is
|
||||
specified both in the positional argument and as a keyword argument,
|
||||
the value associated with the keyword is stored in the
|
||||
sorted dict.
|
||||
|
||||
Sorted dict keys must be hashable, per the requirement for Python's
|
||||
dictionaries. Keys (or the result of the key-function) must also be
|
||||
comparable, per the requirement for sorted lists.
|
||||
|
||||
>>> d = {'alpha': 1, 'beta': 2}
|
||||
>>> SortedDict([('alpha', 1), ('beta', 2)]) == d
|
||||
True
|
||||
>>> SortedDict({'alpha': 1, 'beta': 2}) == d
|
||||
True
|
||||
>>> SortedDict(alpha=1, beta=2) == d
|
||||
True
|
||||
|
||||
"""
|
||||
if args and (args[0] is None or callable(args[0])):
|
||||
_key = self._key = args[0]
|
||||
args = args[1:]
|
||||
else:
|
||||
_key = self._key = None
|
||||
|
||||
self._list = SortedList(key=_key)
|
||||
|
||||
# Calls to super() are expensive so cache references to dict methods on
|
||||
# sorted dict instances.
|
||||
|
||||
_dict = super(SortedDict, self)
|
||||
self._dict_clear = _dict.clear
|
||||
self._dict_delitem = _dict.__delitem__
|
||||
self._dict_iter = _dict.__iter__
|
||||
self._dict_pop = _dict.pop
|
||||
self._dict_setitem = _dict.__setitem__
|
||||
self._dict_update = _dict.update
|
||||
|
||||
# Reaching through ``self._list`` repeatedly adds unnecessary overhead
|
||||
# so cache references to sorted list methods.
|
||||
|
||||
_list = self._list
|
||||
self._list_add = _list.add
|
||||
self._list_clear = _list.clear
|
||||
self._list_iter = _list.__iter__
|
||||
self._list_reversed = _list.__reversed__
|
||||
self._list_pop = _list.pop
|
||||
self._list_remove = _list.remove
|
||||
self._list_update = _list.update
|
||||
|
||||
# Expose some sorted list methods publicly.
|
||||
|
||||
self.bisect_left = _list.bisect_left
|
||||
self.bisect = _list.bisect_right
|
||||
self.bisect_right = _list.bisect_right
|
||||
self.index = _list.index
|
||||
self.irange = _list.irange
|
||||
self.islice = _list.islice
|
||||
self._reset = _list._reset
|
||||
|
||||
if _key is not None:
|
||||
self.bisect_key_left = _list.bisect_key_left
|
||||
self.bisect_key_right = _list.bisect_key_right
|
||||
self.bisect_key = _list.bisect_key
|
||||
self.irange_key = _list.irange_key
|
||||
|
||||
self._update(*args, **kwargs)
|
||||
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
"""Function used to extract comparison key from keys.
|
||||
|
||||
Sorted dict compares keys directly when the key function is none.
|
||||
|
||||
"""
|
||||
return self._key
|
||||
|
||||
|
||||
@property
|
||||
def iloc(self):
|
||||
"""Cached reference of sorted keys view.
|
||||
|
||||
Deprecated in version 2 of Sorted Containers. Use
|
||||
:func:`SortedDict.keys` instead.
|
||||
|
||||
"""
|
||||
# pylint: disable=attribute-defined-outside-init
|
||||
try:
|
||||
return self._iloc
|
||||
except AttributeError:
|
||||
warnings.warn(
|
||||
'sorted_dict.iloc is deprecated.'
|
||||
' Use SortedDict.keys() instead.',
|
||||
DeprecationWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
_iloc = self._iloc = SortedKeysView(self)
|
||||
return _iloc
|
||||
|
||||
|
||||
def clear(self):
|
||||
|
||||
"""Remove all items from sorted dict.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
"""
|
||||
self._dict_clear()
|
||||
self._list_clear()
|
||||
|
||||
|
||||
def __delitem__(self, key):
|
||||
"""Remove item from sorted dict identified by `key`.
|
||||
|
||||
``sd.__delitem__(key)`` <==> ``del sd[key]``
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> sd = SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> del sd['b']
|
||||
>>> sd
|
||||
SortedDict({'a': 1, 'c': 3})
|
||||
>>> del sd['z']
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
KeyError: 'z'
|
||||
|
||||
:param key: `key` for item lookup
|
||||
:raises KeyError: if key not found
|
||||
|
||||
"""
|
||||
self._dict_delitem(key)
|
||||
self._list_remove(key)
|
||||
|
||||
|
||||
def __iter__(self):
|
||||
"""Return an iterator over the keys of the sorted dict.
|
||||
|
||||
``sd.__iter__()`` <==> ``iter(sd)``
|
||||
|
||||
Iterating the sorted dict while adding or deleting items may raise a
|
||||
:exc:`RuntimeError` or fail to iterate over all keys.
|
||||
|
||||
"""
|
||||
return self._list_iter()
|
||||
|
||||
|
||||
def __reversed__(self):
|
||||
"""Return a reverse iterator over the keys of the sorted dict.
|
||||
|
||||
``sd.__reversed__()`` <==> ``reversed(sd)``
|
||||
|
||||
Iterating the sorted dict while adding or deleting items may raise a
|
||||
:exc:`RuntimeError` or fail to iterate over all keys.
|
||||
|
||||
"""
|
||||
return self._list_reversed()
|
||||
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Store item in sorted dict with `key` and corresponding `value`.
|
||||
|
||||
``sd.__setitem__(key, value)`` <==> ``sd[key] = value``
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> sd = SortedDict()
|
||||
>>> sd['c'] = 3
|
||||
>>> sd['a'] = 1
|
||||
>>> sd['b'] = 2
|
||||
>>> sd
|
||||
SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
|
||||
:param key: key for item
|
||||
:param value: value for item
|
||||
|
||||
"""
|
||||
if key not in self:
|
||||
self._list_add(key)
|
||||
self._dict_setitem(key, value)
|
||||
|
||||
_setitem = __setitem__
|
||||
|
||||
|
||||
def copy(self):
|
||||
"""Return a shallow copy of the sorted dict.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
:return: new sorted dict
|
||||
|
||||
"""
|
||||
return self.__class__(self._key, self.items())
|
||||
|
||||
__copy__ = copy
|
||||
|
||||
|
||||
@classmethod
|
||||
def fromkeys(cls, iterable, value=None):
|
||||
"""Return a new sorted dict initailized from `iterable` and `value`.
|
||||
|
||||
Items in the sorted dict have keys from `iterable` and values equal to
|
||||
`value`.
|
||||
|
||||
Runtime complexity: `O(n*log(n))`
|
||||
|
||||
:return: new sorted dict
|
||||
|
||||
"""
|
||||
return cls((key, value) for key in iterable)
|
||||
|
||||
|
||||
def keys(self):
|
||||
"""Return new sorted keys view of the sorted dict's keys.
|
||||
|
||||
See :class:`SortedKeysView` for details.
|
||||
|
||||
:return: new sorted keys view
|
||||
|
||||
"""
|
||||
return SortedKeysView(self)
|
||||
|
||||
|
||||
def items(self):
|
||||
"""Return new sorted items view of the sorted dict's items.
|
||||
|
||||
See :class:`SortedItemsView` for details.
|
||||
|
||||
:return: new sorted items view
|
||||
|
||||
"""
|
||||
return SortedItemsView(self)
|
||||
|
||||
|
||||
def values(self):
|
||||
"""Return new sorted values view of the sorted dict's values.
|
||||
|
||||
See :class:`SortedValuesView` for details.
|
||||
|
||||
:return: new sorted values view
|
||||
|
||||
"""
|
||||
return SortedValuesView(self)
|
||||
|
||||
|
||||
if sys.hexversion < 0x03000000:
|
||||
def __make_raise_attributeerror(original, alternate):
|
||||
# pylint: disable=no-self-argument
|
||||
message = (
|
||||
'SortedDict.{original}() is not implemented.'
|
||||
' Use SortedDict.{alternate}() instead.'
|
||||
).format(original=original, alternate=alternate)
|
||||
def method(self):
|
||||
# pylint: disable=missing-docstring,unused-argument
|
||||
raise AttributeError(message)
|
||||
method.__name__ = original
|
||||
method.__doc__ = message
|
||||
return property(method)
|
||||
|
||||
iteritems = __make_raise_attributeerror('iteritems', 'items')
|
||||
iterkeys = __make_raise_attributeerror('iterkeys', 'keys')
|
||||
itervalues = __make_raise_attributeerror('itervalues', 'values')
|
||||
viewitems = __make_raise_attributeerror('viewitems', 'items')
|
||||
viewkeys = __make_raise_attributeerror('viewkeys', 'keys')
|
||||
viewvalues = __make_raise_attributeerror('viewvalues', 'values')
|
||||
|
||||
|
||||
class _NotGiven(object):
|
||||
# pylint: disable=too-few-public-methods
|
||||
def __repr__(self):
|
||||
return '<not-given>'
|
||||
|
||||
__not_given = _NotGiven()
|
||||
|
||||
def pop(self, key, default=__not_given):
|
||||
"""Remove and return value for item identified by `key`.
|
||||
|
||||
If the `key` is not found then return `default` if given. If `default`
|
||||
is not given then raise :exc:`KeyError`.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> sd = SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> sd.pop('c')
|
||||
3
|
||||
>>> sd.pop('z', 26)
|
||||
26
|
||||
>>> sd.pop('y')
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
KeyError: 'y'
|
||||
|
||||
:param key: `key` for item
|
||||
:param default: `default` value if key not found (optional)
|
||||
:return: value for item
|
||||
:raises KeyError: if `key` not found and `default` not given
|
||||
|
||||
"""
|
||||
if key in self:
|
||||
self._list_remove(key)
|
||||
return self._dict_pop(key)
|
||||
else:
|
||||
if default is self.__not_given:
|
||||
raise KeyError(key)
|
||||
else:
|
||||
return default
|
||||
|
||||
|
||||
def popitem(self, index=-1):
|
||||
"""Remove and return ``(key, value)`` pair at `index` from sorted dict.
|
||||
|
||||
Optional argument `index` defaults to -1, the last item in the sorted
|
||||
dict. Specify ``index=0`` for the first item in the sorted dict.
|
||||
|
||||
If the sorted dict is empty, raises :exc:`KeyError`.
|
||||
|
||||
If the `index` is out of range, raises :exc:`IndexError`.
|
||||
|
||||
Runtime complexity: `O(log(n))`
|
||||
|
||||
>>> sd = SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> sd.popitem()
|
||||
('c', 3)
|
||||
>>> sd.popitem(0)
|
||||
('a', 1)
|
||||
>>> sd.popitem(100)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
IndexError: list index out of range
|
||||
|
||||
:param int index: `index` of item (default -1)
|
||||
:return: key and value pair
|
||||
:raises KeyError: if sorted dict is empty
|
||||
:raises IndexError: if `index` out of range
|
||||
|
||||
"""
|
||||
if not self:
|
||||
raise KeyError('popitem(): dictionary is empty')
|
||||
|
||||
key = self._list_pop(index)
|
||||
value = self._dict_pop(key)
|
||||
return (key, value)
|
||||
|
||||
|
||||
def peekitem(self, index=-1):
|
||||
"""Return ``(key, value)`` pair at `index` in sorted dict.
|
||||
|
||||
Optional argument `index` defaults to -1, the last item in the sorted
|
||||
dict. Specify ``index=0`` for the first item in the sorted dict.
|
||||
|
||||
Unlike :func:`SortedDict.popitem`, the sorted dict is not modified.
|
||||
|
||||
If the `index` is out of range, raises :exc:`IndexError`.
|
||||
|
||||
Runtime complexity: `O(log(n))`
|
||||
|
||||
>>> sd = SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> sd.peekitem()
|
||||
('c', 3)
|
||||
>>> sd.peekitem(0)
|
||||
('a', 1)
|
||||
>>> sd.peekitem(100)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
IndexError: list index out of range
|
||||
|
||||
:param int index: index of item (default -1)
|
||||
:return: key and value pair
|
||||
:raises IndexError: if `index` out of range
|
||||
|
||||
"""
|
||||
key = self._list[index]
|
||||
return key, self[key]
|
||||
|
||||
|
||||
def setdefault(self, key, default=None):
|
||||
"""Return value for item identified by `key` in sorted dict.
|
||||
|
||||
If `key` is in the sorted dict then return its value. If `key` is not
|
||||
in the sorted dict then insert `key` with value `default` and return
|
||||
`default`.
|
||||
|
||||
Optional argument `default` defaults to none.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> sd = SortedDict()
|
||||
>>> sd.setdefault('a', 1)
|
||||
1
|
||||
>>> sd.setdefault('a', 10)
|
||||
1
|
||||
>>> sd
|
||||
SortedDict({'a': 1})
|
||||
|
||||
:param key: key for item
|
||||
:param default: value for item (default None)
|
||||
:return: value for item identified by `key`
|
||||
|
||||
"""
|
||||
if key in self:
|
||||
return self[key]
|
||||
self._dict_setitem(key, default)
|
||||
self._list_add(key)
|
||||
return default
|
||||
|
||||
|
||||
def update(self, *args, **kwargs):
|
||||
"""Update sorted dict with items from `args` and `kwargs`.
|
||||
|
||||
Overwrites existing items.
|
||||
|
||||
Optional arguments `args` and `kwargs` may be a mapping, an iterable of
|
||||
pairs or keyword arguments. See :func:`SortedDict.__init__` for
|
||||
details.
|
||||
|
||||
:param args: mapping or iterable of pairs
|
||||
:param kwargs: keyword arguments mapping
|
||||
|
||||
"""
|
||||
if not self:
|
||||
self._dict_update(*args, **kwargs)
|
||||
self._list_update(self._dict_iter())
|
||||
return
|
||||
|
||||
if not kwargs and len(args) == 1 and isinstance(args[0], dict):
|
||||
pairs = args[0]
|
||||
else:
|
||||
pairs = dict(*args, **kwargs)
|
||||
|
||||
if (10 * len(pairs)) > len(self):
|
||||
self._dict_update(pairs)
|
||||
self._list_clear()
|
||||
self._list_update(self._dict_iter())
|
||||
else:
|
||||
for key in pairs:
|
||||
self._setitem(key, pairs[key])
|
||||
|
||||
_update = update
|
||||
|
||||
|
||||
def __reduce__(self):
|
||||
"""Support for pickle.
|
||||
|
||||
The tricks played with caching references in
|
||||
:func:`SortedDict.__init__` confuse pickle so customize the reducer.
|
||||
|
||||
"""
|
||||
return (self.__class__, (self._key, list(self.items())))
|
||||
|
||||
|
||||
@recursive_repr()
|
||||
def __repr__(self):
|
||||
"""Return string representation of sorted dict.
|
||||
|
||||
``sd.__repr__()`` <==> ``repr(sd)``
|
||||
|
||||
:return: string representation
|
||||
|
||||
"""
|
||||
_key = self._key
|
||||
type_name = type(self).__name__
|
||||
key_arg = '' if _key is None else '{0!r}, '.format(_key)
|
||||
item_format = '{0!r}: {1!r}'.format
|
||||
items = ', '.join(item_format(key, self[key]) for key in self._list)
|
||||
return '{0}({1}{{{2}}})'.format(type_name, key_arg, items)
|
||||
|
||||
|
||||
def _check(self):
|
||||
"""Check invariants of sorted dict.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
"""
|
||||
_list = self._list
|
||||
_list._check()
|
||||
assert len(self) == len(_list)
|
||||
assert all(key in self for key in _list)
|
||||
|
||||
|
||||
def _view_delitem(self, index):
|
||||
"""Remove item at `index` from sorted dict.
|
||||
|
||||
``view.__delitem__(index)`` <==> ``del view[index]``
|
||||
|
||||
Supports slicing.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> sd = SortedDict({'a': 1, 'b': 2, 'c': 3})
|
||||
>>> view = sd.keys()
|
||||
>>> del view[0]
|
||||
>>> sd
|
||||
SortedDict({'b': 2, 'c': 3})
|
||||
>>> del view[-1]
|
||||
>>> sd
|
||||
SortedDict({'b': 2})
|
||||
>>> del view[:]
|
||||
>>> sd
|
||||
SortedDict({})
|
||||
|
||||
:param index: integer or slice for indexing
|
||||
:raises IndexError: if index out of range
|
||||
|
||||
"""
|
||||
_mapping = self._mapping
|
||||
_list = _mapping._list
|
||||
_dict_delitem = _mapping._dict_delitem
|
||||
if isinstance(index, slice):
|
||||
keys = _list[index]
|
||||
del _list[index]
|
||||
for key in keys:
|
||||
_dict_delitem(key)
|
||||
else:
|
||||
key = _list.pop(index)
|
||||
_dict_delitem(key)
|
||||
|
||||
|
||||
class SortedKeysView(KeysView, Sequence):
    """Dynamic, sequence-like view of a sorted dict's keys.

    The view reflects changes to the underlying sorted dict and implements
    both the set and sequence abstract base classes.

    """
    __slots__ = ()

    @classmethod
    def _from_iterable(cls, it):
        # Set operations on the view produce a SortedSet result.
        return SortedSet(it)

    def __getitem__(self, index):
        """Return the key (or list of keys) at `index` in the view.

        ``skv.__getitem__(index)`` <==> ``skv[index]``

        Supports slicing.

        Runtime complexity: `O(log(n))` -- approximate.

        :param index: integer or slice for indexing
        :return: key or list of keys
        :raises IndexError: if index out of range

        """
        return self._mapping._list[index]

    __delitem__ = _view_delitem
|
||||
|
||||
|
||||
class SortedItemsView(ItemsView, Sequence):
    """Dynamic, sequence-like view of a sorted dict's items.

    The view reflects changes to the underlying sorted dict and implements
    both the set and sequence abstract base classes.

    """
    __slots__ = ()

    @classmethod
    def _from_iterable(cls, it):
        # Set operations on the view produce a SortedSet result.
        return SortedSet(it)

    def __getitem__(self, index):
        """Return the item (or list of items) at `index` in the view.

        ``siv.__getitem__(index)`` <==> ``siv[index]``

        Supports slicing.

        Runtime complexity: `O(log(n))` -- approximate.

        :param index: integer or slice for indexing
        :return: item or list of items
        :raises IndexError: if index out of range

        """
        mapping = self._mapping
        keys = mapping._list
        if isinstance(index, slice):
            return [(key, mapping[key]) for key in keys[index]]
        key = keys[index]
        return key, mapping[key]

    __delitem__ = _view_delitem
|
||||
|
||||
|
||||
class SortedValuesView(ValuesView, Sequence):
    """Dynamic, sequence-like view of a sorted dict's values.

    Values appear in the sorted order of their keys. The view reflects
    changes to the underlying sorted dict and implements the sequence
    abstract base class.

    """
    __slots__ = ()

    def __getitem__(self, index):
        """Return the value (or list of values) at `index` in the view.

        ``svv.__getitem__(index)`` <==> ``svv[index]``

        Supports slicing.

        Runtime complexity: `O(log(n))` -- approximate.

        :param index: integer or slice for indexing
        :return: value or list of values
        :raises IndexError: if index out of range

        """
        mapping = self._mapping
        keys = mapping._list
        if isinstance(index, slice):
            return [mapping[key] for key in keys[index]]
        return mapping[keys[index]]

    __delitem__ = _view_delitem
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,733 @@
|
|||
"""Sorted Set
|
||||
=============
|
||||
|
||||
:doc:`Sorted Containers<index>` is an Apache2 licensed Python sorted
|
||||
collections library, written in pure-Python, and fast as C-extensions. The
|
||||
:doc:`introduction<introduction>` is the best way to get started.
|
||||
|
||||
Sorted set implementations:
|
||||
|
||||
.. currentmodule:: sortedcontainers
|
||||
|
||||
* :class:`SortedSet`
|
||||
|
||||
"""
|
||||
|
||||
from itertools import chain
|
||||
from operator import eq, ne, gt, ge, lt, le
|
||||
from textwrap import dedent
|
||||
|
||||
from .sortedlist import SortedList, recursive_repr
|
||||
|
||||
###############################################################################
|
||||
# BEGIN Python 2/3 Shims
|
||||
###############################################################################
|
||||
|
||||
try:
|
||||
from collections.abc import MutableSet, Sequence, Set
|
||||
except ImportError:
|
||||
from collections import MutableSet, Sequence, Set
|
||||
|
||||
###############################################################################
|
||||
# END Python 2/3 Shims
|
||||
###############################################################################
|
||||
|
||||
|
||||
class SortedSet(MutableSet, Sequence):
|
||||
"""Sorted set is a sorted mutable set.
|
||||
|
||||
Sorted set values are maintained in sorted order. The design of sorted set
|
||||
is simple: sorted set uses a set for set-operations and maintains a sorted
|
||||
list of values.
|
||||
|
||||
Sorted set values must be hashable and comparable. The hash and total
|
||||
ordering of values must not change while they are stored in the sorted set.
|
||||
|
||||
Mutable set methods:
|
||||
|
||||
* :func:`SortedSet.__contains__`
|
||||
* :func:`SortedSet.__iter__`
|
||||
* :func:`SortedSet.__len__`
|
||||
* :func:`SortedSet.add`
|
||||
* :func:`SortedSet.discard`
|
||||
|
||||
Sequence methods:
|
||||
|
||||
* :func:`SortedSet.__getitem__`
|
||||
* :func:`SortedSet.__delitem__`
|
||||
* :func:`SortedSet.__reversed__`
|
||||
|
||||
Methods for removing values:
|
||||
|
||||
* :func:`SortedSet.clear`
|
||||
* :func:`SortedSet.pop`
|
||||
* :func:`SortedSet.remove`
|
||||
|
||||
Set-operation methods:
|
||||
|
||||
* :func:`SortedSet.difference`
|
||||
* :func:`SortedSet.difference_update`
|
||||
* :func:`SortedSet.intersection`
|
||||
* :func:`SortedSet.intersection_update`
|
||||
* :func:`SortedSet.symmetric_difference`
|
||||
* :func:`SortedSet.symmetric_difference_update`
|
||||
* :func:`SortedSet.union`
|
||||
* :func:`SortedSet.update`
|
||||
|
||||
Methods for miscellany:
|
||||
|
||||
* :func:`SortedSet.copy`
|
||||
* :func:`SortedSet.count`
|
||||
* :func:`SortedSet.__repr__`
|
||||
* :func:`SortedSet._check`
|
||||
|
||||
Sorted list methods available:
|
||||
|
||||
* :func:`SortedList.bisect_left`
|
||||
* :func:`SortedList.bisect_right`
|
||||
* :func:`SortedList.index`
|
||||
* :func:`SortedList.irange`
|
||||
* :func:`SortedList.islice`
|
||||
* :func:`SortedList._reset`
|
||||
|
||||
Additional sorted list methods available, if key-function used:
|
||||
|
||||
* :func:`SortedKeyList.bisect_key_left`
|
||||
* :func:`SortedKeyList.bisect_key_right`
|
||||
* :func:`SortedKeyList.irange_key`
|
||||
|
||||
Sorted set comparisons use subset and superset relations. Two sorted sets
|
||||
are equal if and only if every element of each sorted set is contained in
|
||||
the other (each is a subset of the other). A sorted set is less than
|
||||
another sorted set if and only if the first sorted set is a proper subset
|
||||
of the second sorted set (is a subset, but is not equal). A sorted set is
|
||||
greater than another sorted set if and only if the first sorted set is a
|
||||
proper superset of the second sorted set (is a superset, but is not equal).
|
||||
|
||||
"""
|
||||
def __init__(self, iterable=None, key=None):
|
||||
"""Initialize sorted set instance.
|
||||
|
||||
Optional `iterable` argument provides an initial iterable of values to
|
||||
initialize the sorted set.
|
||||
|
||||
Optional `key` argument defines a callable that, like the `key`
|
||||
argument to Python's `sorted` function, extracts a comparison key from
|
||||
each value. The default, none, compares values directly.
|
||||
|
||||
Runtime complexity: `O(n*log(n))`
|
||||
|
||||
>>> ss = SortedSet([3, 1, 2, 5, 4])
|
||||
>>> ss
|
||||
SortedSet([1, 2, 3, 4, 5])
|
||||
>>> from operator import neg
|
||||
>>> ss = SortedSet([3, 1, 2, 5, 4], neg)
|
||||
>>> ss
|
||||
SortedSet([5, 4, 3, 2, 1], key=<built-in function neg>)
|
||||
|
||||
:param iterable: initial values (optional)
|
||||
:param key: function used to extract comparison key (optional)
|
||||
|
||||
"""
|
||||
self._key = key
|
||||
|
||||
# SortedSet._fromset calls SortedSet.__init__ after initializing the
|
||||
# _set attribute. So only create a new set if the _set attribute is not
|
||||
# already present.
|
||||
|
||||
if not hasattr(self, '_set'):
|
||||
self._set = set()
|
||||
|
||||
self._list = SortedList(self._set, key=key)
|
||||
|
||||
# Expose some set methods publicly.
|
||||
|
||||
_set = self._set
|
||||
self.isdisjoint = _set.isdisjoint
|
||||
self.issubset = _set.issubset
|
||||
self.issuperset = _set.issuperset
|
||||
|
||||
# Expose some sorted list methods publicly.
|
||||
|
||||
_list = self._list
|
||||
self.bisect_left = _list.bisect_left
|
||||
self.bisect = _list.bisect
|
||||
self.bisect_right = _list.bisect_right
|
||||
self.index = _list.index
|
||||
self.irange = _list.irange
|
||||
self.islice = _list.islice
|
||||
self._reset = _list._reset
|
||||
|
||||
if key is not None:
|
||||
self.bisect_key_left = _list.bisect_key_left
|
||||
self.bisect_key_right = _list.bisect_key_right
|
||||
self.bisect_key = _list.bisect_key
|
||||
self.irange_key = _list.irange_key
|
||||
|
||||
if iterable is not None:
|
||||
self._update(iterable)
|
||||
|
||||
|
||||
@classmethod
|
||||
def _fromset(cls, values, key=None):
|
||||
"""Initialize sorted set from existing set.
|
||||
|
||||
Used internally by set operations that return a new set.
|
||||
|
||||
"""
|
||||
sorted_set = object.__new__(cls)
|
||||
sorted_set._set = values
|
||||
sorted_set.__init__(key=key)
|
||||
return sorted_set
|
||||
|
||||
|
||||
@property
|
||||
def key(self):
|
||||
"""Function used to extract comparison key from values.
|
||||
|
||||
Sorted set compares values directly when the key function is none.
|
||||
|
||||
"""
|
||||
return self._key
|
||||
|
||||
|
||||
def __contains__(self, value):
|
||||
"""Return true if `value` is an element of the sorted set.
|
||||
|
||||
``ss.__contains__(value)`` <==> ``value in ss``
|
||||
|
||||
Runtime complexity: `O(1)`
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> 3 in ss
|
||||
True
|
||||
|
||||
:param value: search for value in sorted set
|
||||
:return: true if `value` in sorted set
|
||||
|
||||
"""
|
||||
return value in self._set
|
||||
|
||||
|
||||
def __getitem__(self, index):
|
||||
"""Lookup value at `index` in sorted set.
|
||||
|
||||
``ss.__getitem__(index)`` <==> ``ss[index]``
|
||||
|
||||
Supports slicing.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet('abcde')
|
||||
>>> ss[2]
|
||||
'c'
|
||||
>>> ss[-1]
|
||||
'e'
|
||||
>>> ss[2:5]
|
||||
['c', 'd', 'e']
|
||||
|
||||
:param index: integer or slice for indexing
|
||||
:return: value or list of values
|
||||
:raises IndexError: if index out of range
|
||||
|
||||
"""
|
||||
return self._list[index]
|
||||
|
||||
|
||||
def __delitem__(self, index):
|
||||
"""Remove value at `index` from sorted set.
|
||||
|
||||
``ss.__delitem__(index)`` <==> ``del ss[index]``
|
||||
|
||||
Supports slicing.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet('abcde')
|
||||
>>> del ss[2]
|
||||
>>> ss
|
||||
SortedSet(['a', 'b', 'd', 'e'])
|
||||
>>> del ss[:2]
|
||||
>>> ss
|
||||
SortedSet(['d', 'e'])
|
||||
|
||||
:param index: integer or slice for indexing
|
||||
:raises IndexError: if index out of range
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
if isinstance(index, slice):
|
||||
values = _list[index]
|
||||
_set.difference_update(values)
|
||||
else:
|
||||
value = _list[index]
|
||||
_set.remove(value)
|
||||
del _list[index]
|
||||
|
||||
|
||||
def __make_cmp(set_op, symbol, doc):
|
||||
"Make comparator method."
|
||||
def comparer(self, other):
|
||||
"Compare method for sorted set and set."
|
||||
if isinstance(other, SortedSet):
|
||||
return set_op(self._set, other._set)
|
||||
elif isinstance(other, Set):
|
||||
return set_op(self._set, other)
|
||||
return NotImplemented
|
||||
|
||||
set_op_name = set_op.__name__
|
||||
comparer.__name__ = '__{0}__'.format(set_op_name)
|
||||
doc_str = """Return true if and only if sorted set is {0} `other`.
|
||||
|
||||
``ss.__{1}__(other)`` <==> ``ss {2} other``
|
||||
|
||||
Comparisons use subset and superset semantics as with sets.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
:param other: `other` set
|
||||
:return: true if sorted set is {0} `other`
|
||||
|
||||
"""
|
||||
comparer.__doc__ = dedent(doc_str.format(doc, set_op_name, symbol))
|
||||
return comparer
|
||||
|
||||
|
||||
__eq__ = __make_cmp(eq, '==', 'equal to')
|
||||
__ne__ = __make_cmp(ne, '!=', 'not equal to')
|
||||
__lt__ = __make_cmp(lt, '<', 'a proper subset of')
|
||||
__gt__ = __make_cmp(gt, '>', 'a proper superset of')
|
||||
__le__ = __make_cmp(le, '<=', 'a subset of')
|
||||
__ge__ = __make_cmp(ge, '>=', 'a superset of')
|
||||
__make_cmp = staticmethod(__make_cmp)
|
||||
|
||||
|
||||
def __len__(self):
|
||||
"""Return the size of the sorted set.
|
||||
|
||||
``ss.__len__()`` <==> ``len(ss)``
|
||||
|
||||
:return: size of sorted set
|
||||
|
||||
"""
|
||||
return len(self._set)
|
||||
|
||||
|
||||
def __iter__(self):
|
||||
"""Return an iterator over the sorted set.
|
||||
|
||||
``ss.__iter__()`` <==> ``iter(ss)``
|
||||
|
||||
Iterating the sorted set while adding or deleting values may raise a
|
||||
:exc:`RuntimeError` or fail to iterate over all values.
|
||||
|
||||
"""
|
||||
return iter(self._list)
|
||||
|
||||
|
||||
def __reversed__(self):
|
||||
"""Return a reverse iterator over the sorted set.
|
||||
|
||||
``ss.__reversed__()`` <==> ``reversed(ss)``
|
||||
|
||||
Iterating the sorted set while adding or deleting values may raise a
|
||||
:exc:`RuntimeError` or fail to iterate over all values.
|
||||
|
||||
"""
|
||||
return reversed(self._list)
|
||||
|
||||
|
||||
def add(self, value):
|
||||
"""Add `value` to sorted set.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet()
|
||||
>>> ss.add(3)
|
||||
>>> ss.add(1)
|
||||
>>> ss.add(2)
|
||||
>>> ss
|
||||
SortedSet([1, 2, 3])
|
||||
|
||||
:param value: value to add to sorted set
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
if value not in _set:
|
||||
_set.add(value)
|
||||
self._list.add(value)
|
||||
|
||||
_add = add
|
||||
|
||||
|
||||
def clear(self):
|
||||
"""Remove all values from sorted set.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
"""
|
||||
self._set.clear()
|
||||
self._list.clear()
|
||||
|
||||
|
||||
def copy(self):
|
||||
"""Return a shallow copy of the sorted set.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
:return: new sorted set
|
||||
|
||||
"""
|
||||
return self._fromset(set(self._set), key=self._key)
|
||||
|
||||
__copy__ = copy
|
||||
|
||||
|
||||
def count(self, value):
|
||||
"""Return number of occurrences of `value` in the sorted set.
|
||||
|
||||
Runtime complexity: `O(1)`
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.count(3)
|
||||
1
|
||||
|
||||
:param value: value to count in sorted set
|
||||
:return: count
|
||||
|
||||
"""
|
||||
return 1 if value in self._set else 0
|
||||
|
||||
|
||||
def discard(self, value):
|
||||
"""Remove `value` from sorted set if it is a member.
|
||||
|
||||
If `value` is not a member, do nothing.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.discard(5)
|
||||
>>> ss.discard(0)
|
||||
>>> ss == set([1, 2, 3, 4])
|
||||
True
|
||||
|
||||
:param value: `value` to discard from sorted set
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
if value in _set:
|
||||
_set.remove(value)
|
||||
self._list.remove(value)
|
||||
|
||||
_discard = discard
|
||||
|
||||
|
||||
def pop(self, index=-1):
|
||||
"""Remove and return value at `index` in sorted set.
|
||||
|
||||
Raise :exc:`IndexError` if the sorted set is empty or index is out of
|
||||
range.
|
||||
|
||||
Negative indices are supported.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet('abcde')
|
||||
>>> ss.pop()
|
||||
'e'
|
||||
>>> ss.pop(2)
|
||||
'c'
|
||||
>>> ss
|
||||
SortedSet(['a', 'b', 'd'])
|
||||
|
||||
:param int index: index of value (default -1)
|
||||
:return: value
|
||||
:raises IndexError: if index is out of range
|
||||
|
||||
"""
|
||||
# pylint: disable=arguments-differ
|
||||
value = self._list.pop(index)
|
||||
self._set.remove(value)
|
||||
return value
|
||||
|
||||
|
||||
def remove(self, value):
|
||||
"""Remove `value` from sorted set; `value` must be a member.
|
||||
|
||||
If `value` is not a member, raise :exc:`KeyError`.
|
||||
|
||||
Runtime complexity: `O(log(n))` -- approximate.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.remove(5)
|
||||
>>> ss == set([1, 2, 3, 4])
|
||||
True
|
||||
>>> ss.remove(0)
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
KeyError: 0
|
||||
|
||||
:param value: `value` to remove from sorted set
|
||||
:raises KeyError: if `value` is not in sorted set
|
||||
|
||||
"""
|
||||
self._set.remove(value)
|
||||
self._list.remove(value)
|
||||
|
||||
|
||||
def difference(self, *iterables):
|
||||
"""Return the difference of two or more sets as a new sorted set.
|
||||
|
||||
The `difference` method also corresponds to operator ``-``.
|
||||
|
||||
``ss.__sub__(iterable)`` <==> ``ss - iterable``
|
||||
|
||||
The difference is all values that are in this sorted set but not the
|
||||
other `iterables`.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.difference([4, 5, 6, 7])
|
||||
SortedSet([1, 2, 3])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: new sorted set
|
||||
|
||||
"""
|
||||
diff = self._set.difference(*iterables)
|
||||
return self._fromset(diff, key=self._key)
|
||||
|
||||
__sub__ = difference
|
||||
|
||||
|
||||
def difference_update(self, *iterables):
|
||||
"""Remove all values of `iterables` from this sorted set.
|
||||
|
||||
The `difference_update` method also corresponds to operator ``-=``.
|
||||
|
||||
``ss.__isub__(iterable)`` <==> ``ss -= iterable``
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> _ = ss.difference_update([4, 5, 6, 7])
|
||||
>>> ss
|
||||
SortedSet([1, 2, 3])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: itself
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
values = set(chain(*iterables))
|
||||
if (4 * len(values)) > len(_set):
|
||||
_set.difference_update(values)
|
||||
_list.clear()
|
||||
_list.update(_set)
|
||||
else:
|
||||
_discard = self._discard
|
||||
for value in values:
|
||||
_discard(value)
|
||||
return self
|
||||
|
||||
__isub__ = difference_update
|
||||
|
||||
|
||||
def intersection(self, *iterables):
|
||||
"""Return the intersection of two or more sets as a new sorted set.
|
||||
|
||||
The `intersection` method also corresponds to operator ``&``.
|
||||
|
||||
``ss.__and__(iterable)`` <==> ``ss & iterable``
|
||||
|
||||
The intersection is all values that are in this sorted set and each of
|
||||
the other `iterables`.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.intersection([4, 5, 6, 7])
|
||||
SortedSet([4, 5])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: new sorted set
|
||||
|
||||
"""
|
||||
intersect = self._set.intersection(*iterables)
|
||||
return self._fromset(intersect, key=self._key)
|
||||
|
||||
__and__ = intersection
|
||||
__rand__ = __and__
|
||||
|
||||
|
||||
def intersection_update(self, *iterables):
|
||||
"""Update the sorted set with the intersection of `iterables`.
|
||||
|
||||
The `intersection_update` method also corresponds to operator ``&=``.
|
||||
|
||||
``ss.__iand__(iterable)`` <==> ``ss &= iterable``
|
||||
|
||||
Keep only values found in itself and all `iterables`.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> _ = ss.intersection_update([4, 5, 6, 7])
|
||||
>>> ss
|
||||
SortedSet([4, 5])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: itself
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
_set.intersection_update(*iterables)
|
||||
_list.clear()
|
||||
_list.update(_set)
|
||||
return self
|
||||
|
||||
__iand__ = intersection_update
|
||||
|
||||
|
||||
def symmetric_difference(self, other):
|
||||
"""Return the symmetric difference with `other` as a new sorted set.
|
||||
|
||||
The `symmetric_difference` method also corresponds to operator ``^``.
|
||||
|
||||
``ss.__xor__(other)`` <==> ``ss ^ other``
|
||||
|
||||
The symmetric difference is all values tha are in exactly one of the
|
||||
sets.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.symmetric_difference([4, 5, 6, 7])
|
||||
SortedSet([1, 2, 3, 6, 7])
|
||||
|
||||
:param other: `other` iterable
|
||||
:return: new sorted set
|
||||
|
||||
"""
|
||||
diff = self._set.symmetric_difference(other)
|
||||
return self._fromset(diff, key=self._key)
|
||||
|
||||
__xor__ = symmetric_difference
|
||||
__rxor__ = __xor__
|
||||
|
||||
|
||||
def symmetric_difference_update(self, other):
|
||||
"""Update the sorted set with the symmetric difference with `other`.
|
||||
|
||||
The `symmetric_difference_update` method also corresponds to operator
|
||||
``^=``.
|
||||
|
||||
``ss.__ixor__(other)`` <==> ``ss ^= other``
|
||||
|
||||
Keep only values found in exactly one of itself and `other`.
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> _ = ss.symmetric_difference_update([4, 5, 6, 7])
|
||||
>>> ss
|
||||
SortedSet([1, 2, 3, 6, 7])
|
||||
|
||||
:param other: `other` iterable
|
||||
:return: itself
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
_set.symmetric_difference_update(other)
|
||||
_list.clear()
|
||||
_list.update(_set)
|
||||
return self
|
||||
|
||||
__ixor__ = symmetric_difference_update
|
||||
|
||||
|
||||
def union(self, *iterables):
|
||||
"""Return new sorted set with values from itself and all `iterables`.
|
||||
|
||||
The `union` method also corresponds to operator ``|``.
|
||||
|
||||
``ss.__or__(iterable)`` <==> ``ss | iterable``
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> ss.union([4, 5, 6, 7])
|
||||
SortedSet([1, 2, 3, 4, 5, 6, 7])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: new sorted set
|
||||
|
||||
"""
|
||||
return self.__class__(chain(iter(self), *iterables), key=self._key)
|
||||
|
||||
__or__ = union
|
||||
__ror__ = __or__
|
||||
|
||||
|
||||
def update(self, *iterables):
|
||||
"""Update the sorted set adding values from all `iterables`.
|
||||
|
||||
The `update` method also corresponds to operator ``|=``.
|
||||
|
||||
``ss.__ior__(iterable)`` <==> ``ss |= iterable``
|
||||
|
||||
>>> ss = SortedSet([1, 2, 3, 4, 5])
|
||||
>>> _ = ss.update([4, 5, 6, 7])
|
||||
>>> ss
|
||||
SortedSet([1, 2, 3, 4, 5, 6, 7])
|
||||
|
||||
:param iterables: iterable arguments
|
||||
:return: itself
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
values = set(chain(*iterables))
|
||||
if (4 * len(values)) > len(_set):
|
||||
_list = self._list
|
||||
_set.update(values)
|
||||
_list.clear()
|
||||
_list.update(_set)
|
||||
else:
|
||||
_add = self._add
|
||||
for value in values:
|
||||
_add(value)
|
||||
return self
|
||||
|
||||
__ior__ = update
|
||||
_update = update
|
||||
|
||||
|
||||
def __reduce__(self):
|
||||
"""Support for pickle.
|
||||
|
||||
The tricks played with exposing methods in :func:`SortedSet.__init__`
|
||||
confuse pickle so customize the reducer.
|
||||
|
||||
"""
|
||||
return (type(self), (self._set, self._key))
|
||||
|
||||
|
||||
@recursive_repr()
|
||||
def __repr__(self):
|
||||
"""Return string representation of sorted set.
|
||||
|
||||
``ss.__repr__()`` <==> ``repr(ss)``
|
||||
|
||||
:return: string representation
|
||||
|
||||
"""
|
||||
_key = self._key
|
||||
key = '' if _key is None else ', key={0!r}'.format(_key)
|
||||
type_name = type(self).__name__
|
||||
return '{0}({1!r}{2})'.format(type_name, list(self), key)
|
||||
|
||||
|
||||
def _check(self):
|
||||
"""Check invariants of sorted set.
|
||||
|
||||
Runtime complexity: `O(n)`
|
||||
|
||||
"""
|
||||
_set = self._set
|
||||
_list = self._list
|
||||
_list._check()
|
||||
assert len(_set) == len(_list)
|
||||
assert all(value in _set for value in _list)
|
|
@ -1,23 +0,0 @@
|
|||
# pysqlite2/__init__.py: the pysqlite2 package.
|
||||
#
|
||||
# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
from sqlite3.dbapi2 import *
|
|
@ -1,89 +0,0 @@
|
|||
# pysqlite2/dbapi2.py: the DB-API 2.0 interface
|
||||
#
|
||||
# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import datetime
|
||||
import time
|
||||
import collections.abc
|
||||
|
||||
from _sqlite3 import *
|
||||
|
||||
paramstyle = "qmark"
|
||||
|
||||
threadsafety = 1
|
||||
|
||||
apilevel = "2.0"
|
||||
|
||||
Date = datetime.date
|
||||
|
||||
Time = datetime.time
|
||||
|
||||
Timestamp = datetime.datetime
|
||||
|
||||
def DateFromTicks(ticks):
|
||||
return Date(*time.localtime(ticks)[:3])
|
||||
|
||||
def TimeFromTicks(ticks):
|
||||
return Time(*time.localtime(ticks)[3:6])
|
||||
|
||||
def TimestampFromTicks(ticks):
|
||||
return Timestamp(*time.localtime(ticks)[:6])
|
||||
|
||||
version_info = tuple([int(x) for x in version.split(".")])
|
||||
sqlite_version_info = tuple([int(x) for x in sqlite_version.split(".")])
|
||||
|
||||
Binary = memoryview
|
||||
collections.abc.Sequence.register(Row)
|
||||
|
||||
def register_adapters_and_converters():
|
||||
def adapt_date(val):
|
||||
return val.isoformat()
|
||||
|
||||
def adapt_datetime(val):
|
||||
return val.isoformat(" ")
|
||||
|
||||
def convert_date(val):
|
||||
return datetime.date(*map(int, val.split(b"-")))
|
||||
|
||||
def convert_timestamp(val):
|
||||
datepart, timepart = val.split(b" ")
|
||||
year, month, day = map(int, datepart.split(b"-"))
|
||||
timepart_full = timepart.split(b".")
|
||||
hours, minutes, seconds = map(int, timepart_full[0].split(b":"))
|
||||
if len(timepart_full) == 2:
|
||||
microseconds = int('{:0<6.6}'.format(timepart_full[1].decode()))
|
||||
else:
|
||||
microseconds = 0
|
||||
|
||||
val = datetime.datetime(year, month, day, hours, minutes, seconds, microseconds)
|
||||
return val
|
||||
|
||||
|
||||
register_adapter(datetime.date, adapt_date)
|
||||
register_adapter(datetime.datetime, adapt_datetime)
|
||||
register_converter("date", convert_date)
|
||||
register_converter("timestamp", convert_timestamp)
|
||||
|
||||
register_adapters_and_converters()
|
||||
|
||||
# Clean up namespace
|
||||
|
||||
del(register_adapters_and_converters)
|
|
@ -1,70 +0,0 @@
|
|||
# Mimic the sqlite3 console shell's .dump command
|
||||
# Author: Paul Kippes <kippesp@gmail.com>
|
||||
|
||||
# Every identifier in sql is quoted based on a comment in sqlite
|
||||
# documentation "SQLite adds new keywords from time to time when it
|
||||
# takes on new features. So to prevent your code from being broken by
|
||||
# future enhancements, you should normally quote any identifier that
|
||||
# is an English language word, even if you do not have to."
|
||||
|
||||
def _iterdump(connection):
|
||||
"""
|
||||
Returns an iterator to the dump of the database in an SQL text format.
|
||||
|
||||
Used to produce an SQL dump of the database. Useful to save an in-memory
|
||||
database for later restoration. This function should not be called
|
||||
directly but instead called from the Connection method, iterdump().
|
||||
"""
|
||||
|
||||
cu = connection.cursor()
|
||||
yield('BEGIN TRANSACTION;')
|
||||
|
||||
# sqlite_master table contains the SQL CREATE statements for the database.
|
||||
q = """
|
||||
SELECT "name", "type", "sql"
|
||||
FROM "sqlite_master"
|
||||
WHERE "sql" NOT NULL AND
|
||||
"type" == 'table'
|
||||
ORDER BY "name"
|
||||
"""
|
||||
schema_res = cu.execute(q)
|
||||
for table_name, type, sql in schema_res.fetchall():
|
||||
if table_name == 'sqlite_sequence':
|
||||
yield('DELETE FROM "sqlite_sequence";')
|
||||
elif table_name == 'sqlite_stat1':
|
||||
yield('ANALYZE "sqlite_master";')
|
||||
elif table_name.startswith('sqlite_'):
|
||||
continue
|
||||
# NOTE: Virtual table support not implemented
|
||||
#elif sql.startswith('CREATE VIRTUAL TABLE'):
|
||||
# qtable = table_name.replace("'", "''")
|
||||
# yield("INSERT INTO sqlite_master(type,name,tbl_name,rootpage,sql)"\
|
||||
# "VALUES('table','{0}','{0}',0,'{1}');".format(
|
||||
# qtable,
|
||||
# sql.replace("''")))
|
||||
else:
|
||||
yield('{0};'.format(sql))
|
||||
|
||||
# Build the insert statement for each row of the current table
|
||||
table_name_ident = table_name.replace('"', '""')
|
||||
res = cu.execute('PRAGMA table_info("{0}")'.format(table_name_ident))
|
||||
column_names = [str(table_info[1]) for table_info in res.fetchall()]
|
||||
q = """SELECT 'INSERT INTO "{0}" VALUES({1})' FROM "{0}";""".format(
|
||||
table_name_ident,
|
||||
",".join("""'||quote("{0}")||'""".format(col.replace('"', '""')) for col in column_names))
|
||||
query_res = cu.execute(q)
|
||||
for row in query_res:
|
||||
yield("{0};".format(row[0]))
|
||||
|
||||
# Now when the type is 'index', 'trigger', or 'view'
|
||||
q = """
|
||||
SELECT "name", "type", "sql"
|
||||
FROM "sqlite_master"
|
||||
WHERE "sql" NOT NULL AND
|
||||
"type" IN ('index', 'trigger', 'view')
|
||||
"""
|
||||
schema_res = cu.execute(q)
|
||||
for name, type, sql in schema_res.fetchall():
|
||||
yield('{0};'.format(sql))
|
||||
|
||||
yield('COMMIT;')
|
|
@ -1,897 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/dbapi.py: tests for DB-API compliance
|
||||
#
|
||||
# Copyright (C) 2004-2010 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
try:
|
||||
import threading
|
||||
except ImportError:
|
||||
threading = None
|
||||
|
||||
from test.support import TESTFN, unlink
|
||||
|
||||
|
||||
class ModuleTests(unittest.TestCase):
|
||||
def CheckAPILevel(self):
|
||||
self.assertEqual(sqlite.apilevel, "2.0",
|
||||
"apilevel is %s, should be 2.0" % sqlite.apilevel)
|
||||
|
||||
def CheckThreadSafety(self):
|
||||
self.assertEqual(sqlite.threadsafety, 1,
|
||||
"threadsafety is %d, should be 1" % sqlite.threadsafety)
|
||||
|
||||
def CheckParamStyle(self):
|
||||
self.assertEqual(sqlite.paramstyle, "qmark",
|
||||
"paramstyle is '%s', should be 'qmark'" %
|
||||
sqlite.paramstyle)
|
||||
|
||||
def CheckWarning(self):
|
||||
self.assertTrue(issubclass(sqlite.Warning, Exception),
|
||||
"Warning is not a subclass of Exception")
|
||||
|
||||
def CheckError(self):
|
||||
self.assertTrue(issubclass(sqlite.Error, Exception),
|
||||
"Error is not a subclass of Exception")
|
||||
|
||||
def CheckInterfaceError(self):
|
||||
self.assertTrue(issubclass(sqlite.InterfaceError, sqlite.Error),
|
||||
"InterfaceError is not a subclass of Error")
|
||||
|
||||
def CheckDatabaseError(self):
|
||||
self.assertTrue(issubclass(sqlite.DatabaseError, sqlite.Error),
|
||||
"DatabaseError is not a subclass of Error")
|
||||
|
||||
def CheckDataError(self):
|
||||
self.assertTrue(issubclass(sqlite.DataError, sqlite.DatabaseError),
|
||||
"DataError is not a subclass of DatabaseError")
|
||||
|
||||
def CheckOperationalError(self):
|
||||
self.assertTrue(issubclass(sqlite.OperationalError, sqlite.DatabaseError),
|
||||
"OperationalError is not a subclass of DatabaseError")
|
||||
|
||||
def CheckIntegrityError(self):
|
||||
self.assertTrue(issubclass(sqlite.IntegrityError, sqlite.DatabaseError),
|
||||
"IntegrityError is not a subclass of DatabaseError")
|
||||
|
||||
def CheckInternalError(self):
|
||||
self.assertTrue(issubclass(sqlite.InternalError, sqlite.DatabaseError),
|
||||
"InternalError is not a subclass of DatabaseError")
|
||||
|
||||
def CheckProgrammingError(self):
|
||||
self.assertTrue(issubclass(sqlite.ProgrammingError, sqlite.DatabaseError),
|
||||
"ProgrammingError is not a subclass of DatabaseError")
|
||||
|
||||
def CheckNotSupportedError(self):
|
||||
self.assertTrue(issubclass(sqlite.NotSupportedError,
|
||||
sqlite.DatabaseError),
|
||||
"NotSupportedError is not a subclass of DatabaseError")
|
||||
|
||||
class ConnectionTests(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.cx = sqlite.connect(":memory:")
|
||||
cu = self.cx.cursor()
|
||||
cu.execute("create table test(id integer primary key, name text)")
|
||||
cu.execute("insert into test(name) values (?)", ("foo",))
|
||||
|
||||
def tearDown(self):
|
||||
self.cx.close()
|
||||
|
||||
def CheckCommit(self):
|
||||
self.cx.commit()
|
||||
|
||||
def CheckCommitAfterNoChanges(self):
|
||||
"""
|
||||
A commit should also work when no changes were made to the database.
|
||||
"""
|
||||
self.cx.commit()
|
||||
self.cx.commit()
|
||||
|
||||
def CheckRollback(self):
|
||||
self.cx.rollback()
|
||||
|
||||
def CheckRollbackAfterNoChanges(self):
|
||||
"""
|
||||
A rollback should also work when no changes were made to the database.
|
||||
"""
|
||||
self.cx.rollback()
|
||||
self.cx.rollback()
|
||||
|
||||
def CheckCursor(self):
|
||||
cu = self.cx.cursor()
|
||||
|
||||
def CheckFailedOpen(self):
|
||||
YOU_CANNOT_OPEN_THIS = "/foo/bar/bla/23534/mydb.db"
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
con = sqlite.connect(YOU_CANNOT_OPEN_THIS)
|
||||
|
||||
def CheckClose(self):
|
||||
self.cx.close()
|
||||
|
||||
def CheckExceptions(self):
|
||||
# Optional DB-API extension.
|
||||
self.assertEqual(self.cx.Warning, sqlite.Warning)
|
||||
self.assertEqual(self.cx.Error, sqlite.Error)
|
||||
self.assertEqual(self.cx.InterfaceError, sqlite.InterfaceError)
|
||||
self.assertEqual(self.cx.DatabaseError, sqlite.DatabaseError)
|
||||
self.assertEqual(self.cx.DataError, sqlite.DataError)
|
||||
self.assertEqual(self.cx.OperationalError, sqlite.OperationalError)
|
||||
self.assertEqual(self.cx.IntegrityError, sqlite.IntegrityError)
|
||||
self.assertEqual(self.cx.InternalError, sqlite.InternalError)
|
||||
self.assertEqual(self.cx.ProgrammingError, sqlite.ProgrammingError)
|
||||
self.assertEqual(self.cx.NotSupportedError, sqlite.NotSupportedError)
|
||||
|
||||
def CheckInTransaction(self):
|
||||
# Can't use db from setUp because we want to test initial state.
|
||||
cx = sqlite.connect(":memory:")
|
||||
cu = cx.cursor()
|
||||
self.assertEqual(cx.in_transaction, False)
|
||||
cu.execute("create table transactiontest(id integer primary key, name text)")
|
||||
self.assertEqual(cx.in_transaction, False)
|
||||
cu.execute("insert into transactiontest(name) values (?)", ("foo",))
|
||||
self.assertEqual(cx.in_transaction, True)
|
||||
cu.execute("select name from transactiontest where name=?", ["foo"])
|
||||
row = cu.fetchone()
|
||||
self.assertEqual(cx.in_transaction, True)
|
||||
cx.commit()
|
||||
self.assertEqual(cx.in_transaction, False)
|
||||
cu.execute("select name from transactiontest where name=?", ["foo"])
|
||||
row = cu.fetchone()
|
||||
self.assertEqual(cx.in_transaction, False)
|
||||
|
||||
def CheckInTransactionRO(self):
|
||||
with self.assertRaises(AttributeError):
|
||||
self.cx.in_transaction = True
|
||||
|
||||
def CheckOpenUri(self):
|
||||
if sqlite.sqlite_version_info < (3, 7, 7):
|
||||
with self.assertRaises(sqlite.NotSupportedError):
|
||||
sqlite.connect(':memory:', uri=True)
|
||||
return
|
||||
self.addCleanup(unlink, TESTFN)
|
||||
with sqlite.connect(TESTFN) as cx:
|
||||
cx.execute('create table test(id integer)')
|
||||
with sqlite.connect('file:' + TESTFN, uri=True) as cx:
|
||||
cx.execute('insert into test(id) values(0)')
|
||||
with sqlite.connect('file:' + TESTFN + '?mode=ro', uri=True) as cx:
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
cx.execute('insert into test(id) values(1)')
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info >= (3, 3, 1),
|
||||
'needs sqlite versions older than 3.3.1')
|
||||
def CheckSameThreadErrorOnOldVersion(self):
|
||||
with self.assertRaises(sqlite.NotSupportedError) as cm:
|
||||
sqlite.connect(':memory:', check_same_thread=False)
|
||||
self.assertEqual(str(cm.exception), 'shared connections not available')
|
||||
|
||||
class CursorTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.cx = sqlite.connect(":memory:")
|
||||
self.cu = self.cx.cursor()
|
||||
self.cu.execute("create table test(id integer primary key, name text, income number)")
|
||||
self.cu.execute("insert into test(name) values (?)", ("foo",))
|
||||
|
||||
def tearDown(self):
|
||||
self.cu.close()
|
||||
self.cx.close()
|
||||
|
||||
def CheckExecuteNoArgs(self):
|
||||
self.cu.execute("delete from test")
|
||||
|
||||
def CheckExecuteIllegalSql(self):
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cu.execute("select asdf")
|
||||
|
||||
def CheckExecuteTooMuchSql(self):
|
||||
with self.assertRaises(sqlite.Warning):
|
||||
self.cu.execute("select 5+4; select 4+5")
|
||||
|
||||
def CheckExecuteTooMuchSql2(self):
|
||||
self.cu.execute("select 5+4; -- foo bar")
|
||||
|
||||
def CheckExecuteTooMuchSql3(self):
|
||||
self.cu.execute("""
|
||||
select 5+4;
|
||||
|
||||
/*
|
||||
foo
|
||||
*/
|
||||
""")
|
||||
|
||||
def CheckExecuteWrongSqlArg(self):
|
||||
with self.assertRaises(ValueError):
|
||||
self.cu.execute(42)
|
||||
|
||||
def CheckExecuteArgInt(self):
|
||||
self.cu.execute("insert into test(id) values (?)", (42,))
|
||||
|
||||
def CheckExecuteArgFloat(self):
|
||||
self.cu.execute("insert into test(income) values (?)", (2500.32,))
|
||||
|
||||
def CheckExecuteArgString(self):
|
||||
self.cu.execute("insert into test(name) values (?)", ("Hugo",))
|
||||
|
||||
def CheckExecuteArgStringWithZeroByte(self):
|
||||
self.cu.execute("insert into test(name) values (?)", ("Hu\x00go",))
|
||||
|
||||
self.cu.execute("select name from test where id=?", (self.cu.lastrowid,))
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "Hu\x00go")
|
||||
|
||||
def CheckExecuteNonIterable(self):
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
self.cu.execute("insert into test(id) values (?)", 42)
|
||||
self.assertEqual(str(cm.exception), 'parameters are of unsupported type')
|
||||
|
||||
def CheckExecuteWrongNoOfArgs1(self):
|
||||
# too many parameters
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)", (17, "Egon"))
|
||||
|
||||
def CheckExecuteWrongNoOfArgs2(self):
|
||||
# too little parameters
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)")
|
||||
|
||||
def CheckExecuteWrongNoOfArgs3(self):
|
||||
# no parameters, parameters are needed
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("insert into test(id) values (?)")
|
||||
|
||||
def CheckExecuteParamList(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("select name from test where name=?", ["foo"])
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "foo")
|
||||
|
||||
def CheckExecuteParamSequence(self):
|
||||
class L(object):
|
||||
def __len__(self):
|
||||
return 1
|
||||
def __getitem__(self, x):
|
||||
assert x == 0
|
||||
return "foo"
|
||||
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("select name from test where name=?", L())
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "foo")
|
||||
|
||||
def CheckExecuteDictMapping(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("select name from test where name=:name", {"name": "foo"})
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "foo")
|
||||
|
||||
def CheckExecuteDictMapping_Mapping(self):
|
||||
class D(dict):
|
||||
def __missing__(self, key):
|
||||
return "foo"
|
||||
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("select name from test where name=:name", D())
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "foo")
|
||||
|
||||
def CheckExecuteDictMappingTooLittleArgs(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=:name and id=:id", {"name": "foo"})
|
||||
|
||||
def CheckExecuteDictMappingNoArgs(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=:name")
|
||||
|
||||
def CheckExecuteDictMappingUnnamed(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.execute("select name from test where name=?", {"name": "foo"})
|
||||
|
||||
def CheckClose(self):
|
||||
self.cu.close()
|
||||
|
||||
def CheckRowcountExecute(self):
|
||||
self.cu.execute("delete from test")
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("update test set name='bar'")
|
||||
self.assertEqual(self.cu.rowcount, 2)
|
||||
|
||||
def CheckRowcountSelect(self):
|
||||
"""
|
||||
pysqlite does not know the rowcount of SELECT statements, because we
|
||||
don't fetch all rows after executing the select statement. The rowcount
|
||||
has thus to be -1.
|
||||
"""
|
||||
self.cu.execute("select 5 union select 6")
|
||||
self.assertEqual(self.cu.rowcount, -1)
|
||||
|
||||
def CheckRowcountExecutemany(self):
|
||||
self.cu.execute("delete from test")
|
||||
self.cu.executemany("insert into test(name) values (?)", [(1,), (2,), (3,)])
|
||||
self.assertEqual(self.cu.rowcount, 3)
|
||||
|
||||
def CheckTotalChanges(self):
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.cu.execute("insert into test(name) values ('foo')")
|
||||
self.assertLess(2, self.cx.total_changes, msg='total changes reported wrong value')
|
||||
|
||||
# Checks for executemany:
|
||||
# Sequences are required by the DB-API, iterators
|
||||
# enhancements in pysqlite.
|
||||
|
||||
def CheckExecuteManySequence(self):
|
||||
self.cu.executemany("insert into test(income) values (?)", [(x,) for x in range(100, 110)])
|
||||
|
||||
def CheckExecuteManyIterator(self):
|
||||
class MyIter:
|
||||
def __init__(self):
|
||||
self.value = 5
|
||||
|
||||
def __next__(self):
|
||||
if self.value == 10:
|
||||
raise StopIteration
|
||||
else:
|
||||
self.value += 1
|
||||
return (self.value,)
|
||||
|
||||
self.cu.executemany("insert into test(income) values (?)", MyIter())
|
||||
|
||||
def CheckExecuteManyGenerator(self):
|
||||
def mygen():
|
||||
for i in range(5):
|
||||
yield (i,)
|
||||
|
||||
self.cu.executemany("insert into test(income) values (?)", mygen())
|
||||
|
||||
def CheckExecuteManyWrongSqlArg(self):
|
||||
with self.assertRaises(ValueError):
|
||||
self.cu.executemany(42, [(3,)])
|
||||
|
||||
def CheckExecuteManySelect(self):
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
self.cu.executemany("select ?", [(3,)])
|
||||
|
||||
def CheckExecuteManyNotIterable(self):
|
||||
with self.assertRaises(TypeError):
|
||||
self.cu.executemany("insert into test(income) values (?)", 42)
|
||||
|
||||
def CheckFetchIter(self):
|
||||
# Optional DB-API extension.
|
||||
self.cu.execute("delete from test")
|
||||
self.cu.execute("insert into test(id) values (?)", (5,))
|
||||
self.cu.execute("insert into test(id) values (?)", (6,))
|
||||
self.cu.execute("select id from test order by id")
|
||||
lst = []
|
||||
for row in self.cu:
|
||||
lst.append(row[0])
|
||||
self.assertEqual(lst[0], 5)
|
||||
self.assertEqual(lst[1], 6)
|
||||
|
||||
def CheckFetchone(self):
|
||||
self.cu.execute("select name from test")
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row[0], "foo")
|
||||
row = self.cu.fetchone()
|
||||
self.assertEqual(row, None)
|
||||
|
||||
def CheckFetchoneNoStatement(self):
|
||||
cur = self.cx.cursor()
|
||||
row = cur.fetchone()
|
||||
self.assertEqual(row, None)
|
||||
|
||||
def CheckArraySize(self):
|
||||
# must default ot 1
|
||||
self.assertEqual(self.cu.arraysize, 1)
|
||||
|
||||
# now set to 2
|
||||
self.cu.arraysize = 2
|
||||
|
||||
# now make the query return 3 rows
|
||||
self.cu.execute("delete from test")
|
||||
self.cu.execute("insert into test(name) values ('A')")
|
||||
self.cu.execute("insert into test(name) values ('B')")
|
||||
self.cu.execute("insert into test(name) values ('C')")
|
||||
self.cu.execute("select name from test")
|
||||
res = self.cu.fetchmany()
|
||||
|
||||
self.assertEqual(len(res), 2)
|
||||
|
||||
def CheckFetchmany(self):
|
||||
self.cu.execute("select name from test")
|
||||
res = self.cu.fetchmany(100)
|
||||
self.assertEqual(len(res), 1)
|
||||
res = self.cu.fetchmany(100)
|
||||
self.assertEqual(res, [])
|
||||
|
||||
def CheckFetchmanyKwArg(self):
|
||||
"""Checks if fetchmany works with keyword arguments"""
|
||||
self.cu.execute("select name from test")
|
||||
res = self.cu.fetchmany(size=100)
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
def CheckFetchall(self):
|
||||
self.cu.execute("select name from test")
|
||||
res = self.cu.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
res = self.cu.fetchall()
|
||||
self.assertEqual(res, [])
|
||||
|
||||
def CheckSetinputsizes(self):
|
||||
self.cu.setinputsizes([3, 4, 5])
|
||||
|
||||
def CheckSetoutputsize(self):
|
||||
self.cu.setoutputsize(5, 0)
|
||||
|
||||
def CheckSetoutputsizeNoColumn(self):
|
||||
self.cu.setoutputsize(42)
|
||||
|
||||
def CheckCursorConnection(self):
|
||||
# Optional DB-API extension.
|
||||
self.assertEqual(self.cu.connection, self.cx)
|
||||
|
||||
def CheckWrongCursorCallable(self):
|
||||
with self.assertRaises(TypeError):
|
||||
def f(): pass
|
||||
cur = self.cx.cursor(f)
|
||||
|
||||
def CheckCursorWrongClass(self):
|
||||
class Foo: pass
|
||||
foo = Foo()
|
||||
with self.assertRaises(TypeError):
|
||||
cur = sqlite.Cursor(foo)
|
||||
|
||||
@unittest.skipUnless(threading, 'This test requires threading.')
|
||||
class ThreadTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.cur = self.con.cursor()
|
||||
self.cur.execute("create table test(id integer primary key, name text, bin binary, ratio number, ts timestamp)")
|
||||
|
||||
def tearDown(self):
|
||||
self.cur.close()
|
||||
self.con.close()
|
||||
|
||||
def CheckConCursor(self):
|
||||
def run(con, errors):
|
||||
try:
|
||||
cur = con.cursor()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckConCommit(self):
|
||||
def run(con, errors):
|
||||
try:
|
||||
con.commit()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckConRollback(self):
|
||||
def run(con, errors):
|
||||
try:
|
||||
con.rollback()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckConClose(self):
|
||||
def run(con, errors):
|
||||
try:
|
||||
con.close()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckCurImplicitBegin(self):
|
||||
def run(cur, errors):
|
||||
try:
|
||||
cur.execute("insert into test(name) values ('a')")
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckCurClose(self):
|
||||
def run(cur, errors):
|
||||
try:
|
||||
cur.close()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckCurExecute(self):
|
||||
def run(cur, errors):
|
||||
try:
|
||||
cur.execute("select name from test")
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
self.cur.execute("insert into test(name) values ('a')")
|
||||
t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
def CheckCurIterNext(self):
|
||||
def run(cur, errors):
|
||||
try:
|
||||
row = cur.fetchone()
|
||||
errors.append("did not raise ProgrammingError")
|
||||
return
|
||||
except sqlite.ProgrammingError:
|
||||
return
|
||||
except:
|
||||
errors.append("raised wrong exception")
|
||||
|
||||
errors = []
|
||||
self.cur.execute("insert into test(name) values ('a')")
|
||||
self.cur.execute("select name from test")
|
||||
t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
|
||||
t.start()
|
||||
t.join()
|
||||
if len(errors) > 0:
|
||||
self.fail("\n".join(errors))
|
||||
|
||||
class ConstructorTests(unittest.TestCase):
|
||||
def CheckDate(self):
|
||||
d = sqlite.Date(2004, 10, 28)
|
||||
|
||||
def CheckTime(self):
|
||||
t = sqlite.Time(12, 39, 35)
|
||||
|
||||
def CheckTimestamp(self):
|
||||
ts = sqlite.Timestamp(2004, 10, 28, 12, 39, 35)
|
||||
|
||||
def CheckDateFromTicks(self):
|
||||
d = sqlite.DateFromTicks(42)
|
||||
|
||||
def CheckTimeFromTicks(self):
|
||||
t = sqlite.TimeFromTicks(42)
|
||||
|
||||
def CheckTimestampFromTicks(self):
|
||||
ts = sqlite.TimestampFromTicks(42)
|
||||
|
||||
def CheckBinary(self):
|
||||
b = sqlite.Binary(b"\0'")
|
||||
|
||||
class ExtensionTests(unittest.TestCase):
|
||||
def CheckScriptStringSql(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.executescript("""
|
||||
-- bla bla
|
||||
/* a stupid comment */
|
||||
create table a(i);
|
||||
insert into a(i) values (5);
|
||||
""")
|
||||
cur.execute("select i from a")
|
||||
res = cur.fetchone()[0]
|
||||
self.assertEqual(res, 5)
|
||||
|
||||
def CheckScriptSyntaxError(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
cur.executescript("create table test(x); asdf; create table test2(x)")
|
||||
|
||||
def CheckScriptErrorNormal(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
cur.executescript("create table test(sadfsadfdsa); select foo from hurz;")
|
||||
|
||||
def CheckCursorExecutescriptAsBytes(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
with self.assertRaises(ValueError) as cm:
|
||||
cur.executescript(b"create table test(foo); insert into test(foo) values (5);")
|
||||
self.assertEqual(str(cm.exception), 'script argument must be unicode.')
|
||||
|
||||
def CheckConnectionExecute(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
result = con.execute("select 5").fetchone()[0]
|
||||
self.assertEqual(result, 5, "Basic test of Connection.execute")
|
||||
|
||||
def CheckConnectionExecutemany(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.execute("create table test(foo)")
|
||||
con.executemany("insert into test(foo) values (?)", [(3,), (4,)])
|
||||
result = con.execute("select foo from test order by foo").fetchall()
|
||||
self.assertEqual(result[0][0], 3, "Basic test of Connection.executemany")
|
||||
self.assertEqual(result[1][0], 4, "Basic test of Connection.executemany")
|
||||
|
||||
def CheckConnectionExecutescript(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.executescript("create table test(foo); insert into test(foo) values (5);")
|
||||
result = con.execute("select foo from test").fetchone()[0]
|
||||
self.assertEqual(result, 5, "Basic test of Connection.executescript")
|
||||
|
||||
class ClosedConTests(unittest.TestCase):
|
||||
def CheckClosedConCursor(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur = con.cursor()
|
||||
|
||||
def CheckClosedConCommit(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.commit()
|
||||
|
||||
def CheckClosedConRollback(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.rollback()
|
||||
|
||||
def CheckClosedCurExecute(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
con.close()
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur.execute("select 4")
|
||||
|
||||
def CheckClosedCreateFunction(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def f(x): return 17
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_function("foo", 1, f)
|
||||
|
||||
def CheckClosedCreateAggregate(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
class Agg:
|
||||
def __init__(self):
|
||||
pass
|
||||
def step(self, x):
|
||||
pass
|
||||
def finalize(self):
|
||||
return 17
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_aggregate("foo", 1, Agg)
|
||||
|
||||
def CheckClosedSetAuthorizer(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def authorizer(*args):
|
||||
return sqlite.DENY
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.set_authorizer(authorizer)
|
||||
|
||||
def CheckClosedSetProgressCallback(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
def progress(): pass
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.set_progress_handler(progress, 100)
|
||||
|
||||
def CheckClosedCall(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.close()
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con()
|
||||
|
||||
class ClosedCurTests(unittest.TestCase):
|
||||
def CheckClosed(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.close()
|
||||
|
||||
for method_name in ("execute", "executemany", "executescript", "fetchall", "fetchmany", "fetchone"):
|
||||
if method_name in ("execute", "executescript"):
|
||||
params = ("select 4 union select 5",)
|
||||
elif method_name == "executemany":
|
||||
params = ("insert into foo(bar) values (?)", [(3,), (4,)])
|
||||
else:
|
||||
params = []
|
||||
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
method = getattr(cur, method_name)
|
||||
method(*params)
|
||||
|
||||
|
||||
class SqliteOnConflictTests(unittest.TestCase):
|
||||
"""
|
||||
Tests for SQLite's "insert on conflict" feature.
|
||||
|
||||
See https://www.sqlite.org/lang_conflict.html for details.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
self.cx = sqlite.connect(":memory:")
|
||||
self.cu = self.cx.cursor()
|
||||
self.cu.execute("""
|
||||
CREATE TABLE test(
|
||||
id INTEGER PRIMARY KEY, name TEXT, unique_name TEXT UNIQUE
|
||||
);
|
||||
""")
|
||||
|
||||
def tearDown(self):
|
||||
self.cu.close()
|
||||
self.cx.close()
|
||||
|
||||
def CheckOnConflictRollbackWithExplicitTransaction(self):
|
||||
self.cx.isolation_level = None # autocommit mode
|
||||
self.cu = self.cx.cursor()
|
||||
# Start an explicit transaction.
|
||||
self.cu.execute("BEGIN")
|
||||
self.cu.execute("INSERT INTO test(name) VALUES ('abort_test')")
|
||||
self.cu.execute("INSERT OR ROLLBACK INTO test(unique_name) VALUES ('foo')")
|
||||
with self.assertRaises(sqlite.IntegrityError):
|
||||
self.cu.execute("INSERT OR ROLLBACK INTO test(unique_name) VALUES ('foo')")
|
||||
# Use connection to commit.
|
||||
self.cx.commit()
|
||||
self.cu.execute("SELECT name, unique_name from test")
|
||||
# Transaction should have rolled back and nothing should be in table.
|
||||
self.assertEqual(self.cu.fetchall(), [])
|
||||
|
||||
def CheckOnConflictAbortRaisesWithExplicitTransactions(self):
|
||||
# Abort cancels the current sql statement but doesn't change anything
|
||||
# about the current transaction.
|
||||
self.cx.isolation_level = None # autocommit mode
|
||||
self.cu = self.cx.cursor()
|
||||
# Start an explicit transaction.
|
||||
self.cu.execute("BEGIN")
|
||||
self.cu.execute("INSERT INTO test(name) VALUES ('abort_test')")
|
||||
self.cu.execute("INSERT OR ABORT INTO test(unique_name) VALUES ('foo')")
|
||||
with self.assertRaises(sqlite.IntegrityError):
|
||||
self.cu.execute("INSERT OR ABORT INTO test(unique_name) VALUES ('foo')")
|
||||
self.cx.commit()
|
||||
self.cu.execute("SELECT name, unique_name FROM test")
|
||||
# Expect the first two inserts to work, third to do nothing.
|
||||
self.assertEqual(self.cu.fetchall(), [('abort_test', None), (None, 'foo',)])
|
||||
|
||||
def CheckOnConflictRollbackWithoutTransaction(self):
|
||||
# Start of implicit transaction
|
||||
self.cu.execute("INSERT INTO test(name) VALUES ('abort_test')")
|
||||
self.cu.execute("INSERT OR ROLLBACK INTO test(unique_name) VALUES ('foo')")
|
||||
with self.assertRaises(sqlite.IntegrityError):
|
||||
self.cu.execute("INSERT OR ROLLBACK INTO test(unique_name) VALUES ('foo')")
|
||||
self.cu.execute("SELECT name, unique_name FROM test")
|
||||
# Implicit transaction is rolled back on error.
|
||||
self.assertEqual(self.cu.fetchall(), [])
|
||||
|
||||
def CheckOnConflictAbortRaisesWithoutTransactions(self):
|
||||
# Abort cancels the current sql statement but doesn't change anything
|
||||
# about the current transaction.
|
||||
self.cu.execute("INSERT INTO test(name) VALUES ('abort_test')")
|
||||
self.cu.execute("INSERT OR ABORT INTO test(unique_name) VALUES ('foo')")
|
||||
with self.assertRaises(sqlite.IntegrityError):
|
||||
self.cu.execute("INSERT OR ABORT INTO test(unique_name) VALUES ('foo')")
|
||||
# Make sure all other values were inserted.
|
||||
self.cu.execute("SELECT name, unique_name FROM test")
|
||||
self.assertEqual(self.cu.fetchall(), [('abort_test', None), (None, 'foo',)])
|
||||
|
||||
def CheckOnConflictFail(self):
|
||||
self.cu.execute("INSERT OR FAIL INTO test(unique_name) VALUES ('foo')")
|
||||
with self.assertRaises(sqlite.IntegrityError):
|
||||
self.cu.execute("INSERT OR FAIL INTO test(unique_name) VALUES ('foo')")
|
||||
self.assertEqual(self.cu.fetchall(), [])
|
||||
|
||||
def CheckOnConflictIgnore(self):
|
||||
self.cu.execute("INSERT OR IGNORE INTO test(unique_name) VALUES ('foo')")
|
||||
# Nothing should happen.
|
||||
self.cu.execute("INSERT OR IGNORE INTO test(unique_name) VALUES ('foo')")
|
||||
self.cu.execute("SELECT unique_name FROM test")
|
||||
self.assertEqual(self.cu.fetchall(), [('foo',)])
|
||||
|
||||
def CheckOnConflictReplace(self):
|
||||
self.cu.execute("INSERT OR REPLACE INTO test(name, unique_name) VALUES ('Data!', 'foo')")
|
||||
# There shouldn't be an IntegrityError exception.
|
||||
self.cu.execute("INSERT OR REPLACE INTO test(name, unique_name) VALUES ('Very different data!', 'foo')")
|
||||
self.cu.execute("SELECT name, unique_name FROM test")
|
||||
self.assertEqual(self.cu.fetchall(), [('Very different data!', 'foo')])
|
||||
|
||||
|
||||
def suite():
|
||||
module_suite = unittest.makeSuite(ModuleTests, "Check")
|
||||
connection_suite = unittest.makeSuite(ConnectionTests, "Check")
|
||||
cursor_suite = unittest.makeSuite(CursorTests, "Check")
|
||||
thread_suite = unittest.makeSuite(ThreadTests, "Check")
|
||||
constructor_suite = unittest.makeSuite(ConstructorTests, "Check")
|
||||
ext_suite = unittest.makeSuite(ExtensionTests, "Check")
|
||||
closed_con_suite = unittest.makeSuite(ClosedConTests, "Check")
|
||||
closed_cur_suite = unittest.makeSuite(ClosedCurTests, "Check")
|
||||
on_conflict_suite = unittest.makeSuite(SqliteOnConflictTests, "Check")
|
||||
return unittest.TestSuite((
|
||||
module_suite, connection_suite, cursor_suite, thread_suite,
|
||||
constructor_suite, ext_suite, closed_con_suite, closed_cur_suite,
|
||||
on_conflict_suite,
|
||||
))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,81 +0,0 @@
|
|||
# Author: Paul Kippes <kippesp@gmail.com>
|
||||
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
|
||||
class DumpTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.cx = sqlite.connect(":memory:")
|
||||
self.cu = self.cx.cursor()
|
||||
|
||||
def tearDown(self):
|
||||
self.cx.close()
|
||||
|
||||
def CheckTableDump(self):
|
||||
expected_sqls = [
|
||||
"""CREATE TABLE "index"("index" blob);"""
|
||||
,
|
||||
"""INSERT INTO "index" VALUES(X'01');"""
|
||||
,
|
||||
"""CREATE TABLE "quoted""table"("quoted""field" text);"""
|
||||
,
|
||||
"""INSERT INTO "quoted""table" VALUES('quoted''value');"""
|
||||
,
|
||||
"CREATE TABLE t1(id integer primary key, s1 text, " \
|
||||
"t1_i1 integer not null, i2 integer, unique (s1), " \
|
||||
"constraint t1_idx1 unique (i2));"
|
||||
,
|
||||
"INSERT INTO \"t1\" VALUES(1,'foo',10,20);"
|
||||
,
|
||||
"INSERT INTO \"t1\" VALUES(2,'foo2',30,30);"
|
||||
,
|
||||
"CREATE TABLE t2(id integer, t2_i1 integer, " \
|
||||
"t2_i2 integer, primary key (id)," \
|
||||
"foreign key(t2_i1) references t1(t1_i1));"
|
||||
,
|
||||
"CREATE TRIGGER trigger_1 update of t1_i1 on t1 " \
|
||||
"begin " \
|
||||
"update t2 set t2_i1 = new.t1_i1 where t2_i1 = old.t1_i1; " \
|
||||
"end;"
|
||||
,
|
||||
"CREATE VIEW v1 as select * from t1 left join t2 " \
|
||||
"using (id);"
|
||||
]
|
||||
[self.cu.execute(s) for s in expected_sqls]
|
||||
i = self.cx.iterdump()
|
||||
actual_sqls = [s for s in i]
|
||||
expected_sqls = ['BEGIN TRANSACTION;'] + expected_sqls + \
|
||||
['COMMIT;']
|
||||
[self.assertEqual(expected_sqls[i], actual_sqls[i])
|
||||
for i in range(len(expected_sqls))]
|
||||
|
||||
def CheckUnorderableRow(self):
|
||||
# iterdump() should be able to cope with unorderable row types (issue #15545)
|
||||
class UnorderableRow:
|
||||
def __init__(self, cursor, row):
|
||||
self.row = row
|
||||
def __getitem__(self, index):
|
||||
return self.row[index]
|
||||
self.cx.row_factory = UnorderableRow
|
||||
CREATE_ALPHA = """CREATE TABLE "alpha" ("one");"""
|
||||
CREATE_BETA = """CREATE TABLE "beta" ("two");"""
|
||||
expected = [
|
||||
"BEGIN TRANSACTION;",
|
||||
CREATE_ALPHA,
|
||||
CREATE_BETA,
|
||||
"COMMIT;"
|
||||
]
|
||||
self.cu.execute(CREATE_BETA)
|
||||
self.cu.execute(CREATE_ALPHA)
|
||||
got = list(self.cx.iterdump())
|
||||
self.assertEqual(expected, got)
|
||||
|
||||
def suite():
|
||||
return unittest.TestSuite(unittest.makeSuite(DumpTests, "Check"))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,293 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/factory.py: tests for the various factories in pysqlite
|
||||
#
|
||||
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
from collections.abc import Sequence
|
||||
|
||||
class MyConnection(sqlite.Connection):
|
||||
def __init__(self, *args, **kwargs):
|
||||
sqlite.Connection.__init__(self, *args, **kwargs)
|
||||
|
||||
def dict_factory(cursor, row):
|
||||
d = {}
|
||||
for idx, col in enumerate(cursor.description):
|
||||
d[col[0]] = row[idx]
|
||||
return d
|
||||
|
||||
class MyCursor(sqlite.Cursor):
|
||||
def __init__(self, *args, **kwargs):
|
||||
sqlite.Cursor.__init__(self, *args, **kwargs)
|
||||
self.row_factory = dict_factory
|
||||
|
||||
class ConnectionFactoryTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:", factory=MyConnection)
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
def CheckIsInstance(self):
|
||||
self.assertIsInstance(self.con, MyConnection)
|
||||
|
||||
class CursorFactoryTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
def CheckIsInstance(self):
|
||||
cur = self.con.cursor()
|
||||
self.assertIsInstance(cur, sqlite.Cursor)
|
||||
cur = self.con.cursor(MyCursor)
|
||||
self.assertIsInstance(cur, MyCursor)
|
||||
cur = self.con.cursor(factory=lambda con: MyCursor(con))
|
||||
self.assertIsInstance(cur, MyCursor)
|
||||
|
||||
def CheckInvalidFactory(self):
|
||||
# not a callable at all
|
||||
self.assertRaises(TypeError, self.con.cursor, None)
|
||||
# invalid callable with not exact one argument
|
||||
self.assertRaises(TypeError, self.con.cursor, lambda: None)
|
||||
# invalid callable returning non-cursor
|
||||
self.assertRaises(TypeError, self.con.cursor, lambda con: None)
|
||||
|
||||
class RowFactoryTestsBackwardsCompat(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
|
||||
def CheckIsProducedByFactory(self):
|
||||
cur = self.con.cursor(factory=MyCursor)
|
||||
cur.execute("select 4+5 as foo")
|
||||
row = cur.fetchone()
|
||||
self.assertIsInstance(row, dict)
|
||||
cur.close()
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
class RowFactoryTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
|
||||
def CheckCustomFactory(self):
|
||||
self.con.row_factory = lambda cur, row: list(row)
|
||||
row = self.con.execute("select 1, 2").fetchone()
|
||||
self.assertIsInstance(row, list)
|
||||
|
||||
def CheckSqliteRowIndex(self):
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
self.assertIsInstance(row, sqlite.Row)
|
||||
|
||||
col1, col2 = row["a"], row["b"]
|
||||
self.assertEqual(col1, 1, "by name: wrong result for column 'a'")
|
||||
self.assertEqual(col2, 2, "by name: wrong result for column 'a'")
|
||||
|
||||
col1, col2 = row["A"], row["B"]
|
||||
self.assertEqual(col1, 1, "by name: wrong result for column 'A'")
|
||||
self.assertEqual(col2, 2, "by name: wrong result for column 'B'")
|
||||
|
||||
self.assertEqual(row[0], 1, "by index: wrong result for column 0")
|
||||
self.assertEqual(row[1], 2, "by index: wrong result for column 1")
|
||||
self.assertEqual(row[-1], 2, "by index: wrong result for column -1")
|
||||
self.assertEqual(row[-2], 1, "by index: wrong result for column -2")
|
||||
|
||||
with self.assertRaises(IndexError):
|
||||
row['c']
|
||||
with self.assertRaises(IndexError):
|
||||
row[2]
|
||||
with self.assertRaises(IndexError):
|
||||
row[-3]
|
||||
with self.assertRaises(IndexError):
|
||||
row[2**1000]
|
||||
|
||||
def CheckSqliteRowSlice(self):
|
||||
# A sqlite.Row can be sliced like a list.
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1, 2, 3, 4").fetchone()
|
||||
self.assertEqual(row[0:0], ())
|
||||
self.assertEqual(row[0:1], (1,))
|
||||
self.assertEqual(row[1:3], (2, 3))
|
||||
self.assertEqual(row[3:1], ())
|
||||
# Explicit bounds are optional.
|
||||
self.assertEqual(row[1:], (2, 3, 4))
|
||||
self.assertEqual(row[:3], (1, 2, 3))
|
||||
# Slices can use negative indices.
|
||||
self.assertEqual(row[-2:-1], (3,))
|
||||
self.assertEqual(row[-2:], (3, 4))
|
||||
# Slicing supports steps.
|
||||
self.assertEqual(row[0:4:2], (1, 3))
|
||||
self.assertEqual(row[3:0:-2], (4, 2))
|
||||
|
||||
def CheckSqliteRowIter(self):
|
||||
"""Checks if the row object is iterable"""
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
for col in row:
|
||||
pass
|
||||
|
||||
def CheckSqliteRowAsTuple(self):
|
||||
"""Checks if the row object can be converted to a tuple"""
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
t = tuple(row)
|
||||
self.assertEqual(t, (row['a'], row['b']))
|
||||
|
||||
def CheckSqliteRowAsDict(self):
|
||||
"""Checks if the row object can be correctly converted to a dictionary"""
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
d = dict(row)
|
||||
self.assertEqual(d["a"], row["a"])
|
||||
self.assertEqual(d["b"], row["b"])
|
||||
|
||||
def CheckSqliteRowHashCmp(self):
|
||||
"""Checks if the row object compares and hashes correctly"""
|
||||
self.con.row_factory = sqlite.Row
|
||||
row_1 = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
row_2 = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
row_3 = self.con.execute("select 1 as a, 3 as b").fetchone()
|
||||
|
||||
self.assertEqual(row_1, row_1)
|
||||
self.assertEqual(row_1, row_2)
|
||||
self.assertTrue(row_2 != row_3)
|
||||
|
||||
self.assertFalse(row_1 != row_1)
|
||||
self.assertFalse(row_1 != row_2)
|
||||
self.assertFalse(row_2 == row_3)
|
||||
|
||||
self.assertEqual(row_1, row_2)
|
||||
self.assertEqual(hash(row_1), hash(row_2))
|
||||
self.assertNotEqual(row_1, row_3)
|
||||
self.assertNotEqual(hash(row_1), hash(row_3))
|
||||
|
||||
def CheckSqliteRowAsSequence(self):
|
||||
""" Checks if the row object can act like a sequence """
|
||||
self.con.row_factory = sqlite.Row
|
||||
row = self.con.execute("select 1 as a, 2 as b").fetchone()
|
||||
|
||||
as_tuple = tuple(row)
|
||||
self.assertEqual(list(reversed(row)), list(reversed(as_tuple)))
|
||||
self.assertIsInstance(row, Sequence)
|
||||
|
||||
def CheckFakeCursorClass(self):
|
||||
# Issue #24257: Incorrect use of PyObject_IsInstance() caused
|
||||
# segmentation fault.
|
||||
# Issue #27861: Also applies for cursor factory.
|
||||
class FakeCursor(str):
|
||||
__class__ = sqlite.Cursor
|
||||
self.con.row_factory = sqlite.Row
|
||||
self.assertRaises(TypeError, self.con.cursor, FakeCursor)
|
||||
self.assertRaises(TypeError, sqlite.Row, FakeCursor(), ())
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
class TextFactoryTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
|
||||
def CheckUnicode(self):
|
||||
austria = "Österreich"
|
||||
row = self.con.execute("select ?", (austria,)).fetchone()
|
||||
self.assertEqual(type(row[0]), str, "type of row[0] must be unicode")
|
||||
|
||||
def CheckString(self):
|
||||
self.con.text_factory = bytes
|
||||
austria = "Österreich"
|
||||
row = self.con.execute("select ?", (austria,)).fetchone()
|
||||
self.assertEqual(type(row[0]), bytes, "type of row[0] must be bytes")
|
||||
self.assertEqual(row[0], austria.encode("utf-8"), "column must equal original data in UTF-8")
|
||||
|
||||
def CheckCustom(self):
|
||||
self.con.text_factory = lambda x: str(x, "utf-8", "ignore")
|
||||
austria = "Österreich"
|
||||
row = self.con.execute("select ?", (austria,)).fetchone()
|
||||
self.assertEqual(type(row[0]), str, "type of row[0] must be unicode")
|
||||
self.assertTrue(row[0].endswith("reich"), "column must contain original data")
|
||||
|
||||
def CheckOptimizedUnicode(self):
|
||||
# In py3k, str objects are always returned when text_factory
|
||||
# is OptimizedUnicode
|
||||
self.con.text_factory = sqlite.OptimizedUnicode
|
||||
austria = "Österreich"
|
||||
germany = "Deutchland"
|
||||
a_row = self.con.execute("select ?", (austria,)).fetchone()
|
||||
d_row = self.con.execute("select ?", (germany,)).fetchone()
|
||||
self.assertEqual(type(a_row[0]), str, "type of non-ASCII row must be str")
|
||||
self.assertEqual(type(d_row[0]), str, "type of ASCII-only row must be str")
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
class TextFactoryTestsWithEmbeddedZeroBytes(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.con.execute("create table test (value text)")
|
||||
self.con.execute("insert into test (value) values (?)", ("a\x00b",))
|
||||
|
||||
def CheckString(self):
|
||||
# text_factory defaults to str
|
||||
row = self.con.execute("select value from test").fetchone()
|
||||
self.assertIs(type(row[0]), str)
|
||||
self.assertEqual(row[0], "a\x00b")
|
||||
|
||||
def CheckBytes(self):
|
||||
self.con.text_factory = bytes
|
||||
row = self.con.execute("select value from test").fetchone()
|
||||
self.assertIs(type(row[0]), bytes)
|
||||
self.assertEqual(row[0], b"a\x00b")
|
||||
|
||||
def CheckBytearray(self):
|
||||
self.con.text_factory = bytearray
|
||||
row = self.con.execute("select value from test").fetchone()
|
||||
self.assertIs(type(row[0]), bytearray)
|
||||
self.assertEqual(row[0], b"a\x00b")
|
||||
|
||||
def CheckCustom(self):
|
||||
# A custom factory should receive a bytes argument
|
||||
self.con.text_factory = lambda x: x
|
||||
row = self.con.execute("select value from test").fetchone()
|
||||
self.assertIs(type(row[0]), bytes)
|
||||
self.assertEqual(row[0], b"a\x00b")
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
def suite():
|
||||
connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check")
|
||||
cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check")
|
||||
row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check")
|
||||
row_suite = unittest.makeSuite(RowFactoryTests, "Check")
|
||||
text_suite = unittest.makeSuite(TextFactoryTests, "Check")
|
||||
text_zero_bytes_suite = unittest.makeSuite(TextFactoryTestsWithEmbeddedZeroBytes, "Check")
|
||||
return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite, text_zero_bytes_suite))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,264 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/hooks.py: tests for various SQLite-specific hooks
|
||||
#
|
||||
# Copyright (C) 2006-2007 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
|
||||
class CollationTests(unittest.TestCase):
|
||||
def CheckCreateCollationNotString(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
with self.assertRaises(TypeError):
|
||||
con.create_collation(None, lambda x, y: (x > y) - (x < y))
|
||||
|
||||
def CheckCreateCollationNotCallable(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
with self.assertRaises(TypeError) as cm:
|
||||
con.create_collation("X", 42)
|
||||
self.assertEqual(str(cm.exception), 'parameter must be callable')
|
||||
|
||||
def CheckCreateCollationNotAscii(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
con.create_collation("collä", lambda x, y: (x > y) - (x < y))
|
||||
|
||||
def CheckCreateCollationBadUpper(self):
|
||||
class BadUpperStr(str):
|
||||
def upper(self):
|
||||
return None
|
||||
con = sqlite.connect(":memory:")
|
||||
mycoll = lambda x, y: -((x > y) - (x < y))
|
||||
con.create_collation(BadUpperStr("mycoll"), mycoll)
|
||||
result = con.execute("""
|
||||
select x from (
|
||||
select 'a' as x
|
||||
union
|
||||
select 'b' as x
|
||||
) order by x collate mycoll
|
||||
""").fetchall()
|
||||
self.assertEqual(result[0][0], 'b')
|
||||
self.assertEqual(result[1][0], 'a')
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 1),
|
||||
'old SQLite versions crash on this test')
|
||||
def CheckCollationIsUsed(self):
|
||||
def mycoll(x, y):
|
||||
# reverse order
|
||||
return -((x > y) - (x < y))
|
||||
|
||||
con = sqlite.connect(":memory:")
|
||||
con.create_collation("mycoll", mycoll)
|
||||
sql = """
|
||||
select x from (
|
||||
select 'a' as x
|
||||
union
|
||||
select 'b' as x
|
||||
union
|
||||
select 'c' as x
|
||||
) order by x collate mycoll
|
||||
"""
|
||||
result = con.execute(sql).fetchall()
|
||||
self.assertEqual(result, [('c',), ('b',), ('a',)],
|
||||
msg='the expected order was not returned')
|
||||
|
||||
con.create_collation("mycoll", None)
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
result = con.execute(sql).fetchall()
|
||||
self.assertEqual(str(cm.exception), 'no such collation sequence: mycoll')
|
||||
|
||||
def CheckCollationReturnsLargeInteger(self):
|
||||
def mycoll(x, y):
|
||||
# reverse order
|
||||
return -((x > y) - (x < y)) * 2**32
|
||||
con = sqlite.connect(":memory:")
|
||||
con.create_collation("mycoll", mycoll)
|
||||
sql = """
|
||||
select x from (
|
||||
select 'a' as x
|
||||
union
|
||||
select 'b' as x
|
||||
union
|
||||
select 'c' as x
|
||||
) order by x collate mycoll
|
||||
"""
|
||||
result = con.execute(sql).fetchall()
|
||||
self.assertEqual(result, [('c',), ('b',), ('a',)],
|
||||
msg="the expected order was not returned")
|
||||
|
||||
def CheckCollationRegisterTwice(self):
|
||||
"""
|
||||
Register two different collation functions under the same name.
|
||||
Verify that the last one is actually used.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
con.create_collation("mycoll", lambda x, y: (x > y) - (x < y))
|
||||
con.create_collation("mycoll", lambda x, y: -((x > y) - (x < y)))
|
||||
result = con.execute("""
|
||||
select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
|
||||
""").fetchall()
|
||||
self.assertEqual(result[0][0], 'b')
|
||||
self.assertEqual(result[1][0], 'a')
|
||||
|
||||
def CheckDeregisterCollation(self):
|
||||
"""
|
||||
Register a collation, then deregister it. Make sure an error is raised if we try
|
||||
to use it.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
con.create_collation("mycoll", lambda x, y: (x > y) - (x < y))
|
||||
con.create_collation("mycoll", None)
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
|
||||
self.assertEqual(str(cm.exception), 'no such collation sequence: mycoll')
|
||||
|
||||
class ProgressTests(unittest.TestCase):
|
||||
def CheckProgressHandlerUsed(self):
|
||||
"""
|
||||
Test that the progress handler is invoked once it is set.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
progress_calls = []
|
||||
def progress():
|
||||
progress_calls.append(None)
|
||||
return 0
|
||||
con.set_progress_handler(progress, 1)
|
||||
con.execute("""
|
||||
create table foo(a, b)
|
||||
""")
|
||||
self.assertTrue(progress_calls)
|
||||
|
||||
|
||||
def CheckOpcodeCount(self):
|
||||
"""
|
||||
Test that the opcode argument is respected.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
progress_calls = []
|
||||
def progress():
|
||||
progress_calls.append(None)
|
||||
return 0
|
||||
con.set_progress_handler(progress, 1)
|
||||
curs = con.cursor()
|
||||
curs.execute("""
|
||||
create table foo (a, b)
|
||||
""")
|
||||
first_count = len(progress_calls)
|
||||
progress_calls = []
|
||||
con.set_progress_handler(progress, 2)
|
||||
curs.execute("""
|
||||
create table bar (a, b)
|
||||
""")
|
||||
second_count = len(progress_calls)
|
||||
self.assertGreaterEqual(first_count, second_count)
|
||||
|
||||
def CheckCancelOperation(self):
|
||||
"""
|
||||
Test that returning a non-zero value stops the operation in progress.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
progress_calls = []
|
||||
def progress():
|
||||
progress_calls.append(None)
|
||||
return 1
|
||||
con.set_progress_handler(progress, 1)
|
||||
curs = con.cursor()
|
||||
self.assertRaises(
|
||||
sqlite.OperationalError,
|
||||
curs.execute,
|
||||
"create table bar (a, b)")
|
||||
|
||||
def CheckClearHandler(self):
|
||||
"""
|
||||
Test that setting the progress handler to None clears the previously set handler.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
action = 0
|
||||
def progress():
|
||||
nonlocal action
|
||||
action = 1
|
||||
return 0
|
||||
con.set_progress_handler(progress, 1)
|
||||
con.set_progress_handler(None, 1)
|
||||
con.execute("select 1 union select 2 union select 3").fetchall()
|
||||
self.assertEqual(action, 0, "progress handler was not cleared")
|
||||
|
||||
class TraceCallbackTests(unittest.TestCase):
|
||||
def CheckTraceCallbackUsed(self):
|
||||
"""
|
||||
Test that the trace callback is invoked once it is set.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
traced_statements = []
|
||||
def trace(statement):
|
||||
traced_statements.append(statement)
|
||||
con.set_trace_callback(trace)
|
||||
con.execute("create table foo(a, b)")
|
||||
self.assertTrue(traced_statements)
|
||||
self.assertTrue(any("create table foo" in stmt for stmt in traced_statements))
|
||||
|
||||
def CheckClearTraceCallback(self):
|
||||
"""
|
||||
Test that setting the trace callback to None clears the previously set callback.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
traced_statements = []
|
||||
def trace(statement):
|
||||
traced_statements.append(statement)
|
||||
con.set_trace_callback(trace)
|
||||
con.set_trace_callback(None)
|
||||
con.execute("create table foo(a, b)")
|
||||
self.assertFalse(traced_statements, "trace callback was not cleared")
|
||||
|
||||
def CheckUnicodeContent(self):
|
||||
"""
|
||||
Test that the statement can contain unicode literals.
|
||||
"""
|
||||
unicode_value = '\xf6\xe4\xfc\xd6\xc4\xdc\xdf\u20ac'
|
||||
con = sqlite.connect(":memory:")
|
||||
traced_statements = []
|
||||
def trace(statement):
|
||||
traced_statements.append(statement)
|
||||
con.set_trace_callback(trace)
|
||||
con.execute("create table foo(x)")
|
||||
# Can't execute bound parameters as their values don't appear
|
||||
# in traced statements before SQLite 3.6.21
|
||||
# (cf. http://www.sqlite.org/draft/releaselog/3_6_21.html)
|
||||
con.execute('insert into foo(x) values ("%s")' % unicode_value)
|
||||
con.commit()
|
||||
self.assertTrue(any(unicode_value in stmt for stmt in traced_statements),
|
||||
"Unicode data %s garbled in trace callback: %s"
|
||||
% (ascii(unicode_value), ', '.join(map(ascii, traced_statements))))
|
||||
|
||||
|
||||
|
||||
def suite():
|
||||
collation_suite = unittest.makeSuite(CollationTests, "Check")
|
||||
progress_suite = unittest.makeSuite(ProgressTests, "Check")
|
||||
trace_suite = unittest.makeSuite(TraceCallbackTests, "Check")
|
||||
return unittest.TestSuite((collation_suite, progress_suite, trace_suite))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,389 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/regression.py: pysqlite regression tests
|
||||
#
|
||||
# Copyright (C) 2006-2010 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import datetime
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
|
||||
class RegressionTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
def CheckPragmaUserVersion(self):
|
||||
# This used to crash pysqlite because this pragma command returns NULL for the column name
|
||||
cur = self.con.cursor()
|
||||
cur.execute("pragma user_version")
|
||||
|
||||
def CheckPragmaSchemaVersion(self):
|
||||
# This still crashed pysqlite <= 2.2.1
|
||||
con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
|
||||
try:
|
||||
cur = self.con.cursor()
|
||||
cur.execute("pragma schema_version")
|
||||
finally:
|
||||
cur.close()
|
||||
con.close()
|
||||
|
||||
def CheckStatementReset(self):
|
||||
# pysqlite 2.1.0 to 2.2.0 have the problem that not all statements are
|
||||
# reset before a rollback, but only those that are still in the
|
||||
# statement cache. The others are not accessible from the connection object.
|
||||
con = sqlite.connect(":memory:", cached_statements=5)
|
||||
cursors = [con.cursor() for x in range(5)]
|
||||
cursors[0].execute("create table test(x)")
|
||||
for i in range(10):
|
||||
cursors[0].executemany("insert into test(x) values (?)", [(x,) for x in range(10)])
|
||||
|
||||
for i in range(5):
|
||||
cursors[i].execute(" " * i + "select x from test")
|
||||
|
||||
con.rollback()
|
||||
|
||||
def CheckColumnNameWithSpaces(self):
|
||||
cur = self.con.cursor()
|
||||
cur.execute('select 1 as "foo bar [datetime]"')
|
||||
self.assertEqual(cur.description[0][0], "foo bar")
|
||||
|
||||
cur.execute('select 1 as "foo baz"')
|
||||
self.assertEqual(cur.description[0][0], "foo baz")
|
||||
|
||||
def CheckStatementFinalizationOnCloseDb(self):
|
||||
# pysqlite versions <= 2.3.3 only finalized statements in the statement
|
||||
# cache when closing the database. statements that were still
|
||||
# referenced in cursors weren't closed and could provoke "
|
||||
# "OperationalError: Unable to close due to unfinalised statements".
|
||||
con = sqlite.connect(":memory:")
|
||||
cursors = []
|
||||
# default statement cache size is 100
|
||||
for i in range(105):
|
||||
cur = con.cursor()
|
||||
cursors.append(cur)
|
||||
cur.execute("select 1 x union select " + str(i))
|
||||
con.close()
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2), 'needs sqlite 3.2.2 or newer')
|
||||
def CheckOnConflictRollback(self):
|
||||
con = sqlite.connect(":memory:")
|
||||
con.execute("create table foo(x, unique(x) on conflict rollback)")
|
||||
con.execute("insert into foo(x) values (1)")
|
||||
try:
|
||||
con.execute("insert into foo(x) values (1)")
|
||||
except sqlite.DatabaseError:
|
||||
pass
|
||||
con.execute("insert into foo(x) values (2)")
|
||||
try:
|
||||
con.commit()
|
||||
except sqlite.OperationalError:
|
||||
self.fail("pysqlite knew nothing about the implicit ROLLBACK")
|
||||
|
||||
def CheckWorkaroundForBuggySqliteTransferBindings(self):
|
||||
"""
|
||||
pysqlite would crash with older SQLite versions unless
|
||||
a workaround is implemented.
|
||||
"""
|
||||
self.con.execute("create table foo(bar)")
|
||||
self.con.execute("drop table foo")
|
||||
self.con.execute("create table foo(bar)")
|
||||
|
||||
def CheckEmptyStatement(self):
|
||||
"""
|
||||
pysqlite used to segfault with SQLite versions 3.5.x. These return NULL
|
||||
for "no-operation" statements
|
||||
"""
|
||||
self.con.execute("")
|
||||
|
||||
def CheckTypeMapUsage(self):
|
||||
"""
|
||||
pysqlite until 2.4.1 did not rebuild the row_cast_map when recompiling
|
||||
a statement. This test exhibits the problem.
|
||||
"""
|
||||
SELECT = "select * from foo"
|
||||
con = sqlite.connect(":memory:",detect_types=sqlite.PARSE_DECLTYPES)
|
||||
con.execute("create table foo(bar timestamp)")
|
||||
con.execute("insert into foo(bar) values (?)", (datetime.datetime.now(),))
|
||||
con.execute(SELECT)
|
||||
con.execute("drop table foo")
|
||||
con.execute("create table foo(bar integer)")
|
||||
con.execute("insert into foo(bar) values (5)")
|
||||
con.execute(SELECT)
|
||||
|
||||
def CheckErrorMsgDecodeError(self):
|
||||
# When porting the module to Python 3.0, the error message about
|
||||
# decoding errors disappeared. This verifies they're back again.
|
||||
with self.assertRaises(sqlite.OperationalError) as cm:
|
||||
self.con.execute("select 'xxx' || ? || 'yyy' colname",
|
||||
(bytes(bytearray([250])),)).fetchone()
|
||||
msg = "Could not decode to UTF-8 column 'colname' with text 'xxx"
|
||||
self.assertIn(msg, str(cm.exception))
|
||||
|
||||
def CheckRegisterAdapter(self):
|
||||
"""
|
||||
See issue 3312.
|
||||
"""
|
||||
self.assertRaises(TypeError, sqlite.register_adapter, {}, None)
|
||||
|
||||
def CheckSetIsolationLevel(self):
|
||||
# See issue 27881.
|
||||
class CustomStr(str):
|
||||
def upper(self):
|
||||
return None
|
||||
def __del__(self):
|
||||
con.isolation_level = ""
|
||||
|
||||
con = sqlite.connect(":memory:")
|
||||
con.isolation_level = None
|
||||
for level in "", "DEFERRED", "IMMEDIATE", "EXCLUSIVE":
|
||||
with self.subTest(level=level):
|
||||
con.isolation_level = level
|
||||
con.isolation_level = level.lower()
|
||||
con.isolation_level = level.capitalize()
|
||||
con.isolation_level = CustomStr(level)
|
||||
|
||||
# setting isolation_level failure should not alter previous state
|
||||
con.isolation_level = None
|
||||
con.isolation_level = "DEFERRED"
|
||||
pairs = [
|
||||
(1, TypeError), (b'', TypeError), ("abc", ValueError),
|
||||
("IMMEDIATE\0EXCLUSIVE", ValueError), ("\xe9", ValueError),
|
||||
]
|
||||
for value, exc in pairs:
|
||||
with self.subTest(level=value):
|
||||
with self.assertRaises(exc):
|
||||
con.isolation_level = value
|
||||
self.assertEqual(con.isolation_level, "DEFERRED")
|
||||
|
||||
def CheckCursorConstructorCallCheck(self):
|
||||
"""
|
||||
Verifies that cursor methods check whether base class __init__ was
|
||||
called.
|
||||
"""
|
||||
class Cursor(sqlite.Cursor):
|
||||
def __init__(self, con):
|
||||
pass
|
||||
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = Cursor(con)
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur.execute("select 4+5").fetchall()
|
||||
|
||||
def CheckStrSubclass(self):
|
||||
"""
|
||||
The Python 3.0 port of the module didn't cope with values of subclasses of str.
|
||||
"""
|
||||
class MyStr(str): pass
|
||||
self.con.execute("select ?", (MyStr("abc"),))
|
||||
|
||||
def CheckConnectionConstructorCallCheck(self):
|
||||
"""
|
||||
Verifies that connection methods check whether base class __init__ was
|
||||
called.
|
||||
"""
|
||||
class Connection(sqlite.Connection):
|
||||
def __init__(self, name):
|
||||
pass
|
||||
|
||||
con = Connection(":memory:")
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur = con.cursor()
|
||||
|
||||
def CheckCursorRegistration(self):
|
||||
"""
|
||||
Verifies that subclassed cursor classes are correctly registered with
|
||||
the connection object, too. (fetch-across-rollback problem)
|
||||
"""
|
||||
class Connection(sqlite.Connection):
|
||||
def cursor(self):
|
||||
return Cursor(self)
|
||||
|
||||
class Cursor(sqlite.Cursor):
|
||||
def __init__(self, con):
|
||||
sqlite.Cursor.__init__(self, con)
|
||||
|
||||
con = Connection(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.execute("create table foo(x)")
|
||||
cur.executemany("insert into foo(x) values (?)", [(3,), (4,), (5,)])
|
||||
cur.execute("select x from foo")
|
||||
con.rollback()
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
cur.fetchall()
|
||||
|
||||
def CheckAutoCommit(self):
|
||||
"""
|
||||
Verifies that creating a connection in autocommit mode works.
|
||||
2.5.3 introduced a regression so that these could no longer
|
||||
be created.
|
||||
"""
|
||||
con = sqlite.connect(":memory:", isolation_level=None)
|
||||
|
||||
def CheckPragmaAutocommit(self):
|
||||
"""
|
||||
Verifies that running a PRAGMA statement that does an autocommit does
|
||||
work. This did not work in 2.5.3/2.5.4.
|
||||
"""
|
||||
cur = self.con.cursor()
|
||||
cur.execute("create table foo(bar)")
|
||||
cur.execute("insert into foo(bar) values (5)")
|
||||
|
||||
cur.execute("pragma page_size")
|
||||
row = cur.fetchone()
|
||||
|
||||
def CheckSetDict(self):
|
||||
"""
|
||||
See http://bugs.python.org/issue7478
|
||||
|
||||
It was possible to successfully register callbacks that could not be
|
||||
hashed. Return codes of PyDict_SetItem were not checked properly.
|
||||
"""
|
||||
class NotHashable:
|
||||
def __call__(self, *args, **kw):
|
||||
pass
|
||||
def __hash__(self):
|
||||
raise TypeError()
|
||||
var = NotHashable()
|
||||
self.assertRaises(TypeError, self.con.create_function, var)
|
||||
self.assertRaises(TypeError, self.con.create_aggregate, var)
|
||||
self.assertRaises(TypeError, self.con.set_authorizer, var)
|
||||
self.assertRaises(TypeError, self.con.set_progress_handler, var)
|
||||
|
||||
def CheckConnectionCall(self):
|
||||
"""
|
||||
Call a connection with a non-string SQL request: check error handling
|
||||
of the statement constructor.
|
||||
"""
|
||||
self.assertRaises(sqlite.Warning, self.con, 1)
|
||||
|
||||
def CheckCollation(self):
|
||||
def collation_cb(a, b):
|
||||
return 1
|
||||
self.assertRaises(sqlite.ProgrammingError, self.con.create_collation,
|
||||
# Lone surrogate cannot be encoded to the default encoding (utf8)
|
||||
"\uDC80", collation_cb)
|
||||
|
||||
def CheckRecursiveCursorUse(self):
|
||||
"""
|
||||
http://bugs.python.org/issue10811
|
||||
|
||||
Recursively using a cursor, such as when reusing it from a generator led to segfaults.
|
||||
Now we catch recursive cursor usage and raise a ProgrammingError.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
|
||||
cur = con.cursor()
|
||||
cur.execute("create table a (bar)")
|
||||
cur.execute("create table b (baz)")
|
||||
|
||||
def foo():
|
||||
cur.execute("insert into a (bar) values (?)", (1,))
|
||||
yield 1
|
||||
|
||||
with self.assertRaises(sqlite.ProgrammingError):
|
||||
cur.executemany("insert into b (baz) values (?)",
|
||||
((i,) for i in foo()))
|
||||
|
||||
def CheckConvertTimestampMicrosecondPadding(self):
|
||||
"""
|
||||
http://bugs.python.org/issue14720
|
||||
|
||||
The microsecond parsing of convert_timestamp() should pad with zeros,
|
||||
since the microsecond string "456" actually represents "456000".
|
||||
"""
|
||||
|
||||
con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
|
||||
cur = con.cursor()
|
||||
cur.execute("CREATE TABLE t (x TIMESTAMP)")
|
||||
|
||||
# Microseconds should be 456000
|
||||
cur.execute("INSERT INTO t (x) VALUES ('2012-04-04 15:06:00.456')")
|
||||
|
||||
# Microseconds should be truncated to 123456
|
||||
cur.execute("INSERT INTO t (x) VALUES ('2012-04-04 15:06:00.123456789')")
|
||||
|
||||
cur.execute("SELECT * FROM t")
|
||||
values = [x[0] for x in cur.fetchall()]
|
||||
|
||||
self.assertEqual(values, [
|
||||
datetime.datetime(2012, 4, 4, 15, 6, 0, 456000),
|
||||
datetime.datetime(2012, 4, 4, 15, 6, 0, 123456),
|
||||
])
|
||||
|
||||
def CheckInvalidIsolationLevelType(self):
|
||||
# isolation level is a string, not an integer
|
||||
self.assertRaises(TypeError,
|
||||
sqlite.connect, ":memory:", isolation_level=123)
|
||||
|
||||
|
||||
def CheckNullCharacter(self):
|
||||
# Issue #21147
|
||||
con = sqlite.connect(":memory:")
|
||||
self.assertRaises(ValueError, con, "\0select 1")
|
||||
self.assertRaises(ValueError, con, "select 1\0")
|
||||
cur = con.cursor()
|
||||
self.assertRaises(ValueError, cur.execute, " \0select 2")
|
||||
self.assertRaises(ValueError, cur.execute, "select 2\0")
|
||||
|
||||
def CheckCommitCursorReset(self):
|
||||
"""
|
||||
Connection.commit() did reset cursors, which made sqlite3
|
||||
to return rows multiple times when fetched from cursors
|
||||
after commit. See issues 10513 and 23129 for details.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
con.executescript("""
|
||||
create table t(c);
|
||||
create table t2(c);
|
||||
insert into t values(0);
|
||||
insert into t values(1);
|
||||
insert into t values(2);
|
||||
""")
|
||||
|
||||
self.assertEqual(con.isolation_level, "")
|
||||
|
||||
counter = 0
|
||||
for i, row in enumerate(con.execute("select c from t")):
|
||||
with self.subTest(i=i, row=row):
|
||||
con.execute("insert into t2(c) values (?)", (i,))
|
||||
con.commit()
|
||||
if counter == 0:
|
||||
self.assertEqual(row[0], 0)
|
||||
elif counter == 1:
|
||||
self.assertEqual(row[0], 1)
|
||||
elif counter == 2:
|
||||
self.assertEqual(row[0], 2)
|
||||
counter += 1
|
||||
self.assertEqual(counter, 3, "should have returned exactly three rows")
|
||||
|
||||
|
||||
def suite():
|
||||
regression_suite = unittest.makeSuite(RegressionTests, "Check")
|
||||
return unittest.TestSuite((regression_suite,))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,185 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/transactions.py: tests transactions
|
||||
#
|
||||
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import os, unittest
|
||||
import sqlite3 as sqlite
|
||||
|
||||
def get_db_path():
|
||||
return "sqlite_testdb"
|
||||
|
||||
class TransactionTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
try:
|
||||
os.remove(get_db_path())
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
self.con1 = sqlite.connect(get_db_path(), timeout=0.1)
|
||||
self.cur1 = self.con1.cursor()
|
||||
|
||||
self.con2 = sqlite.connect(get_db_path(), timeout=0.1)
|
||||
self.cur2 = self.con2.cursor()
|
||||
|
||||
def tearDown(self):
|
||||
self.cur1.close()
|
||||
self.con1.close()
|
||||
|
||||
self.cur2.close()
|
||||
self.con2.close()
|
||||
|
||||
try:
|
||||
os.unlink(get_db_path())
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def CheckDMLdoesAutoCommitBefore(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.cur1.execute("create table test2(j)")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
def CheckInsertStartsTransaction(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 0)
|
||||
|
||||
def CheckUpdateStartsTransaction(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.con1.commit()
|
||||
self.cur1.execute("update test set i=6")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchone()[0]
|
||||
self.assertEqual(res, 5)
|
||||
|
||||
def CheckDeleteStartsTransaction(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.con1.commit()
|
||||
self.cur1.execute("delete from test")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
def CheckReplaceStartsTransaction(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.con1.commit()
|
||||
self.cur1.execute("replace into test(i) values (6)")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
self.assertEqual(res[0][0], 5)
|
||||
|
||||
def CheckToggleAutoCommit(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.con1.isolation_level = None
|
||||
self.assertEqual(self.con1.isolation_level, None)
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
self.con1.isolation_level = "DEFERRED"
|
||||
self.assertEqual(self.con1.isolation_level , "DEFERRED")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
self.cur2.execute("select i from test")
|
||||
res = self.cur2.fetchall()
|
||||
self.assertEqual(len(res), 1)
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2),
|
||||
'test hangs on sqlite versions older than 3.2.2')
|
||||
def CheckRaiseTimeout(self):
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cur2.execute("insert into test(i) values (5)")
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 2, 2),
|
||||
'test hangs on sqlite versions older than 3.2.2')
|
||||
def CheckLocking(self):
|
||||
"""
|
||||
This tests the improved concurrency with pysqlite 2.3.4. You needed
|
||||
to roll back con2 before you could commit con1.
|
||||
"""
|
||||
self.cur1.execute("create table test(i)")
|
||||
self.cur1.execute("insert into test(i) values (5)")
|
||||
with self.assertRaises(sqlite.OperationalError):
|
||||
self.cur2.execute("insert into test(i) values (5)")
|
||||
# NO self.con2.rollback() HERE!!!
|
||||
self.con1.commit()
|
||||
|
||||
def CheckRollbackCursorConsistency(self):
|
||||
"""
|
||||
Checks if cursors on the connection are set into a "reset" state
|
||||
when a rollback is done on the connection.
|
||||
"""
|
||||
con = sqlite.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(x)")
|
||||
cur.execute("insert into test(x) values (5)")
|
||||
cur.execute("select 1 union select 2 union select 3")
|
||||
|
||||
con.rollback()
|
||||
with self.assertRaises(sqlite.InterfaceError):
|
||||
cur.fetchall()
|
||||
|
||||
class SpecialCommandTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.cur = self.con.cursor()
|
||||
|
||||
def CheckVacuum(self):
|
||||
self.cur.execute("create table test(i)")
|
||||
self.cur.execute("insert into test(i) values (5)")
|
||||
self.cur.execute("vacuum")
|
||||
|
||||
def CheckDropTable(self):
|
||||
self.cur.execute("create table test(i)")
|
||||
self.cur.execute("insert into test(i) values (5)")
|
||||
self.cur.execute("drop table test")
|
||||
|
||||
def CheckPragma(self):
|
||||
self.cur.execute("create table test(i)")
|
||||
self.cur.execute("insert into test(i) values (5)")
|
||||
self.cur.execute("pragma count_changes=1")
|
||||
|
||||
def tearDown(self):
|
||||
self.cur.close()
|
||||
self.con.close()
|
||||
|
||||
def suite():
|
||||
default_suite = unittest.makeSuite(TransactionTests, "Check")
|
||||
special_command_suite = unittest.makeSuite(SpecialCommandTests, "Check")
|
||||
return unittest.TestSuite((default_suite, special_command_suite))
|
||||
|
||||
def test():
|
||||
runner = unittest.TextTestRunner()
|
||||
runner.run(suite())
|
||||
|
||||
if __name__ == "__main__":
|
||||
test()
|
|
@ -1,421 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/types.py: tests for type conversion and detection
|
||||
#
|
||||
# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import datetime
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
try:
|
||||
import zlib
|
||||
except ImportError:
|
||||
zlib = None
|
||||
|
||||
|
||||
class SqliteTypeTests(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.cur = self.con.cursor()
|
||||
self.cur.execute("create table test(i integer, s varchar, f number, b blob)")
|
||||
|
||||
def tearDown(self):
|
||||
self.cur.close()
|
||||
self.con.close()
|
||||
|
||||
def CheckString(self):
|
||||
self.cur.execute("insert into test(s) values (?)", ("Österreich",))
|
||||
self.cur.execute("select s from test")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], "Österreich")
|
||||
|
||||
def CheckSmallInt(self):
|
||||
self.cur.execute("insert into test(i) values (?)", (42,))
|
||||
self.cur.execute("select i from test")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], 42)
|
||||
|
||||
def CheckLargeInt(self):
|
||||
num = 2**40
|
||||
self.cur.execute("insert into test(i) values (?)", (num,))
|
||||
self.cur.execute("select i from test")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], num)
|
||||
|
||||
def CheckFloat(self):
|
||||
val = 3.14
|
||||
self.cur.execute("insert into test(f) values (?)", (val,))
|
||||
self.cur.execute("select f from test")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], val)
|
||||
|
||||
def CheckBlob(self):
|
||||
sample = b"Guglhupf"
|
||||
val = memoryview(sample)
|
||||
self.cur.execute("insert into test(b) values (?)", (val,))
|
||||
self.cur.execute("select b from test")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], sample)
|
||||
|
||||
def CheckUnicodeExecute(self):
|
||||
self.cur.execute("select 'Österreich'")
|
||||
row = self.cur.fetchone()
|
||||
self.assertEqual(row[0], "Österreich")
|
||||
|
||||
class DeclTypesTests(unittest.TestCase):
    """Tests for converters selected via column *declared types*
    (detect_types=sqlite.PARSE_DECLTYPES)."""

    class Foo:
        """Sample user type stored via __conform__ and restored by the
        "FOO" converter registered in setUp."""

        def __init__(self, _val):
            if isinstance(_val, bytes):
                # sqlite3 always calls __init__ with a bytes created from a
                # UTF-8 string when __conform__ was used to store the object.
                _val = _val.decode('utf-8')
            self.val = _val

        def __eq__(self, other):
            if not isinstance(other, DeclTypesTests.Foo):
                return NotImplemented
            return self.val == other.val

        def __conform__(self, protocol):
            # Adapt to SQLite by handing over the wrapped string.
            if protocol is sqlite.PrepareProtocol:
                return self.val
            else:
                return None

        def __str__(self):
            return "<%s>" % self.val

    def setUp(self):
        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
        self.cur = self.con.cursor()
        self.cur.execute("create table test(i int, s str, f float, b bool, u unicode, foo foo, bin blob, n1 number, n2 number(5))")

        # override float, make them always return the same number
        sqlite.converters["FLOAT"] = lambda x: 47.2

        # and implement two custom ones
        sqlite.converters["BOOL"] = lambda x: bool(int(x))
        sqlite.converters["FOO"] = DeclTypesTests.Foo
        sqlite.converters["WRONG"] = lambda x: "WRONG"
        sqlite.converters["NUMBER"] = float

    def tearDown(self):
        # NOTE(review): "WRONG" is registered in setUp but never removed here,
        # so it leaks into the module-global converter table — confirm intentional.
        del sqlite.converters["FLOAT"]
        del sqlite.converters["BOOL"]
        del sqlite.converters["FOO"]
        del sqlite.converters["NUMBER"]
        self.cur.close()
        self.con.close()

    def CheckString(self):
        # default
        # The "[WRONG]" column-name annotation must be ignored because only
        # PARSE_DECLTYPES (not PARSE_COLNAMES) is enabled on this connection.
        self.cur.execute("insert into test(s) values (?)", ("foo",))
        self.cur.execute('select s as "s [WRONG]" from test')
        row = self.cur.fetchone()
        self.assertEqual(row[0], "foo")

    def CheckSmallInt(self):
        # default
        self.cur.execute("insert into test(i) values (?)", (42,))
        self.cur.execute("select i from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], 42)

    def CheckLargeInt(self):
        # default — a value that does not fit in 32 bits
        num = 2**40
        self.cur.execute("insert into test(i) values (?)", (num,))
        self.cur.execute("select i from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], num)

    def CheckFloat(self):
        # custom — the FLOAT converter from setUp always yields 47.2
        val = 3.14
        self.cur.execute("insert into test(f) values (?)", (val,))
        self.cur.execute("select f from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], 47.2)

    def CheckBool(self):
        # custom
        self.cur.execute("insert into test(b) values (?)", (False,))
        self.cur.execute("select b from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], False)

        self.cur.execute("delete from test")
        self.cur.execute("insert into test(b) values (?)", (True,))
        self.cur.execute("select b from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], True)

    def CheckUnicode(self):
        # default
        val = "\xd6sterreich"
        self.cur.execute("insert into test(u) values (?)", (val,))
        self.cur.execute("select u from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], val)

    def CheckFoo(self):
        # Round-trip through __conform__ (store) and the FOO converter (load).
        val = DeclTypesTests.Foo("bla")
        self.cur.execute("insert into test(foo) values (?)", (val,))
        self.cur.execute("select foo from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], val)

    def CheckUnsupportedSeq(self):
        # Binding an unadaptable object positionally raises InterfaceError.
        class Bar: pass
        val = Bar()
        with self.assertRaises(sqlite.InterfaceError):
            self.cur.execute("insert into test(f) values (?)", (val,))

    def CheckUnsupportedDict(self):
        # Same for named-parameter (dict) binding.
        class Bar: pass
        val = Bar()
        with self.assertRaises(sqlite.InterfaceError):
            self.cur.execute("insert into test(f) values (:val)", {"val": val})

    def CheckBlob(self):
        # default
        sample = b"Guglhupf"
        val = memoryview(sample)
        self.cur.execute("insert into test(bin) values (?)", (val,))
        self.cur.execute("select bin from test")
        row = self.cur.fetchone()
        self.assertEqual(row[0], sample)

    def CheckNumber1(self):
        self.cur.execute("insert into test(n1) values (5)")
        value = self.cur.execute("select n1 from test").fetchone()[0]
        # if the converter is not used, it's an int instead of a float
        self.assertEqual(type(value), float)

    def CheckNumber2(self):
        """Checks whether converter names are cut off at '(' characters"""
        self.cur.execute("insert into test(n2) values (5)")
        value = self.cur.execute("select n2 from test").fetchone()[0]
        # if the converter is not used, it's an int instead of a float
        self.assertEqual(type(value), float)
|
||||
|
||||
class ColNamesTests(unittest.TestCase):
    """Tests for converters selected via column-name annotations
    ('col [TYPE]') with detect_types=sqlite.PARSE_COLNAMES."""

    def setUp(self):
        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
        self.cur = self.con.cursor()
        self.cur.execute("create table test(x foo)")

        # Converters are looked up by the bracketed name in the column alias.
        sqlite.converters["FOO"] = lambda x: "[%s]" % x.decode("ascii")
        sqlite.converters["BAR"] = lambda x: "<%s>" % x.decode("ascii")
        sqlite.converters["EXC"] = lambda x: 5/0
        sqlite.converters["B1B1"] = lambda x: "MARKER"

    def tearDown(self):
        del sqlite.converters["FOO"]
        del sqlite.converters["BAR"]
        del sqlite.converters["EXC"]
        del sqlite.converters["B1B1"]
        self.cur.close()
        self.con.close()

    def CheckDeclTypeNotUsed(self):
        """
        Assures that the declared type is not used when PARSE_DECLTYPES
        is not set.
        """
        self.cur.execute("insert into test(x) values (?)", ("xxx",))
        self.cur.execute("select x from test")
        val = self.cur.fetchone()[0]
        self.assertEqual(val, "xxx")

    def CheckNone(self):
        # NULL must stay None regardless of any registered converter.
        self.cur.execute("insert into test(x) values (?)", (None,))
        self.cur.execute("select x from test")
        val = self.cur.fetchone()[0]
        self.assertEqual(val, None)

    def CheckColName(self):
        self.cur.execute("insert into test(x) values (?)", ("xxx",))
        self.cur.execute('select x as "x [bar]" from test')
        val = self.cur.fetchone()[0]
        self.assertEqual(val, "<xxx>")

        # Check if the stripping of colnames works. Everything after the first
        # whitespace should be stripped.
        self.assertEqual(self.cur.description[0][0], "x")

    def CheckCaseInConverterName(self):
        # "b1b1" in the alias must find the converter registered as "B1B1".
        self.cur.execute("select 'other' as \"x [b1b1]\"")
        val = self.cur.fetchone()[0]
        self.assertEqual(val, "MARKER")

    def CheckCursorDescriptionNoRow(self):
        """
        cursor.description should at least provide the column name(s), even if
        no row returned.
        """
        self.cur.execute("select * from test where 0 = 1")
        self.assertEqual(self.cur.description[0][0], "x")

    def CheckCursorDescriptionInsert(self):
        # Non-SELECT statements produce no description.
        self.cur.execute("insert into test values (1)")
        self.assertIsNone(self.cur.description)
|
||||
|
||||
|
||||
@unittest.skipIf(sqlite.sqlite_version_info < (3, 8, 3), "CTEs not supported")
|
||||
class CommonTableExpressionTests(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.cur = self.con.cursor()
|
||||
self.cur.execute("create table test(x foo)")
|
||||
|
||||
def tearDown(self):
|
||||
self.cur.close()
|
||||
self.con.close()
|
||||
|
||||
def CheckCursorDescriptionCTESimple(self):
|
||||
self.cur.execute("with one as (select 1) select * from one")
|
||||
self.assertIsNotNone(self.cur.description)
|
||||
self.assertEqual(self.cur.description[0][0], "1")
|
||||
|
||||
def CheckCursorDescriptionCTESMultipleColumns(self):
|
||||
self.cur.execute("insert into test values(1)")
|
||||
self.cur.execute("insert into test values(2)")
|
||||
self.cur.execute("with testCTE as (select * from test) select * from testCTE")
|
||||
self.assertIsNotNone(self.cur.description)
|
||||
self.assertEqual(self.cur.description[0][0], "x")
|
||||
|
||||
def CheckCursorDescriptionCTE(self):
|
||||
self.cur.execute("insert into test values (1)")
|
||||
self.cur.execute("with bar as (select * from test) select * from test where x = 1")
|
||||
self.assertIsNotNone(self.cur.description)
|
||||
self.assertEqual(self.cur.description[0][0], "x")
|
||||
self.cur.execute("with bar as (select * from test) select * from test where x = 2")
|
||||
self.assertIsNotNone(self.cur.description)
|
||||
self.assertEqual(self.cur.description[0][0], "x")
|
||||
|
||||
|
||||
class ObjectAdaptationTests(unittest.TestCase):
|
||||
def cast(obj):
|
||||
return float(obj)
|
||||
cast = staticmethod(cast)
|
||||
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
try:
|
||||
del sqlite.adapters[int]
|
||||
except:
|
||||
pass
|
||||
sqlite.register_adapter(int, ObjectAdaptationTests.cast)
|
||||
self.cur = self.con.cursor()
|
||||
|
||||
def tearDown(self):
|
||||
del sqlite.adapters[(int, sqlite.PrepareProtocol)]
|
||||
self.cur.close()
|
||||
self.con.close()
|
||||
|
||||
def CheckCasterIsUsed(self):
|
||||
self.cur.execute("select ?", (4,))
|
||||
val = self.cur.fetchone()[0]
|
||||
self.assertEqual(type(val), float)
|
||||
|
||||
@unittest.skipUnless(zlib, "requires zlib")
|
||||
class BinaryConverterTests(unittest.TestCase):
|
||||
def convert(s):
|
||||
return zlib.decompress(s)
|
||||
convert = staticmethod(convert)
|
||||
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES)
|
||||
sqlite.register_converter("bin", BinaryConverterTests.convert)
|
||||
|
||||
def tearDown(self):
|
||||
self.con.close()
|
||||
|
||||
def CheckBinaryInputForConverter(self):
|
||||
testdata = b"abcdefg" * 10
|
||||
result = self.con.execute('select ? as "x [bin]"', (memoryview(zlib.compress(testdata)),)).fetchone()[0]
|
||||
self.assertEqual(testdata, result)
|
||||
|
||||
class DateTimeTests(unittest.TestCase):
    """Round-trip tests for the default date/timestamp converters enabled by
    PARSE_DECLTYPES on 'date' and 'timestamp' columns."""

    def setUp(self):
        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
        self.cur = self.con.cursor()
        self.cur.execute("create table test(d date, ts timestamp)")

    def tearDown(self):
        self.cur.close()
        self.con.close()

    def CheckSqliteDate(self):
        # sqlite.Date is the DB-API alias for datetime.date.
        d = sqlite.Date(2004, 2, 14)
        self.cur.execute("insert into test(d) values (?)", (d,))
        self.cur.execute("select d from test")
        d2 = self.cur.fetchone()[0]
        self.assertEqual(d, d2)

    def CheckSqliteTimestamp(self):
        # sqlite.Timestamp is the DB-API alias for datetime.datetime.
        ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0)
        self.cur.execute("insert into test(ts) values (?)", (ts,))
        self.cur.execute("select ts from test")
        ts2 = self.cur.fetchone()[0]
        self.assertEqual(ts, ts2)

    @unittest.skipIf(sqlite.sqlite_version_info < (3, 1),
                     'the date functions are available on 3.1 or later')
    def CheckSqlTimestamp(self):
        # A server-side current_timestamp must convert to datetime.datetime;
        # only the year is compared — presumably to tolerate clock skew
        # between the host and SQLite's clock (TODO confirm).
        now = datetime.datetime.utcnow()
        self.cur.execute("insert into test(ts) values (current_timestamp)")
        self.cur.execute("select ts from test")
        ts = self.cur.fetchone()[0]
        self.assertEqual(type(ts), datetime.datetime)
        self.assertEqual(ts.year, now.year)

    def CheckDateTimeSubSeconds(self):
        # Microseconds ending in zeros must survive the round trip.
        ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0, 500000)
        self.cur.execute("insert into test(ts) values (?)", (ts,))
        self.cur.execute("select ts from test")
        ts2 = self.cur.fetchone()[0]
        self.assertEqual(ts, ts2)

    def CheckDateTimeSubSecondsFloatingPoint(self):
        # An arbitrary microsecond value must survive the round trip exactly.
        ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0, 510241)
        self.cur.execute("insert into test(ts) values (?)", (ts,))
        self.cur.execute("select ts from test")
        ts2 = self.cur.fetchone()[0]
        self.assertEqual(ts, ts2)
|
||||
|
||||
def suite():
    """Collect all type-handling test suites of this module."""
    cases = (
        SqliteTypeTests,
        DeclTypesTests,
        ColNamesTests,
        ObjectAdaptationTests,
        BinaryConverterTests,
        DateTimeTests,
        CommonTableExpressionTests,
    )
    return unittest.TestSuite([unittest.makeSuite(cls, "Check") for cls in cases])
|
||||
|
||||
def test():
    """Run the full suite with a text runner."""
    unittest.TextTestRunner().run(suite())


if __name__ == "__main__":
    test()
|
|
@ -1,473 +0,0 @@
|
|||
#-*- coding: iso-8859-1 -*-
|
||||
# pysqlite2/test/userfunctions.py: tests for user-defined functions and
|
||||
# aggregates.
|
||||
#
|
||||
# Copyright (C) 2005-2007 Gerhard Häring <gh@ghaering.de>
|
||||
#
|
||||
# This file is part of pysqlite.
|
||||
#
|
||||
# This software is provided 'as-is', without any express or implied
|
||||
# warranty. In no event will the authors be held liable for any damages
|
||||
# arising from the use of this software.
|
||||
#
|
||||
# Permission is granted to anyone to use this software for any purpose,
|
||||
# including commercial applications, and to alter it and redistribute it
|
||||
# freely, subject to the following restrictions:
|
||||
#
|
||||
# 1. The origin of this software must not be misrepresented; you must not
|
||||
# claim that you wrote the original software. If you use this software
|
||||
# in a product, an acknowledgment in the product documentation would be
|
||||
# appreciated but is not required.
|
||||
# 2. Altered source versions must be plainly marked as such, and must not be
|
||||
# misrepresented as being the original software.
|
||||
# 3. This notice may not be removed or altered from any source distribution.
|
||||
|
||||
import unittest
|
||||
import sqlite3 as sqlite
|
||||
|
||||
def func_returntext():
    """SQL function result: a plain str."""
    return "foo"


def func_returnunicode():
    """SQL function result: a unicode string (same as str on Python 3)."""
    return "bar"


def func_returnint():
    """SQL function result: an int."""
    return 42


def func_returnfloat():
    """SQL function result: a float."""
    return 3.14


def func_returnnull():
    """SQL function result: NULL (None)."""
    return None


def func_returnblob():
    """SQL function result: a bytes BLOB."""
    return b"blob"


def func_returnlonglong():
    """SQL function result: a value needing a 64-bit integer."""
    return 1 << 31


def func_raiseexception():
    """SQL function that always raises ZeroDivisionError."""
    5/0
|
||||
|
||||
def func_isstring(v):
    """True iff v is exactly a str (not a subclass)."""
    return type(v) is str


def func_isint(v):
    """True iff v is exactly an int."""
    return type(v) is int


def func_isfloat(v):
    """True iff v is exactly a float."""
    return type(v) is float


def func_isnone(v):
    """True iff v is None."""
    return type(v) is type(None)


def func_isblob(v):
    """True iff v is a bytes-like blob value."""
    return isinstance(v, (bytes, memoryview))


def func_islonglong(v):
    """True iff v is an int that needs more than 32 bits."""
    return isinstance(v, int) and v >= 1 << 31


def func(*args):
    """Variadic helper: return the number of arguments received."""
    return len(args)
|
||||
|
||||
class AggrNoStep:
    """Aggregate deliberately missing step(); exercises the error path."""

    def __init__(self):
        pass

    def finalize(self):
        return 1
|
||||
|
||||
class AggrNoFinalize:
    """Aggregate deliberately missing finalize(); exercises the error path."""

    def __init__(self):
        pass

    def step(self, x):
        pass
|
||||
|
||||
class AggrExceptionInInit:
    """Aggregate whose constructor raises ZeroDivisionError."""

    def __init__(self):
        5/0

    def step(self, x):
        pass

    def finalize(self):
        pass
|
||||
|
||||
class AggrExceptionInStep:
    """Aggregate whose step() raises ZeroDivisionError; finalize returns 42."""

    def __init__(self):
        pass

    def step(self, x):
        5/0

    def finalize(self):
        return 42
|
||||
|
||||
class AggrExceptionInFinalize:
    """Aggregate whose finalize() raises ZeroDivisionError."""

    def __init__(self):
        pass

    def step(self, x):
        pass

    def finalize(self):
        5/0
|
||||
|
||||
class AggrCheckType:
    """Aggregate recording whether the last value matched a named type."""

    def __init__(self):
        self.val = None

    def step(self, whichType, val):
        # Map the SQL-side type name to the Python type it must match exactly.
        expected = {"str": str, "int": int, "float": float, "None": type(None),
                    "blob": bytes}[whichType]
        self.val = int(expected is type(val))

    def finalize(self):
        return self.val
|
||||
|
||||
class AggrCheckTypes:
    """Aggregate counting how many values matched a named type."""

    def __init__(self):
        self.val = 0

    def step(self, whichType, *vals):
        # Map the SQL-side type name to the Python type it must match exactly.
        expected = {"str": str, "int": int, "float": float, "None": type(None),
                    "blob": bytes}[whichType]
        self.val += sum(expected is type(v) for v in vals)

    def finalize(self):
        return self.val
|
||||
|
||||
class AggrSum:
    """Aggregate computing a running float sum of its inputs."""

    def __init__(self):
        self.val = 0.0

    def step(self, val):
        self.val += val

    def finalize(self):
        return self.val
|
||||
|
||||
class FunctionTests(unittest.TestCase):
    """Tests for scalar user-defined SQL functions (Connection.create_function)."""

    def setUp(self):
        self.con = sqlite.connect(":memory:")

        # Zero-argument functions covering every supported return type.
        self.con.create_function("returntext", 0, func_returntext)
        self.con.create_function("returnunicode", 0, func_returnunicode)
        self.con.create_function("returnint", 0, func_returnint)
        self.con.create_function("returnfloat", 0, func_returnfloat)
        self.con.create_function("returnnull", 0, func_returnnull)
        self.con.create_function("returnblob", 0, func_returnblob)
        self.con.create_function("returnlonglong", 0, func_returnlonglong)
        self.con.create_function("raiseexception", 0, func_raiseexception)

        # One-argument type predicates, plus a variadic (-1) argument counter.
        self.con.create_function("isstring", 1, func_isstring)
        self.con.create_function("isint", 1, func_isint)
        self.con.create_function("isfloat", 1, func_isfloat)
        self.con.create_function("isnone", 1, func_isnone)
        self.con.create_function("isblob", 1, func_isblob)
        self.con.create_function("islonglong", 1, func_islonglong)
        self.con.create_function("spam", -1, func)

    def tearDown(self):
        self.con.close()

    def CheckFuncErrorOnCreate(self):
        # An invalid argument count must be rejected at registration time.
        with self.assertRaises(sqlite.OperationalError):
            self.con.create_function("bla", -100, lambda x: 2*x)

    def CheckFuncRefCount(self):
        # The connection must keep its own reference to the callback so it
        # stays callable after local references go away.
        def getfunc():
            def f():
                return 1
            return f
        f = getfunc()
        globals()["foo"] = f
        # self.con.create_function("reftest", 0, getfunc())
        self.con.create_function("reftest", 0, f)
        cur = self.con.cursor()
        cur.execute("select reftest()")

    def CheckFuncReturnText(self):
        cur = self.con.cursor()
        cur.execute("select returntext()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), str)
        self.assertEqual(val, "foo")

    def CheckFuncReturnUnicode(self):
        cur = self.con.cursor()
        cur.execute("select returnunicode()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), str)
        self.assertEqual(val, "bar")

    def CheckFuncReturnInt(self):
        cur = self.con.cursor()
        cur.execute("select returnint()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), int)
        self.assertEqual(val, 42)

    def CheckFuncReturnFloat(self):
        # Allow a small tolerance instead of exact float comparison.
        cur = self.con.cursor()
        cur.execute("select returnfloat()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), float)
        if val < 3.139 or val > 3.141:
            self.fail("wrong value")

    def CheckFuncReturnNull(self):
        cur = self.con.cursor()
        cur.execute("select returnnull()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), type(None))
        self.assertEqual(val, None)

    def CheckFuncReturnBlob(self):
        cur = self.con.cursor()
        cur.execute("select returnblob()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), bytes)
        self.assertEqual(val, b"blob")

    def CheckFuncReturnLongLong(self):
        cur = self.con.cursor()
        cur.execute("select returnlonglong()")
        val = cur.fetchone()[0]
        self.assertEqual(val, 1<<31)

    def CheckFuncException(self):
        # A Python exception in the callback surfaces as OperationalError
        # with this fixed message.
        cur = self.con.cursor()
        with self.assertRaises(sqlite.OperationalError) as cm:
            cur.execute("select raiseexception()")
            cur.fetchone()
        self.assertEqual(str(cm.exception), 'user-defined function raised exception')

    def CheckParamString(self):
        cur = self.con.cursor()
        cur.execute("select isstring(?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select isint(?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select isfloat(?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select isnone(?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select isblob(?)", (memoryview(b"blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckParamLongLong(self):
        cur = self.con.cursor()
        cur.execute("select islonglong(?)", (1<<42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAnyArguments(self):
        # The variadic function receives exactly the arguments passed in SQL.
        cur = self.con.cursor()
        cur.execute("select spam(?, ?)", (1, 2))
        val = cur.fetchone()[0]
        self.assertEqual(val, 2)
|
||||
|
||||
|
||||
class AggregateTests(unittest.TestCase):
    """Tests for user-defined aggregate functions (Connection.create_aggregate)."""

    def setUp(self):
        self.con = sqlite.connect(":memory:")
        cur = self.con.cursor()
        cur.execute("""
            create table test(
                t text,
                i integer,
                f float,
                n,
                b blob
                )
            """)
        cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
            ("foo", 5, 3.14, None, memoryview(b"blob"),))

        self.con.create_aggregate("nostep", 1, AggrNoStep)
        self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
        self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
        self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
        self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
        self.con.create_aggregate("checkType", 2, AggrCheckType)
        self.con.create_aggregate("checkTypes", -1, AggrCheckTypes)
        self.con.create_aggregate("mysum", 1, AggrSum)

    def tearDown(self):
        # NOTE(review): the connection is deliberately left open (no cursor
        # attribute exists to close) — presumably relying on GC; confirm.
        #self.cur.close()
        #self.con.close()
        pass

    def CheckAggrErrorOnCreate(self):
        # An invalid argument count must be rejected at registration time.
        with self.assertRaises(sqlite.OperationalError):
            self.con.create_function("bla", -100, AggrSum)

    def CheckAggrNoStep(self):
        # Missing step() surfaces as an AttributeError with this exact message.
        cur = self.con.cursor()
        with self.assertRaises(AttributeError) as cm:
            cur.execute("select nostep(t) from test")
        self.assertEqual(str(cm.exception), "'AggrNoStep' object has no attribute 'step'")

    def CheckAggrNoFinalize(self):
        cur = self.con.cursor()
        with self.assertRaises(sqlite.OperationalError) as cm:
            cur.execute("select nofinalize(t) from test")
            val = cur.fetchone()[0]
        self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")

    def CheckAggrExceptionInInit(self):
        cur = self.con.cursor()
        with self.assertRaises(sqlite.OperationalError) as cm:
            cur.execute("select excInit(t) from test")
            val = cur.fetchone()[0]
        self.assertEqual(str(cm.exception), "user-defined aggregate's '__init__' method raised error")

    def CheckAggrExceptionInStep(self):
        cur = self.con.cursor()
        with self.assertRaises(sqlite.OperationalError) as cm:
            cur.execute("select excStep(t) from test")
            val = cur.fetchone()[0]
        self.assertEqual(str(cm.exception), "user-defined aggregate's 'step' method raised error")

    def CheckAggrExceptionInFinalize(self):
        cur = self.con.cursor()
        with self.assertRaises(sqlite.OperationalError) as cm:
            cur.execute("select excFinalize(t) from test")
            val = cur.fetchone()[0]
        self.assertEqual(str(cm.exception), "user-defined aggregate's 'finalize' method raised error")

    def CheckAggrCheckParamStr(self):
        cur = self.con.cursor()
        cur.execute("select checkType('str', ?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select checkType('int', ?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamsInt(self):
        # Variadic aggregate: both arguments are counted.
        cur = self.con.cursor()
        cur.execute("select checkTypes('int', ?, ?)", (42, 24))
        val = cur.fetchone()[0]
        self.assertEqual(val, 2)

    def CheckAggrCheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select checkType('float', ?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select checkType('None', ?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select checkType('blob', ?)", (memoryview(b"blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)

    def CheckAggrCheckAggrSum(self):
        cur = self.con.cursor()
        cur.execute("delete from test")
        cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
        cur.execute("select mysum(i) from test")
        val = cur.fetchone()[0]
        self.assertEqual(val, 60)
|
||||
|
||||
class AuthorizerTests(unittest.TestCase):
|
||||
@staticmethod
|
||||
def authorizer_cb(action, arg1, arg2, dbname, source):
|
||||
if action != sqlite.SQLITE_SELECT:
|
||||
return sqlite.SQLITE_DENY
|
||||
if arg2 == 'c2' or arg1 == 't2':
|
||||
return sqlite.SQLITE_DENY
|
||||
return sqlite.SQLITE_OK
|
||||
|
||||
def setUp(self):
|
||||
self.con = sqlite.connect(":memory:")
|
||||
self.con.executescript("""
|
||||
create table t1 (c1, c2);
|
||||
create table t2 (c1, c2);
|
||||
insert into t1 (c1, c2) values (1, 2);
|
||||
insert into t2 (c1, c2) values (4, 5);
|
||||
""")
|
||||
|
||||
# For our security test:
|
||||
self.con.execute("select c2 from t2")
|
||||
|
||||
self.con.set_authorizer(self.authorizer_cb)
|
||||
|
||||
def tearDown(self):
|
||||
pass
|
||||
|
||||
def test_table_access(self):
|
||||
with self.assertRaises(sqlite.DatabaseError) as cm:
|
||||
self.con.execute("select * from t2")
|
||||
self.assertIn('prohibited', str(cm.exception))
|
||||
|
||||
def test_column_access(self):
|
||||
with self.assertRaises(sqlite.DatabaseError) as cm:
|
||||
self.con.execute("select c2 from t1")
|
||||
self.assertIn('prohibited', str(cm.exception))
|
||||
|
||||
class AuthorizerRaiseExceptionTests(AuthorizerTests):
    """Same checks, but the callback raises instead of returning SQLITE_DENY
    — an exception must be treated as a denial."""

    @staticmethod
    def authorizer_cb(action, arg1, arg2, dbname, source):
        denied = (action != sqlite.SQLITE_SELECT
                  or arg2 == 'c2' or arg1 == 't2')
        if denied:
            raise ValueError
        return sqlite.SQLITE_OK
|
||||
|
||||
class AuthorizerIllegalTypeTests(AuthorizerTests):
    """Same checks, but the callback returns a float — an illegal return
    type must be treated as a denial."""

    @staticmethod
    def authorizer_cb(action, arg1, arg2, dbname, source):
        denied = (action != sqlite.SQLITE_SELECT
                  or arg2 == 'c2' or arg1 == 't2')
        if denied:
            return 0.0
        return sqlite.SQLITE_OK
|
||||
|
||||
class AuthorizerLargeIntegerTests(AuthorizerTests):
    """Same checks, but the callback returns an int that overflows a C int
    — it must be treated as a denial."""

    @staticmethod
    def authorizer_cb(action, arg1, arg2, dbname, source):
        denied = (action != sqlite.SQLITE_SELECT
                  or arg2 == 'c2' or arg1 == 't2')
        if denied:
            return 2**32
        return sqlite.SQLITE_OK
|
||||
|
||||
|
||||
def suite():
    """Collect function, aggregate and authorizer test suites."""
    suites = [
        unittest.makeSuite(FunctionTests, "Check"),
        unittest.makeSuite(AggregateTests, "Check"),
        unittest.makeSuite(AuthorizerTests),
        unittest.makeSuite(AuthorizerRaiseExceptionTests),
        unittest.makeSuite(AuthorizerIllegalTypeTests),
        unittest.makeSuite(AuthorizerLargeIntegerTests),
    ]
    return unittest.TestSuite(suites)
|
||||
|
||||
def test():
    """Run the full suite with a text runner."""
    unittest.TextTestRunner().run(suite())


if __name__ == "__main__":
    test()
|
|
@ -4,8 +4,32 @@
|
|||
|
||||
checksum = require("lib.checksum")
|
||||
|
||||
struct_start = 0x10
|
||||
quick_struct_start = 0xC40
|
||||
struct_size = 0x208
|
||||
offset_pipes = 0x21
|
||||
offset_no_crash = 0x51
|
||||
offset_unlock_nabbit = 0x4D
|
||||
saveFileBuffer = edizon.getSaveFileBuffer()
|
||||
|
||||
|
||||
for slot_num = 0, 5 do
|
||||
if saveFileBuffer[quick_struct_start + 1 + slot_num * struct_size] > 0 then
|
||||
for i = 0, struct_size do
|
||||
saveFileBuffer[i + struct_start + 1 + slot_num * struct_size] = saveFileBuffer[i + quick_struct_start + 1 + slot_num * struct_size]
|
||||
saveFileBuffer[i + quick_struct_start + 1 + slot_num * struct_size] = 0
|
||||
end
|
||||
|
||||
saveFileBuffer[quick_struct_start + struct_size - 3 + slot_num * struct_size] = 0xAC
|
||||
saveFileBuffer[quick_struct_start + struct_size - 2 + slot_num * struct_size] = 0x72
|
||||
saveFileBuffer[quick_struct_start + struct_size - 1 + slot_num * struct_size] = 0x7B
|
||||
saveFileBuffer[quick_struct_start + struct_size + slot_num * struct_size] = 0x17
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
|
||||
|
||||
function getValueFromSaveFile()
|
||||
strArgs = edizon.getStrArgs()
|
||||
intArgs = edizon.getIntArgs()
|
||||
|
@ -16,9 +40,8 @@ function getValueFromSaveFile()
|
|||
|
||||
value = 0
|
||||
|
||||
struct_start = 0x10
|
||||
struct_size = 0x207
|
||||
|
||||
|
||||
|
||||
for i = 0, valueSize - 1 do
|
||||
value = value | (saveFileBuffer[struct_start + offset + i + 1 + slot * struct_size] << i * 8)
|
||||
|
@ -31,17 +54,51 @@ end
|
|||
function StarCoins()
|
||||
strArgs = edizon.getStrArgs()
|
||||
slot = tonumber(strArgs[1], 16)
|
||||
offsetStarCoins = tonumber(strArgs[2], 16)
|
||||
offset = tonumber(strArgs[2], 16)
|
||||
|
||||
for i = 0, 40 do
|
||||
saveFileBuffer[i + 1 + offsetStarCoins + 0x10 + 0x207 * slot] = 119
|
||||
saveFileBuffer[i + 1 + offset + struct_start + struct_size * slot] = 119
|
||||
end
|
||||
|
||||
|
||||
|
||||
return "All Star Coins unlocked"
|
||||
return "Star Coins unlocked"
|
||||
end
|
||||
|
||||
function UnlockLevels()
|
||||
strArgs = edizon.getStrArgs()
|
||||
slot = tonumber(strArgs[1], 16)
|
||||
offset = tonumber(strArgs[2], 16)
|
||||
|
||||
lvl_array = {0x47, 0xCF, 0x47, 0x47, 0x47, 0x1, 0x1, 0x1, 0, 0, 0, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0xCF, 0x47, 0x47, 0x1, 0x1, 0x1, 0x42, 0, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x1, 0x1, 0x1, 0, 0, 0x47, 0xCF, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x1, 0x1, 0x1, 0, 0, 0, 0x42, 0x47, 0xCF, 0x47, 0x47, 0x47, 0xCF, 0x47, 0xCF, 0xCF, 0x47, 0x47, 0x1, 0x1, 0x1, 0, 0x42, 0, 0x47, 0xCF, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0xCF, 0x47, 0x47, 0x1, 0x1, 0x1, 0xCF, 0x47, 0xCF, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x1, 0x1, 0x1, 0, 0, 0xCF, 0x47, 0x47, 0x47, 0xCF, 0x47, 0x47, 0x47, 0x47, 0x47, 0x47, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1}
|
||||
|
||||
for i = 1, #lvl_array do
|
||||
if (saveFileBuffer[i + offset + struct_start + struct_size * slot] < 0x42) or (lvl_array[i] == 0xCF and saveFileBuffer[i + offset + struct_start + struct_size * slot] == 1) then
|
||||
saveFileBuffer[i + offset + struct_start + struct_size * slot] = lvl_array[i]
|
||||
elseif (saveFileBuffer[i + offset + struct_start + struct_size * slot] == 0x43) and (lvl_array[i] == 0xCF) then
|
||||
saveFileBuffer[i + offset + struct_start + struct_size * slot] = 0xC3
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
if slot >=3 then
|
||||
saveFileBuffer[1 + struct_start + struct_size * slot + offset_pipes - 1] = 0xFF
|
||||
else
|
||||
saveFileBuffer[1 + struct_start + struct_size * slot + offset_pipes] = 0xFF
|
||||
end
|
||||
|
||||
|
||||
if saveFileBuffer[1 + struct_start + struct_size * slot + offset_no_crash] < 3 then
|
||||
saveFileBuffer[1 + struct_start + struct_size * slot + offset_no_crash] = 0x03
|
||||
end
|
||||
|
||||
if saveFileBuffer[1 + struct_start + struct_size * slot + offset_unlock_nabbit] == 0xFF then
|
||||
saveFileBuffer[1 + struct_start + struct_size * slot + offset_unlock_nabbit] = 0x00
|
||||
saveFileBuffer[1 + struct_start + struct_size * slot + offset_unlock_nabbit + 1] = 0x00
|
||||
end
|
||||
|
||||
return "Levels unlocked"
|
||||
end
|
||||
|
||||
function setValueInSaveFile(value)
|
||||
strArgs = edizon.getStrArgs()
|
||||
|
@ -53,8 +110,6 @@ function setValueInSaveFile(value)
|
|||
addressSize = intArgs[1]
|
||||
valueSize = intArgs[2]
|
||||
|
||||
struct_start = 0x10
|
||||
struct_size = 0x207
|
||||
|
||||
|
||||
if dummy == "Lives" then
|
||||
|
@ -69,6 +124,7 @@ function setValueInSaveFile(value)
|
|||
|
||||
end
|
||||
|
||||
|
||||
function copyquickslot()
|
||||
s1=0x10
|
||||
s2=0x218
|
||||
|
@ -77,70 +133,48 @@ function copyquickslot()
|
|||
qs2=0xE48
|
||||
qs3=0x1050
|
||||
|
||||
if saveFileBuffer[qs1+1] > 0 then
|
||||
for i = 0, struct_size - 3 do
|
||||
saveFileBuffer[i + s1 + 1] = saveFileBuffer[i + qs1 + 1]
|
||||
saveFileBuffer[i + qs1 + 1] = 0
|
||||
end
|
||||
for slot_num = 0, 5 do
|
||||
if saveFileBuffer[qs1 + 1 + slot_num * struct_size] > 0 then
|
||||
for i = 0, struct_size do
|
||||
saveFileBuffer[i + s1 + 1 + slot_num * struct_size] = saveFileBuffer[i + qs1 + 1 + slot_num * struct_size]
|
||||
saveFileBuffer[i + qs1 + 1 + slot_num * struct_size] = 0
|
||||
end
|
||||
|
||||
saveFileBuffer[qs1 + struct_size - 2] = 0xAC
|
||||
saveFileBuffer[qs1 + struct_size - 1] = 0x72
|
||||
saveFileBuffer[qs1 + struct_size] = 0x7B
|
||||
saveFileBuffer[qs1 + struct_size + 1] = 0x17
|
||||
saveFileBuffer[qs1 + struct_size - 3 + slot_num * struct_size] = 0xAC
|
||||
saveFileBuffer[qs1 + struct_size - 2 + slot_num * struct_size] = 0x72
|
||||
saveFileBuffer[qs1 + struct_size - 1 + slot_num * struct_size] = 0x7B
|
||||
saveFileBuffer[qs1 + struct_size + slot_num * struct_size] = 0x17
|
||||
end
|
||||
end
|
||||
|
||||
if saveFileBuffer[qs2+1] > 0 then
|
||||
for i = 0, struct_size - 3 do
|
||||
saveFileBuffer[i + s2 + 1] = saveFileBuffer[i + qs2 + 1]
|
||||
saveFileBuffer[i + qs2 + 1] = 0
|
||||
end
|
||||
|
||||
saveFileBuffer[qs2 + struct_size - 2] = 0xAC
|
||||
saveFileBuffer[qs2 + struct_size - 1] = 0x72
|
||||
saveFileBuffer[qs2 + struct_size] = 0x7B
|
||||
saveFileBuffer[qs2 + struct_size + 1] = 0x17
|
||||
end
|
||||
|
||||
if saveFileBuffer[qs3+1] > 0 then
|
||||
for i = 0, struct_size - 3 do
|
||||
saveFileBuffer[i + s3 + 1] = saveFileBuffer[i + qs3 + 1]
|
||||
saveFileBuffer[i + qs3 + 1] = 0
|
||||
end
|
||||
|
||||
saveFileBuffer[qs3 + struct_size - 2] = 0xAC
|
||||
saveFileBuffer[qs3 + struct_size - 1] = 0x72
|
||||
saveFileBuffer[qs3 + struct_size] = 0x7B
|
||||
saveFileBuffer[qs3 + struct_size + 1] = 0x17
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
|
||||
function setChecksum()
|
||||
gameFileBuffer = {}
|
||||
|
||||
struct_start = 0x10
|
||||
struct_size = 0x207
|
||||
|
||||
|
||||
for num_struct = 0,2 do
|
||||
for num_struct = 0,5 do
|
||||
address = 1
|
||||
|
||||
for i = 1 + struct_start + num_struct * struct_size + num_struct, 1 + struct_start + struct_size * (num_struct + 1) - 4 + num_struct do
|
||||
gameFileBuffer[address] = saveFileBuffer[i]
|
||||
for i = 1 + struct_start + num_struct * struct_size, struct_start + (struct_size * (num_struct + 1)) - 4 do
|
||||
gameFileBuffer[address] = saveFileBuffer[i]
|
||||
address = address + 1
|
||||
end
|
||||
|
||||
crc = checksum.crc32(string.char(table.unpack(gameFileBuffer)))
|
||||
|
||||
for i = 0, 3 do
|
||||
saveFileBuffer[i + 1 + struct_start + struct_size * (num_struct + 1) - 3 + num_struct] = (crc & (0xFF000000 >> (i * 8))) >> (24 - i * 8)
|
||||
saveFileBuffer[i + struct_start + struct_size * (num_struct + 1) - 3 ] = (crc & (0xFF000000 >> (i * 8))) >> (24 - i * 8)
|
||||
end
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
function getModifiedSaveFile()
|
||||
copyquickslot()
|
||||
--copyquickslot()
|
||||
setChecksum()
|
||||
return saveFileBuffer
|
||||
end
|
116
Modules/edizon/EdiZon/editor/scripts/smo.py
Normal file
116
Modules/edizon/EdiZon/editor/scripts/smo.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
# SMO.py - by Ac_K
|
||||
# Thanks to WerWolv and Leoetlino for helped me!
|
||||
|
||||
import edizon, io, operator
|
||||
|
||||
from byml import *
|
||||
|
||||
savedata_buffer = edizon.getSaveFileBuffer()
|
||||
savedata_header = savedata_buffer[0:0xC]
|
||||
savedata_dict = bymlbuffer_to_object(savedata_buffer[0x10:]) # skip the save data header
|
||||
|
||||
# TODO: Add more editable values :P
|
||||
# print(savedata_dict)
|
||||
|
||||
def get_first_empty_index(item_list):
|
||||
i = 0
|
||||
|
||||
for x in item_list:
|
||||
if x == "":
|
||||
return i
|
||||
i += 1
|
||||
|
||||
return -1
|
||||
|
||||
def find_item_index(item_list, item):
|
||||
i = 0
|
||||
|
||||
for x in item_list:
|
||||
if x == item:
|
||||
return i
|
||||
i += 1
|
||||
|
||||
return -1
|
||||
|
||||
def remove_from_category(item_list, item):
|
||||
index_item = find_item_index(item_list, item)
|
||||
|
||||
if index_item != -1:
|
||||
del item_list[index_item]
|
||||
item_list.append('')
|
||||
|
||||
def add_to_category(item_list, item):
|
||||
if item not in item_list:
|
||||
empty_index = get_first_empty_index(item_list)
|
||||
item_list[empty_index] = item
|
||||
|
||||
def getValueFromSaveFile():
|
||||
strArgs = edizon.getStrArgs()
|
||||
intArgs = edizon.getIntArgs()
|
||||
|
||||
itemType = strArgs[0]
|
||||
|
||||
if itemType == "Number":
|
||||
return int(savedata_dict[strArgs[1]])
|
||||
|
||||
if itemType == "ListItem":
|
||||
item_list = savedata_dict[strArgs[1]]
|
||||
item_index = find_item_index(item_list, strArgs[2])
|
||||
|
||||
if item_index >= 0:
|
||||
return 1
|
||||
else:
|
||||
return 0
|
||||
|
||||
def setValueInSaveFile(value):
|
||||
global savedata_dict
|
||||
|
||||
strArgs = edizon.getStrArgs()
|
||||
intArgs = edizon.getIntArgs()
|
||||
|
||||
itemType = strArgs[0]
|
||||
|
||||
if itemType == "Number":
|
||||
if intArgs[0] == 0: recast_value = Int(value)
|
||||
if intArgs[0] == 4: recast_value = UInt(value)
|
||||
if intArgs[0] == 8: recast_value = UInt64(value)
|
||||
if intArgs[0] == 10: recast_value = Double(value)
|
||||
if intArgs[0] == 11: recast_value = Float(value)
|
||||
|
||||
savedata_dict[strArgs[1]] = recast_value
|
||||
|
||||
if itemType == "ListItem":
|
||||
item_list = savedata_dict[strArgs[1]]
|
||||
|
||||
if value == 1:
|
||||
add_to_category(item_list, strArgs[2])
|
||||
if value == 0:
|
||||
remove_from_category(item_list, strArgs[2])
|
||||
|
||||
savedata_dict[strArgs[1]] = item_list
|
||||
|
||||
def getModifiedSaveFile():
|
||||
modified_savedata_byml = object_to_bymlbuffer(savedata_dict, False, 3)
|
||||
|
||||
# store new byml size
|
||||
byml_size = modified_savedata_byml.getbuffer().nbytes
|
||||
|
||||
# write some dummy bytes to get the right save size
|
||||
modified_savedata_byml.seek(0x20000B - 0x10)
|
||||
modified_savedata_byml.write(bytes(0x01))
|
||||
modified_savedata_byml.seek(0)
|
||||
|
||||
# TODO: Handle the whole 0x10 header
|
||||
# 0x00 - u32 - CRC32 checkum from 0x04 to EOF
|
||||
# 0x04 - u32 - Version? (Always 0x01)
|
||||
# 0x08 - u32 - Save file size
|
||||
# 0x0C - u32 - Byml section size
|
||||
|
||||
# write back the header without the byml size
|
||||
output_buffer = savedata_header
|
||||
# write the new byml size
|
||||
output_buffer += struct.pack("<I", byml_size)
|
||||
# write the byml data
|
||||
output_buffer += modified_savedata_byml.getvalue()
|
||||
|
||||
return output_buffer
|
Binary file not shown.
Binary file not shown.
BIN
Modules/hekate_payload/hekate_ctcaer_4.9.bin
Normal file
BIN
Modules/hekate_payload/hekate_ctcaer_4.9.bin
Normal file
Binary file not shown.
|
@ -1,11 +0,0 @@
|
|||
{----- LDN_MITM ----}
|
||||
[ldn_mitm + CFW]
|
||||
kip1=modules/required/*
|
||||
kip1=modules/ldn_mitm/ldn_mitm.kip
|
||||
kip1patch=nosigchk
|
||||
secmon=modules/required/exosphere.bin
|
||||
warmboot=modules/required/lp0fw.bin
|
||||
logopath=bootloader/bootlogo.bmp
|
||||
atmosphere=1
|
||||
debugmode=1
|
||||
{ }
|
Binary file not shown.
Binary file not shown.
BIN
Modules/lockpick/bootloader/payloads/Lockpick_RCM.bin
Normal file
BIN
Modules/lockpick/bootloader/payloads/Lockpick_RCM.bin
Normal file
Binary file not shown.
Binary file not shown.
14
Modules/must_have/atmosphere/BCT.ini
Normal file
14
Modules/must_have/atmosphere/BCT.ini
Normal file
|
@ -0,0 +1,14 @@
|
|||
BCT0
|
||||
[stage1]
|
||||
stage2_path = atmosphere/fusee-secondary.bin
|
||||
stage2_addr = 0xF0000000
|
||||
stage2_entrypoint = 0xF0000000
|
||||
|
||||
[exosphere]
|
||||
; Note: Disabling debugmode will cause parts of ams.tma to not work, in the future.
|
||||
debugmode = 1
|
||||
debugmode_user = 0
|
||||
|
||||
[stratosphere]
|
||||
; To force-enable nogc, add nogc = 1
|
||||
; To force-disable nogc, add nogc = 0
|
BIN
Modules/must_have/atmosphere/fusee-secondary.bin
Normal file
BIN
Modules/must_have/atmosphere/fusee-secondary.bin
Normal file
Binary file not shown.
|
@ -4,4 +4,5 @@ path=atmosphere/hbl.nsp
|
|||
override_key=!R
|
||||
|
||||
[default_config]
|
||||
override_key=!L
|
||||
override_key=!L
|
||||
cheat_enable_key=!L
|
Binary file not shown.
|
@ -3,9 +3,12 @@
|
|||
upload_enabled = u8!0x0
|
||||
; Enable USB 3.0 superspeed for homebrew
|
||||
[usb]
|
||||
usb30_force_enabled = u8!0x1
|
||||
usb30_force_enabled = u8!0x0
|
||||
; Atmosphere custom settings
|
||||
[atmosphere]
|
||||
; Make the power menu's "reboot" button reboot to payload.
|
||||
; Set to "normal" for normal reboot, "rcm" for rcm reboot.
|
||||
power_menu_reboot_function = str!payload
|
||||
power_menu_reboot_function = str!payload
|
||||
; Controls whether dmnt cheats should be toggled on or off by
|
||||
; default. 1 = toggled on by default, 0 = toggled off by default.
|
||||
dmnt_cheats_enabled_by_default = u8!0x0
|
BIN
Modules/must_have/atmosphere/titles/010000000000000D/exefs.nsp
Normal file
BIN
Modules/must_have/atmosphere/titles/010000000000000D/exefs.nsp
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -3,12 +3,12 @@ autoboot=0
|
|||
autoboot_list=0
|
||||
bootwait=5
|
||||
customlogo=1
|
||||
verification=2
|
||||
verification=1
|
||||
backlight=100
|
||||
autohosoff=0
|
||||
autonogc=1
|
||||
|
||||
{AtlasNX/Kosmos v11.10.2}
|
||||
{AtlasNX/Kosmos v11.11}
|
||||
{ }
|
||||
{Discord: discord.teamatlasnx.com}
|
||||
{Github: git.teamatlasnx.com}
|
||||
|
@ -17,13 +17,10 @@ autonogc=1
|
|||
|
||||
{-- Custom Firmwares --}
|
||||
[CFW]
|
||||
kip1=modules/required/*
|
||||
fss0=atmosphere/fusee-secondary.bin
|
||||
kip1patch=nosigchk
|
||||
secmon=modules/required/exosphere.bin
|
||||
warmboot=modules/required/lp0fw.bin
|
||||
logopath=bootloader/bootlogo.bmp
|
||||
atmosphere=1
|
||||
debugmode=1
|
||||
logopath=bootloader/bootlogo.bmp
|
||||
{ }
|
||||
|
||||
{---- Miscellaneous ---}
|
||||
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
Modules/must_have/sept/sept-secondary.bin
Normal file
BIN
Modules/must_have/sept/sept-secondary.bin
Normal file
Binary file not shown.
Binary file not shown.
|
@ -2,12 +2,9 @@
|
|||
{Port: 5000}
|
||||
{Dont run SDFilesUpdater with this!}
|
||||
[FTP + CFW]
|
||||
kip1=modules/required/*
|
||||
fss0=atmosphere/fusee-secondary.bin
|
||||
kip1=modules/sysftpd/sys-ftpd.kip
|
||||
kip1patch=nosigchk
|
||||
secmon=modules/required/exosphere.bin
|
||||
warmboot=modules/required/lp0fw.bin
|
||||
logopath=bootloader/bootlogo.bmp
|
||||
atmosphere=1
|
||||
debugmode=1
|
||||
{ }
|
|
@ -1,11 +1,8 @@
|
|||
{---- sys-netcheat ----}
|
||||
[CFW + sys-netcheat]
|
||||
kip1=modules/required/*
|
||||
fss0=atmosphere/fusee-secondary.bin
|
||||
kip1=modules/sysnetcheat/sys-netcheat.kip
|
||||
kip1patch=nosigchk
|
||||
secmon=modules/required/exosphere.bin
|
||||
warmboot=modules/required/lp0fw.bin
|
||||
logopath=bootloader/bootlogo.bmp
|
||||
atmosphere=1
|
||||
debugmode=1
|
||||
{ }
|
||||
|
|
22
README.md
22
README.md
|
@ -1,11 +1,13 @@
|
|||
![Image](https://user-images.githubusercontent.com/25822956/52866161-64c6e300-313e-11e9-9e3a-87636ed67467.png)
|
||||
|
||||
*formerly known as SDFilesSwitch*
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/AtlasNX/Kosmos/releases">
|
||||
<img src="https://user-images.githubusercontent.com/25822956/52866161-64c6e300-313e-11e9-9e3a-87636ed67467.png"></a>
|
||||
<br>
|
||||
This handy All-in-One package includes everything you need to run Hekate / Atmosphere with some extra patches to enhance your experience.
|
||||
|
||||
## [Join us on Discord](https://discord.gg/qbRAuy7)
|
||||
## [Donations](https://www.patreon.com/atlasnx)
|
||||
<br>
|
||||
<br>
|
||||
<a href="https://discord.gg/qbRAuy7"><img src="https://discordapp.com/api/guilds/477891535174631424/embed.png?style=banner2" alt="Discord Server" /></a>
|
||||
<a href="https://www.patreon.com/atlasnx"><img src="https://c5.patreon.com/external/logo/become_a_patron_button@2x.png" height="75" width="320"></a>
|
||||
</p>
|
||||
|
||||
## Features
|
||||
* Atmosphere
|
||||
|
@ -39,11 +41,9 @@ This handy All-in-One package includes everything you need to run Hekate / Atmos
|
|||
* [Switch Homebrew Menu](https://github.com/switchbrew/nx-hbmenu)
|
||||
* [Sys-FTPD](https://github.com/jakibaki/sys-ftpd)
|
||||
* [Sys-Netcheat](https://github.com/jakibaki/sys-netcheat)
|
||||
* [Ldn-mitm](https://github.com/spacemeowx2/ldn_mitm)
|
||||
* [AppstoreNX](https://github.com/vgmoose/appstorenx)
|
||||
* [Lockpick](https://github.com/shchmue/Lockpick)
|
||||
* [EdiZon](https://github.com/thomasnet-mc/EdiZon)
|
||||
* [Checkpoint](https://github.com/BernardoGiordano/Checkpoint)
|
||||
* [Goldleaf](https://github.com/XorTroll/Goldleaf)
|
||||
* [KosmosUpdater](https://github.com/StevenMattera/SDFilesUpdater)
|
||||
* [CFW-Settings](https://github.com/AtlasNX/CFW-Settings)
|
||||
* [Kosmos Updater](https://github.com/StevenMattera/SDFilesUpdater)
|
||||
* [Kosmos Toolbox](https://github.com/AtlasNX/CFW-Settings)
|
||||
|
|
Reference in a new issue