Compare commits

...

30 commits

Author SHA1 Message Date
35560c76b1 Update modules.py 2020-05-13 13:45:13 -05:00
01e12978bf Update README.md 2020-05-13 13:34:24 -05:00
tomGER
3634bb8b17
Archive Kosmos. 2020-05-12 20:39:30 +02:00
Nichole Mattera
1db317c18e
v16.1 - Hotfix for SDSetup 2020-05-06 06:36:43 -04:00
Nichole Mattera
7387d14ebd
v16.1 2020-05-05 17:04:42 -04:00
Nichole Mattera
5fa8b11896
v16.0 2020-05-01 13:07:01 -04:00
Nichole Mattera
59deacbc77
v15.6
* Added Gamecard Installer NX to Kosmos.

* Updated README to include Gamecard Installer NX.

* Removed Gamecard Installer NX

* Forgot to remove code.

* Lets try this again.

* Removed the nichole logo. Thank you team! 💖

* Added Kosmos Cleaner.

* Fixed emuiibo

* Fixed sys-clk

* Added some additional files to ignore.

* Make the shebang specifically python3.

* Fixed hekate module for SDSetup.
2020-04-20 13:35:19 -04:00
friedkeenan
260c089056 v15.5 2020-04-19 12:39:11 -04:00
Nichole Mattera
29db8b59cf
v15.4-1
* Added Gamecard Installer NX to Kosmos.

* Updated README to include Gamecard Installer NX.

* Removed Gamecard Installer NX

* Forgot to remove code.

* Lets try this again.
2020-04-09 07:15:37 -04:00
Nichole Mattera
92fff1ddc2
v15.4-1
* Added Gamecard Installer NX to Kosmos.

* Updated README to include Gamecard Installer NX.

* Removed Gamecard Installer NX
2020-04-09 07:11:20 -04:00
Nichole Mattera
cee066702c
v15.4
* Added Gamecard Installer NX to Kosmos.
* Updated README to include Gamecard Installer NX.
2020-04-04 14:05:48 -04:00
Nichole Mattera
55f81319a2
v15.3 2020-03-15 09:38:33 -04:00
Nichole Mattera
c91a08406d v15.2 2020-03-03 05:25:55 -05:00
Nichole Mattera
680968f109 v15.1 2020-02-26 19:53:57 -05:00
Nichole Mattera
a7d2d3f857 v15 2020-02-26 19:53:57 -05:00
Nichole Mattera
afd2588986 v14.2 2020-02-26 19:53:57 -05:00
tumGER
4660ec5c1d v14.1 2020-02-26 19:53:56 -05:00
tumGER
0d3d27c824 v14.0.1 2020-02-26 19:53:56 -05:00
tomGER
e83b0e3457 v14 2020-02-26 19:53:56 -05:00
tumGER
39bb2678d6 v13.2.1 2020-02-26 19:53:56 -05:00
Nichole Mattera
25cf8277dc v13.1 and v13.2 2020-02-26 19:53:56 -05:00
shchmue
06840bdcb2 v13.0.3 2020-02-26 19:53:56 -05:00
tumGER
9e126a5c29 v13.0.2 2020-02-26 19:53:56 -05:00
Steven Mattera
6874726dc5 v13.0.1 2020-02-26 19:53:56 -05:00
tumGER
b12e347b5b v13 2020-02-26 19:53:56 -05:00
tomGER
f2c3102830 v12.99 2020-02-26 19:53:56 -05:00
noahc3
5e071c0d7a v12.2 2020-02-26 19:53:56 -05:00
WerWolv
af580b9b5a v12.1.2 2020-02-26 19:53:56 -05:00
zkitX
ab1da6d9f7 v12.1.1 2020-02-26 19:53:56 -05:00
shchmue
1641691f68 v12.1 2020-02-26 19:53:55 -05:00
584 changed files with 2001 additions and 230753 deletions

7
.github/FUNDING.yml vendored Normal file
View file

@@ -0,0 +1,7 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: atlasnx
open_collective: # Replace with a single Open Collective username
ko_fi: atlasnx
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel

140
Builder/.gitignore vendored Normal file
View file

@@ -0,0 +1,140 @@
config.py
.vscode
tmp/
*.zip
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/

339
Builder/LICENSE.md Normal file
View file

@@ -0,0 +1,339 @@
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Also add information on how to contact you by electronic and paper mail.
If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:
Gnomovision version 69, Copyright (C) year name of author
Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the program
`Gnomovision' (which makes passes at compilers) written by James Hacker.
<signature of Ty Coon>, 1 April 1989
Ty Coon, President of Vice
This General Public License does not permit incorporating your program into
proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License.

View file

@@ -0,0 +1,60 @@
; Disable uploading error reports to Nintendo
[eupld]
upload_enabled = u8!0x0
; Control whether RO should ease its validation of NROs.
; (note: this is normally not necessary, and ips patches can be used.)
[ro]
ease_nro_restriction = u8!0x0
; Atmosphere custom settings
[atmosphere]
; Reboot from fatal automatically after some number of milliseconds.
; If field is not present or 0, fatal will wait indefinitely for user input.
fatal_auto_reboot_interval = u64!0x0
; Make the power menu's "reboot" button reboot to payload.
; Set to "normal" for normal reboot, "rcm" for rcm reboot.
power_menu_reboot_function = str!payload
; Controls whether dmnt cheats should be toggled on or off by
; default. 1 = toggled on by default, 0 = toggled off by default.
dmnt_cheats_enabled_by_default = u8!0x0
; Controls whether dmnt should always save cheat toggle state
; for restoration on new game launch. 1 = always save toggles,
; 0 = only save toggles if toggle file exists.
dmnt_always_save_cheat_toggles = u8!0x0
; Enable writing to BIS partitions for HBL.
; This is probably undesirable for normal usage.
enable_hbl_bis_write = u8!0x0
; Enable reading the CAL0 partition for HBL.
; This is probably undesirable for normal usage.
enable_hbl_cal_read = u8!0x0
; Controls whether fs.mitm should redirect save files
; to directories on the sd card.
; 0 = Do not redirect, 1 = Redirect.
; NOTE: EXPERIMENTAL
; If you do not know what you are doing, do not touch this yet.
fsmitm_redirect_saves_to_sd = u8!0x0
; Controls whether to enable the deprecated hid mitm
; to fix compatibility with old homebrew.
; 0 = Do not enable, 1 = Enable.
; Please note this setting may be removed in a
; future release of Atmosphere.
enable_deprecated_hid_mitm = u8!0x0
[hbloader]
; Controls the size of the homebrew heap when running as applet.
; If set to zero, all available applet memory is used as heap.
; The default is zero.
applet_heap_size = u64!0x0
; Controls the amount of memory to reserve when running as applet
; for usage by other applets. This setting has no effect if
; applet_heap_size is non-zero. The default is 0x8600000.
applet_heap_reservation_size = u64!0x8600000

View file

@@ -0,0 +1,5 @@
{
"name" : "emuiibo",
"tid" : "0100000000000352",
"requires_reboot": true
}

View file

Before

Width:  |  Height:  |  Size: 3.5 MiB

After

Width:  |  Height:  |  Size: 3.5 MiB

View file

@@ -0,0 +1,42 @@
[config]
autoboot=0
autoboot_list=0
bootwait=1
verification=1
backlight=100
autohosoff=0
autonogc=1
updater2p=1
{AtlasNX/Kosmos KOSMOS_VERSION}
{}
{Discord: discord.teamatlasnx.com}
{Github: git.teamatlasnx.com}
{Patreon: patreon.teamatlasnx.com}
{Pegascape DNS: pegascape.sdsetup.com}
{}
{--- Custom Firmware ---}
[CFW (SYSNAND)]
emummc_force_disable=1
fss0=atmosphere/fusee-secondary.bin
atmosphere=1
logopath=bootloader/bootlogo.bmp
icon=bootloader/res/icon_payload.bmp
{}
[CFW (EMUMMC)]
emummcforce=1
fss0=atmosphere/fusee-secondary.bin
atmosphere=1
logopath=bootloader/bootlogo.bmp
icon=bootloader/res/icon_payload.bmp
{}
{--- Stock ---}
[Stock (SYSNAND)]
emummc_force_disable=1
fss0=atmosphere/fusee-secondary.bin
stock=1
icon=bootloader/res/icon_switch.bmp
{}

View file

@@ -1,19 +1,9 @@
{
"sysmodules" : [
{
"name" : "sys-ftpd",
"tid" : "420000000000000E",
"requires_reboot": false
},
{
"name" : "nxsh",
"tid" : "43000000000000FF",
"requires_reboot": false
},
{
"name" : "sys-netcheat",
"tid" : "430000000000000A",
"requires_reboot": false
"name" : "bootsoundnx",
"tid" : "00FF0000000002AA",
"requires_reboot": true
},
{
"name" : "hid-mitm",
@@ -21,9 +11,19 @@
"requires_reboot": true
},
{
"name" : "sys-clk",
"tid" : "00FF0000636C6BFF",
"requires_reboot": true
"name" : "nxsh",
"tid" : "43000000000000FF",
"requires_reboot": false
},
{
"name" : "ojds-nx",
"tid" : "0100000000000901",
"requires_reboot": false
},
{
"name" : "sys-netcheat",
"tid" : "430000000000000A",
"requires_reboot": false
}
]
}

View file

@@ -0,0 +1,5 @@
version = "KOSMOS_VERSION";
installed_files = [ ];
received_exfat_warning = false;
received_ignore_config_warning = false;
ignore_config_files = false;

View file

@@ -0,0 +1,5 @@
{
"name" : "ldn_mitm",
"tid" : "4200000000000010",
"requires_reboot": true
}

View file

@@ -0,0 +1,5 @@
{
"name" : "sys-clk",
"tid" : "00FF0000636C6BFF",
"requires_reboot": false
}

116
Builder/builder.py Executable file
View file

@@ -0,0 +1,116 @@
#!/usr/bin/env python3
#
# Kosmos Builder
# Copyright (C) 2020 Nichole Mattera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import argparse
import common
import config
import modules
import os
import shutil
import sys
def parse_args():
    """Parse the builder command line.

    Supports three subcommands (kosmos, sdsetup, kosmos-mini) plus a global
    -v/--version override. If no subcommand is given, prints help and exits.

    Returns:
        argparse.Namespace with at least `version`, `output`, and `command`
        (a common.Command member); `auto` is present only for sdsetup.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-v', '--version',
        default=None,
        type=str,
        help='Overrides the Kosmos Version from the config file.',
        metavar='KosmosVersion')

    subparsers = parser.add_subparsers()

    # Kosmos subcommand
    parser_kosmos = subparsers.add_parser('kosmos', help='Create a release build of Kosmos.')
    parser_kosmos.add_argument('output', help='Zip file to create.')
    parser_kosmos.set_defaults(command=common.Command.Kosmos)

    # SDSetup subcommand
    parser_sdsetup = subparsers.add_parser('sdsetup', help='Create a Kosmos modules for SDSetup.')
    parser_sdsetup.add_argument('output', help='Directory to output modules to.')
    parser_sdsetup.add_argument(
        '-a', '--auto',
        action='store_true',
        default=False,
        help='Perform an auto build.')
    parser_sdsetup.set_defaults(command=common.Command.SDSetup)

    # Kosmos Minimal subcommand.
    # Fix: this previously reused the `parser_kosmos` variable, shadowing the
    # 'kosmos' subparser binding above; it now has its own name.
    parser_kosmos_mini = subparsers.add_parser('kosmos-mini', help='Create a release build of Kosmos Minimal.')
    parser_kosmos_mini.add_argument('output', help='Zip file to create.')
    parser_kosmos_mini.set_defaults(command=common.Command.KosmosMinimal)

    # Parse arguments
    args = parser.parse_args()
    if not hasattr(args, 'command'):
        # No subcommand selected: show usage and stop.
        parser.print_help()
        sys.exit()

    return args
def get_kosmos_version(args):
    """Return the Kosmos version string to build.

    The command-line override (``-v/--version``) wins; otherwise the value
    from the config module is used.
    """
    override = args.version
    return override if override is not None else config.version
def init_version_messages(args, kosmos_version):
    """Return the header lines for the build's version report.

    Auto SDSetup builds (and unknown commands) produce no header at all.
    """
    command = args.command
    if command == common.Command.SDSetup:
        # Interactive SDSetup builds get a header; auto builds stay silent.
        return [] if args.auto else ['SDSetup Modules built with:']
    if command == common.Command.Kosmos:
        return [f'Kosmos {kosmos_version} built with:']
    if command == common.Command.KosmosMinimal:
        return [f'Kosmos Minimal {kosmos_version} built with:']
    return []
if __name__ == '__main__':
    args = parse_args()

    # Stage the build inside a unique directory under ./tmp.
    temp_directory = common.generate_temp_path()
    os.makedirs(temp_directory)

    kosmos_version = get_kosmos_version(args)

    # Only the sdsetup subcommand defines --auto; default to a manual build.
    auto_build = False
    if hasattr(args, 'auto'):
        auto_build = args.auto

    version_messages = init_version_messages(args, kosmos_version)

    # NOTE(review): modules.build appears to return per-module version
    # messages on success and None on failure — confirm in modules.py.
    build_messages = modules.build(temp_directory, kosmos_version, args.command, auto_build)

    # Remove any stale output from a previous run before producing new output.
    common.delete_path(args.output)
    if build_messages is not None:
        version_messages += build_messages

        if args.command == common.Command.SDSetup:
            # SDSetup output is a directory of modules, not an archive.
            shutil.move(temp_directory, args.output)
        else:
            # Kosmos / Kosmos Minimal ship as a zip of the staged tree.
            shutil.make_archive(
                os.path.splitext(args.output)[0],
                'zip',
                temp_directory)

        # Clean up the staging area once output is in place.
        common.delete_path(os.path.join(os.getcwd(), 'tmp'))

        for message in version_messages:
            print(message)
    else:
        # Build failed: discard the staging area and emit nothing.
        common.delete_path(os.path.join(os.getcwd(), 'tmp'))

77
Builder/common.py Normal file
View file

@@ -0,0 +1,77 @@
#
# Kosmos Builder
# Copyright (C) 2020 Nichole Mattera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import enum
import glob
import os
import re
import shutil
import uuid
# Build targets accepted by the builder CLI. Declared via the enum
# functional API; member order and values are stable identifiers.
Command = enum.Enum('Command', [
    ('Kosmos', 0),
    ('SDSetup', 1),
    ('KosmosMinimal', 2),
])
# Hosting services a module can be downloaded from. Values match the
# numeric "service" field used in the JSON module manifests.
GitService = enum.Enum('GitService', [
    ('GitHub', 0),
    ('GitLab', 1),
    ('SourceForge', 2),
])
def generate_temp_path():
    """Return a unique (not yet created) staging path under ./tmp."""
    unique_name = str(uuid.uuid4())
    return os.path.join(os.getcwd(), 'tmp', unique_name)
def delete_path(path):
    """Remove *path*, whether file or directory tree; no-op if absent."""
    if not os.path.exists(path):
        return
    if os.path.isfile(path):
        os.remove(path)
    else:
        shutil.rmtree(path)
def copy_module_file(module_name, file_name, destination):
    """Copy one file from ./Modules/<module_name>/ to *destination*.

    Returns the destination path (shutil.copyfile's return value).
    """
    source = os.path.join(os.getcwd(), 'Modules', module_name, file_name)
    return shutil.copyfile(source, destination)
def copy_module_folder(module_name, folder_name, destination):
    """Recursively copy ./Modules/<module_name>/<folder_name> to *destination*.

    *destination* must not already exist (copytree semantics). Returns the
    destination path.
    """
    source = os.path.join(os.getcwd(), 'Modules', module_name, folder_name)
    return shutil.copytree(source, destination)
def find_file(pattern):
    """Return all paths matching the shell glob *pattern* (non-recursive)."""
    return list(glob.iglob(pattern, recursive=False))
def sed(pattern, replace, file_path):
    """Rewrite *file_path* in place, applying re.sub(pattern, replace) to each line.

    Substitution is done line by line (newlines preserved), matching the
    behavior of a simple `sed s///` pass.
    """
    with open(file_path, 'r') as src:
        original_lines = src.readlines()
    rewritten = [re.sub(pattern, replace, line) for line in original_lines]
    with open(file_path, 'w') as dst:
        dst.writelines(rewritten)
def mkdir(path):
    """Create *path* (with any missing parents) if it does not already exist.

    Uses exist_ok=True instead of the previous exists-then-create pair, which
    could raise FileExistsError if another process created the directory
    between the check and the makedirs call (TOCTOU race).
    """
    os.makedirs(path, exist_ok=True)
def move_contents_of_folder(source, dest):
    """Recursively move everything inside *source* into *dest*.

    Subdirectories are recreated under *dest* and their contents moved entry
    by entry; plain files are moved directly into *dest*.
    """
    for entry in os.listdir(source):
        entry_path = os.path.join(source, entry)
        if not os.path.isdir(entry_path):
            shutil.move(entry_path, dest)
            continue
        nested_dest = os.path.join(dest, entry)
        # Inlined equivalent of the module's mkdir() helper.
        if not os.path.exists(nested_dest):
            os.makedirs(nested_dest)
        move_contents_of_folder(entry_path, nested_dest)

View file

@ -0,0 +1,9 @@
# Current version of Kosmos
version = 'v15.1'
# GitHub Login Information
github_username = ''
github_password = ''
# GitLab Login Information
gitlab_private_access_token = ''

View file

@ -0,0 +1,33 @@
[
{
"name": "Atmosphère",
"download_function_name": "download_atmosphere",
"git": {
"service": 0,
"org_name": "Atmosphere-NX",
"repo_name": "Atmosphere",
"asset_patterns": [
".*atmosphere.*\\.zip",
".*fusee.*\\.bin"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Hekate",
"download_function_name": "download_hekate",
"git": {
"service": 0,
"org_name": "CTCaer",
"repo_name": "hekate",
"asset_patterns": [
".*hekate.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
}
]

319
Builder/kosmos.json Normal file
View file

@ -0,0 +1,319 @@
[
{
"name": "Atmosphère",
"download_function_name": "download_atmosphere",
"git": {
"service": 0,
"org_name": "Atmosphere-NX",
"repo_name": "Atmosphere",
"asset_patterns": [
".*atmosphere.*\\.zip",
".*fusee.*\\.bin"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Hekate",
"download_function_name": "download_hekate",
"git": {
"service": 0,
"org_name": "CTCaer",
"repo_name": "hekate",
"asset_patterns": [
".*hekate.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Hekate Icons",
"download_function_name": "download_hekate_icons",
"git": {
"service": 0,
"org_name": "NicholeMattera",
"repo_name": "Hekate-Icons",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Homebrew App Store",
"download_function_name": "download_appstore",
"git": {
"service": 1,
"org_name": "4TU",
"repo_name": "hb-appstore",
"asset_patterns": [
"\\[.*\\.nro\\.zip\\]\\((.*\\.nro\\.zip)\\)"
],
"group": 0,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "EdiZon",
"download_function_name": "download_edizon",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "EdiZon",
"asset_patterns": [
".*\\.nro",
".*\\.ovl"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Emuiibo",
"download_function_name": "download_emuiibo",
"git": {
"service": 0,
"org_name": "XorTroll",
"repo_name": "emuiibo",
"asset_patterns": [
".*emuiibo.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Goldleaf",
"download_function_name": "download_goldleaf",
"git": {
"service": 0,
"org_name": "XorTroll",
"repo_name": "Goldleaf",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Kosmos Cleaner",
"download_function_name": "download_kosmos_cleaner",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Cleaner",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Kosmos Toolbox",
"download_function_name": "download_kosmos_toolbox",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Toolbox",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Kosmos Updater",
"download_function_name": "download_kosmos_updater",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Updater",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "ldn_mitm",
"download_function_name": "download_ldn_mitm",
"git": {
"service": 0,
"org_name": "spacemeowx2",
"repo_name": "ldn_mitm",
"asset_patterns": [
".*ldn_mitm.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Lockpick",
"download_function_name": "download_lockpick",
"git": {
"service": 0,
"org_name": "shchmue",
"repo_name": "Lockpick",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Lockpick_RCM",
"download_function_name": "download_lockpick_rcm",
"git": {
"service": 0,
"org_name": "shchmue",
"repo_name": "Lockpick_RCM",
"asset_patterns": [
".*\\.bin"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "nxdumptool",
"download_function_name": "download_nxdumptool",
"git": {
"service": 0,
"org_name": "DarkMatterCore",
"repo_name": "nxdumptool",
"asset_patterns": [
".*nxdumptool.*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "nx-ovlloader",
"download_function_name": "download_nx_ovlloader",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "nx-ovlloader",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "ovlSysModules",
"download_function_name": "download_ovl_sysmodules",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "ovl-sysmodules",
"asset_patterns": [
".*\\.ovl"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Status Monitor Overlay",
"download_function_name": "download_status_monitor_overlay",
"git": {
"service": 0,
"org_name": "masagrator",
"repo_name": "Status-Monitor-Overlay",
"asset_patterns": [
".*\\.ovl"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "sys-clk",
"download_function_name": "download_sys_clk",
"git": {
"service": 0,
"org_name": "retronx-team",
"repo_name": "sys-clk",
"asset_patterns": [
".*sys-clk-.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "sys-con",
"download_function_name": "download_sys_con",
"git": {
"service": 0,
"org_name": "cathery",
"repo_name": "sys-con",
"asset_patterns": [
".*sys-con-.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "sys-ftpd-light",
"download_function_name": "download_sys_ftpd_light",
"git": {
"service": 0,
"org_name": "cathery",
"repo_name": "sys-ftpd-light",
"asset_patterns": [
".*sys-ftpd-light.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
},
{
"name": "Tesla-Menu",
"download_function_name": "download_tesla_menu",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "Tesla-Menu",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": null
}
]

536
Builder/modules.py Normal file
View file

@ -0,0 +1,536 @@
#
# Kosmos Builder
# Copyright (C) 2020 Nichole Mattera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import common
import config
from github import Github
from gitlab import Gitlab
import json
import os
import re
import shutil
import urllib.request
import uuid
import xmltodict
import zipfile
# API clients shared by every download function below. Credentials come from
# config.py; empty strings mean unauthenticated (rate-limited) access.
gh = Github(config.github_username, config.github_password)
gl = Gitlab('https://gitlab.com', private_token=config.gitlab_private_access_token)
gl.auth()  # runs at import time, so an invalid GitLab token fails immediately
def get_latest_release(module, include_prereleases = True):
    """Fetch the newest release for *module*'s repository.

    GitHub returns a PyGithub release object (optionally skipping
    prereleases); GitLab returns the newest tag that carries release notes;
    SourceForge returns the project's parsed RSS feed as a dict. Returns
    None (after printing an error) when the repo or a release is not found.
    """
    if common.GitService(module['git']['service']) == common.GitService.GitHub:
        try:
            repo = gh.get_repo(f'{module["git"]["org_name"]}/{module["git"]["repo_name"]}')
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; narrowed so those still propagate.
            print(f'[Error] Unable to find repo: {module["git"]["org_name"]}/{module["git"]["repo_name"]}')
            return None
        releases = repo.get_releases()
        if releases.totalCount == 0:
            print(f'[Error] Unable to find any releases for repo: {module["git"]["org_name"]}/{module["git"]["repo_name"]}')
            return None
        if include_prereleases:
            return releases[0]
        for release in releases:
            if not release.prerelease:
                return release
        return None
    elif common.GitService(module['git']['service']) == common.GitService.GitLab:
        try:
            project = gl.projects.get(f'{module["git"]["org_name"]}/{module["git"]["repo_name"]}')
        except Exception:
            # Same narrowing as the GitHub branch above.
            print(f'[Error] Unable to find repo: {module["git"]["org_name"]}/{module["git"]["repo_name"]}')
            return None
        tags = project.tags.list()
        for tag in tags:
            if tag.release is not None:
                return tag
        print(f'[Error] Unable to find any releases for repo: {module["git"]["org_name"]}/{module["git"]["repo_name"]}')
        return None
    else:
        # SourceForge has no release API here; the project RSS feed is parsed
        # instead and consumed by download_asset/get_version.
        releases = None
        with urllib.request.urlopen(f'https://sourceforge.net/projects/{module["git"]["repo_name"]}/rss?path=/') as fd:
            releases = xmltodict.parse(fd.read().decode('utf-8'))
        return releases
def download_asset(module, release, index):
    """Download the release asset matching asset_patterns[index] to a temp path.

    Returns the local file path, or None when the release is missing or no
    asset matches the pattern.
    """
    pattern = module['git']['asset_patterns'][index]
    if common.GitService(module['git']['service']) == common.GitService.GitHub:
        if release is None:
            return None
        # First asset whose file name matches the regex wins.
        matched_asset = None
        for asset in release.get_assets():
            if re.search(pattern, asset.name):
                matched_asset = asset
                break
        if matched_asset is None:
            print(f'[Error] Unable to find asset that match pattern: "{pattern}"')
            return None
        download_path = common.generate_temp_path()
        urllib.request.urlretrieve(matched_asset.browser_download_url, download_path)
        return download_path
    elif common.GitService(module['git']['service']) == common.GitService.GitLab:
        # GitLab: the asset URL is embedded in the tag's release description;
        # the pattern captures it and "group" selects which capture group.
        group = module['git']['group']
        match = re.search(pattern, release.release['description'])
        if match is None:
            return None
        groups = match.groups()
        if len(groups) <= group:
            return None
        download_path = common.generate_temp_path()
        # NOTE(review): assumes the captured fragment begins with '/' so this
        # concatenation forms a valid gitlab.com URL — confirm against the
        # description format used by the 4TU/hb-appstore releases.
        urllib.request.urlretrieve(f'https://gitlab.com/{module["git"]["org_name"]}/{module["git"]["repo_name"]}{groups[group]}', download_path)
        return download_path
    else:
        # SourceForge: pick the first RSS item whose title matches and fetch
        # its link.
        matched_item = None
        for item in release['rss']['channel']['item']:
            if re.search(pattern, item['title']):
                matched_item = item
                break
        if matched_item is None:
            print(f'[Error] Unable to find asset that match pattern: "{pattern}"')
            return None
        download_path = common.generate_temp_path()
        urllib.request.urlretrieve(matched_item['link'], download_path)
        return download_path
def find_asset(release, pattern):
    """Return the first asset of *release* whose name matches *pattern*, else None."""
    matching = (a for a in release.get_assets() if re.search(pattern, a.name))
    return next(matching, None)
def get_version(module, release, index):
    """Best-effort human-readable version string for *release*.

    GitHub releases use the tag name and GitLab tags use their name. For
    SourceForge, the RSS item matching asset_patterns[index] is searched
    with the module's version_pattern; any failure falls back to "Latest".
    """
    if common.GitService(module['git']['service']) == common.GitService.GitHub:
        return release.tag_name
    elif common.GitService(module['git']['service']) == common.GitService.GitLab:
        return release.name
    else:
        # SourceForge: find the feed item for this asset, then extract the
        # version from its title via the configured capture group.
        matched_item = None
        for item in release['rss']['channel']['item']:
            if re.search(module['git']['asset_patterns'][index], item['title']):
                matched_item = item
                break
        if matched_item is None:
            return "Latest"
        match = re.search(module['git']['version_pattern'], matched_item['title'])
        if match is None:
            return "Latest"
        groups = match.groups()
        if len(groups) == 0:
            return "Latest"
        return groups[0]
def download_atmosphere(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest Atmosphère release into temp_directory.

    Extracts the main zip, removes pieces Kosmos replaces, installs the
    fusee payload under bootloader/payloads/, and copies in Kosmos'
    system_settings.ini. Returns the release version, or None when any
    asset could not be downloaded.
    """
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None
    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)
    common.delete_path(bundle_path)
    # Strip pieces this bundle does not ship; hekate later supplies its own
    # atmosphere/reboot_payload.bin (see download_hekate).
    common.delete_path(os.path.join(temp_directory, 'switch', 'reboot_to_payload.nro'))
    common.delete_path(os.path.join(temp_directory, 'switch'))
    common.delete_path(os.path.join(temp_directory, 'atmosphere', 'reboot_payload.bin'))
    # Second asset pattern is the fusee payload binary.
    payload_path = download_asset(module, release, 1)
    if payload_path is None:
        return None
    common.mkdir(os.path.join(temp_directory, 'bootloader', 'payloads'))
    shutil.move(payload_path, os.path.join(temp_directory, 'bootloader', 'payloads', 'fusee-primary.bin'))
    common.copy_module_file('atmosphere', 'system_settings.ini', os.path.join(temp_directory, 'atmosphere', 'config', 'system_settings.ini'))
    if not kosmos_build:
        # SDSetup builds do not include hbmenu.nro from this module.
        common.delete_path(os.path.join(temp_directory, 'hbmenu.nro'))
    return get_version(module, release, 0)
def download_hekate(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest hekate release and lay out its Kosmos configuration.

    Installs the Kosmos bootlogo and hekate_ipl.ini (with KOSMOS_VERSION
    substituted), copies the hekate payload to bootloader/update.bin and
    atmosphere/reboot_payload.bin, and for SDSetup builds relocates the
    bootloader files into the sibling must_have module.
    """
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None
    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)
    common.delete_path(bundle_path)
    common.copy_module_file('hekate', 'bootlogo.bmp', os.path.join(temp_directory, 'bootloader', 'bootlogo.bmp'))
    common.copy_module_file('hekate', 'hekate_ipl.ini', os.path.join(temp_directory, 'bootloader', 'hekate_ipl.ini'))
    common.sed('KOSMOS_VERSION', kosmos_version, os.path.join(temp_directory, 'bootloader', 'hekate_ipl.ini'))
    # The payload file name embeds the release version, hence the glob.
    payload = common.find_file(os.path.join(temp_directory, 'hekate_ctcaer_*.bin'))
    if len(payload) != 0:
        shutil.copyfile(payload[0], os.path.join(temp_directory, 'bootloader', 'update.bin'))
        common.mkdir(os.path.join(temp_directory, 'atmosphere'))
        shutil.copyfile(payload[0], os.path.join(temp_directory, 'atmosphere', 'reboot_payload.bin'))
    common.delete_path(os.path.join(temp_directory, 'nyx_usb_max_rate (run once per windows pc).reg'))
    if not kosmos_build:
        # SDSetup: bootloader files belong to the must_have module directory
        # next to this module's own directory.
        common.mkdir(os.path.join(temp_directory, '..', 'must_have'))
        common.move_contents_of_folder(os.path.join(temp_directory, 'bootloader'), os.path.join(temp_directory, '..', 'must_have', 'bootloader'))
        # NOTE(review): this move assumes ../must_have/atmosphere already
        # exists (e.g. created by download_atmosphere running first) —
        # shutil.move fails if the parent directory is missing; confirm
        # module ordering in sdsetup.json guarantees that.
        shutil.move(os.path.join(temp_directory, 'atmosphere', 'reboot_payload.bin'), os.path.join(temp_directory, '..', 'must_have', 'atmosphere', 'reboot_payload.bin'))
        common.delete_path(os.path.join(temp_directory, 'atmosphere'))
    return get_version(module, release, 0)
def download_hekate_icons(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest Hekate Icons pack and extract it into temp_directory."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    res_dir = os.path.join(temp_directory, 'bootloader', 'res')
    # Keep the stock icons around under *_hue names; the swap to the custom
    # icons is currently disabled.
    shutil.move(os.path.join(res_dir, 'icon_payload.bmp'), os.path.join(res_dir, 'icon_payload_hue.bmp'))
    #shutil.move(os.path.join(res_dir, 'icon_payload_custom.bmp'), os.path.join(res_dir, 'icon_payload.bmp'))
    shutil.move(os.path.join(res_dir, 'icon_switch.bmp'), os.path.join(res_dir, 'icon_switch_hue.bmp'))
    #shutil.move(os.path.join(res_dir, 'icon_switch_custom.bmp'), os.path.join(res_dir, 'icon_switch.bmp'))
    return get_version(module, latest, 0)
def download_appstore(module, temp_directory, kosmos_version, kosmos_build):
    """Download the Homebrew App Store and place appstore.nro under switch/appstore/."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    appstore_dir = os.path.join(temp_directory, 'switch', 'appstore')
    common.mkdir(appstore_dir)
    shutil.move(os.path.join(temp_directory, 'appstore.nro'), os.path.join(appstore_dir, 'appstore.nro'))
    return get_version(module, latest, 0)
def download_edizon(module, temp_directory, kosmos_version, kosmos_build):
    """Download EdiZon: the .nro application plus its overlay (.ovl)."""
    latest = get_latest_release(module)
    app = download_asset(module, latest, 0)
    if app is None:
        return None
    app_dir = os.path.join(temp_directory, 'switch', 'EdiZon')
    common.mkdir(app_dir)
    shutil.move(app, os.path.join(app_dir, 'EdiZon.nro'))
    # Second asset pattern is the Tesla overlay.
    overlay = download_asset(module, latest, 1)
    if overlay is None:
        return None
    overlay_dir = os.path.join(temp_directory, 'switch', '.overlays')
    common.mkdir(overlay_dir)
    shutil.move(overlay, os.path.join(overlay_dir, 'ovlEdiZon.ovl'))
    return get_version(module, latest, 0)
def download_emuiibo(module, temp_directory, kosmos_version, kosmos_build):
    """Download emuiibo and relocate its SdOut layout into the SD root layout.

    The release zip nests everything under SdOut/; the sysmodule title and
    the overlay are moved up, then SdOut/ is removed. Kosmos builds drop
    the boot2.flag file and add a Kosmos Toolbox toolbox.json.
    """
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None
    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)
    common.delete_path(bundle_path)
    common.mkdir(os.path.join(temp_directory, 'atmosphere', 'contents'))
    shutil.move(os.path.join(temp_directory, 'SdOut', 'atmosphere', 'contents', '0100000000000352'), os.path.join(temp_directory, 'atmosphere', 'contents', '0100000000000352'))
    common.mkdir(os.path.join(temp_directory, 'switch', '.overlays'))
    shutil.move(os.path.join(temp_directory, 'SdOut', 'switch', '.overlays', 'emuiibo.ovl'), os.path.join(temp_directory, 'switch', '.overlays', 'emuiibo.ovl'))
    common.delete_path(os.path.join(temp_directory, 'SdOut'))
    if kosmos_build:
        common.delete_path(os.path.join(temp_directory, 'atmosphere', 'contents', '0100000000000352', 'flags', 'boot2.flag'))
        common.copy_module_file('emuiibo', 'toolbox.json', os.path.join(temp_directory, 'atmosphere', 'contents', '0100000000000352', 'toolbox.json'))
    return get_version(module, release, 0)
def download_goldleaf(module, temp_directory, kosmos_version, kosmos_build):
    """Download Goldleaf.nro into switch/Goldleaf/."""
    latest = get_latest_release(module)
    nro = download_asset(module, latest, 0)
    if nro is None:
        return None
    target_dir = os.path.join(temp_directory, 'switch', 'Goldleaf')
    common.mkdir(target_dir)
    shutil.move(nro, os.path.join(target_dir, 'Goldleaf.nro'))
    return get_version(module, latest, 0)
def download_kosmos_cleaner(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest Kosmos Cleaner release and extract it into temp_directory.

    Returns the release version, or None when the asset could not be
    downloaded.
    """
    release = get_latest_release(module)
    bundle_path = download_asset(module, release, 0)
    if bundle_path is None:
        return None
    with zipfile.ZipFile(bundle_path, 'r') as zip_ref:
        zip_ref.extractall(temp_directory)
    # Fix: every other module deletes its downloaded archive after
    # extraction; this one previously leaked it in the tmp directory.
    common.delete_path(bundle_path)
    return get_version(module, release, 0)
def download_kosmos_toolbox(module, temp_directory, kosmos_version, kosmos_build):
    """Download Kosmos Toolbox and install its bundled config.json."""
    latest = get_latest_release(module)
    app = download_asset(module, latest, 0)
    if app is None:
        return None
    app_dir = os.path.join(temp_directory, 'switch', 'KosmosToolbox')
    common.mkdir(app_dir)
    shutil.move(app, os.path.join(app_dir, 'KosmosToolbox.nro'))
    common.copy_module_file('kosmos-toolbox', 'config.json', os.path.join(app_dir, 'config.json'))
    return get_version(module, latest, 0)
def download_kosmos_updater(module, temp_directory, kosmos_version, kosmos_build):
    """Download Kosmos Updater and stamp its internal.db with the build version."""
    latest = get_latest_release(module)
    app = download_asset(module, latest, 0)
    if app is None:
        return None
    app_dir = os.path.join(temp_directory, 'switch', 'KosmosUpdater')
    common.mkdir(app_dir)
    shutil.move(app, os.path.join(app_dir, 'KosmosUpdater.nro'))
    db_path = os.path.join(app_dir, 'internal.db')
    common.copy_module_file('kosmos-updater', 'internal.db', db_path)
    common.sed('KOSMOS_VERSION', kosmos_version, db_path)
    return get_version(module, latest, 0)
def download_ldn_mitm(module, temp_directory, kosmos_version, kosmos_build):
    """Download ldn_mitm; Kosmos builds remove boot2.flag and add toolbox.json."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    if kosmos_build:
        title_dir = os.path.join(temp_directory, 'atmosphere', 'contents', '4200000000000010')
        common.delete_path(os.path.join(title_dir, 'flags', 'boot2.flag'))
        common.copy_module_file('ldn_mitm', 'toolbox.json', os.path.join(title_dir, 'toolbox.json'))
    return get_version(module, latest, 0)
def download_lockpick(module, temp_directory, kosmos_version, kosmos_build):
    """Download Lockpick.nro into switch/Lockpick/."""
    latest = get_latest_release(module)
    app = download_asset(module, latest, 0)
    if app is None:
        return None
    target_dir = os.path.join(temp_directory, 'switch', 'Lockpick')
    common.mkdir(target_dir)
    shutil.move(app, os.path.join(target_dir, 'Lockpick.nro'))
    return get_version(module, latest, 0)
def download_lockpick_rcm(module, temp_directory, kosmos_version, kosmos_build):
    """Download the Lockpick_RCM payload binary.

    Kosmos builds place it under bootloader/payloads/; other builds leave it
    at the top of the module directory.
    """
    latest = get_latest_release(module)
    payload = download_asset(module, latest, 0)
    if payload is None:
        return None
    if not kosmos_build:
        shutil.move(payload, os.path.join(temp_directory, 'Lockpick_RCM.bin'))
    else:
        payload_dir = os.path.join(temp_directory, 'bootloader', 'payloads')
        common.mkdir(payload_dir)
        shutil.move(payload, os.path.join(payload_dir, 'Lockpick_RCM.bin'))
    return get_version(module, latest, 0)
def download_nxdumptool(module, temp_directory, kosmos_version, kosmos_build):
    """Download NXDumpTool.nro into switch/NXDumpTool/."""
    latest = get_latest_release(module)
    app = download_asset(module, latest, 0)
    if app is None:
        return None
    target_dir = os.path.join(temp_directory, 'switch', 'NXDumpTool')
    common.mkdir(target_dir)
    shutil.move(app, os.path.join(target_dir, 'NXDumpTool.nro'))
    return get_version(module, latest, 0)
def download_nx_ovlloader(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest nx-ovlloader bundle and extract it into temp_directory."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    return get_version(module, latest, 0)
def download_ovl_sysmodules(module, temp_directory, kosmos_version, kosmos_build):
    """Download the ovlSysmodules overlay into switch/.overlays/."""
    latest = get_latest_release(module)
    overlay = download_asset(module, latest, 0)
    if overlay is None:
        return None
    overlay_dir = os.path.join(temp_directory, 'switch', '.overlays')
    common.mkdir(overlay_dir)
    shutil.move(overlay, os.path.join(overlay_dir, 'ovlSysmodules.ovl'))
    return get_version(module, latest, 0)
def download_status_monitor_overlay(module, temp_directory, kosmos_version, kosmos_build):
    """Download the Status Monitor overlay into switch/.overlays/."""
    latest = get_latest_release(module)
    overlay = download_asset(module, latest, 0)
    if overlay is None:
        return None
    overlay_dir = os.path.join(temp_directory, 'switch', '.overlays')
    common.mkdir(overlay_dir)
    shutil.move(overlay, os.path.join(overlay_dir, 'Status-Monitor-Overlay.ovl'))
    return get_version(module, latest, 0)
def download_sys_clk(module, temp_directory, kosmos_version, kosmos_build):
    """Download sys-clk; Kosmos builds strip boot2.flag/README and add toolbox.json."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    if kosmos_build:
        title_dir = os.path.join(temp_directory, 'atmosphere', 'contents', '00FF0000636C6BFF')
        common.delete_path(os.path.join(title_dir, 'flags', 'boot2.flag'))
        common.delete_path(os.path.join(temp_directory, 'README.md'))
        common.copy_module_file('sys-clk', 'toolbox.json', os.path.join(title_dir, 'toolbox.json'))
    return get_version(module, latest, 0)
def download_sys_con(module, temp_directory, kosmos_version, kosmos_build):
    """Download sys-con; Kosmos builds remove its boot2.flag."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    if kosmos_build:
        common.delete_path(os.path.join(temp_directory, 'atmosphere', 'contents', '690000000000000D', 'flags', 'boot2.flag'))
    return get_version(module, latest, 0)
def download_sys_ftpd_light(module, temp_directory, kosmos_version, kosmos_build):
    """Download sys-ftpd-light; Kosmos builds remove its boot2.flag."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    if kosmos_build:
        common.delete_path(os.path.join(temp_directory, 'atmosphere', 'contents', '420000000000000E', 'flags', 'boot2.flag'))
    return get_version(module, latest, 0)
def download_tesla_menu(module, temp_directory, kosmos_version, kosmos_build):
    """Download the latest Tesla-Menu bundle and extract it into temp_directory."""
    latest = get_latest_release(module)
    archive = download_asset(module, latest, 0)
    if archive is None:
        return None
    with zipfile.ZipFile(archive) as bundle:
        bundle.extractall(temp_directory)
    common.delete_path(archive)
    return get_version(module, latest, 0)
def build(temp_directory, kosmos_version, command, auto_build):
    """Download every module listed for the requested build flavor.

    *command* selects the manifest (kosmos.json, kosmos-minimal.json, or
    sdsetup.json). SDSetup builds download each module into its own
    sdsetup_module_name subfolder of temp_directory; other builds share
    temp_directory directly. Returns a list of result strings — for SDSetup
    auto builds in machine-parseable "module:version" form, otherwise
    human-readable "name - version" lines — or None if any module failed.
    """
    results = []
    modules_filename = 'kosmos.json'
    if command == common.Command.KosmosMinimal:
        modules_filename = 'kosmos-minimal.json'
    elif command == common.Command.SDSetup:
        modules_filename = 'sdsetup.json'
    # Open up modules.json
    with open(modules_filename) as json_file:
        # Parse JSON
        data = json.load(json_file)
        # Loop through modules
        for module in data:
            # Running a SDSetup Build
            if command == common.Command.SDSetup:
                # Only show prompts when it's not an auto build.
                if not auto_build:
                    print(f'Downloading {module["name"]}...')
                # Make sure module directory is created.
                module_directory = os.path.join(temp_directory, module['sdsetup_module_name'])
                common.mkdir(module_directory)
                # Download the module.
                # Dispatch by name to the matching download_* function above.
                download = globals()[module['download_function_name']]
                version = download(module, module_directory, kosmos_version, False)
                if version is None:
                    return None
                # Auto builds have a different prompt at the end for parsing.
                if auto_build:
                    results.append(f'{module["sdsetup_module_name"]}:{version}')
                else:
                    results.append(f'  {module["name"]} - {version}')
            # Running a Kosmos Build
            else:
                # Download the module.
                print(f'Downloading {module["name"]}...')
                download = globals()[module['download_function_name']]
                version = download(module, temp_directory, kosmos_version, True)
                if version is None:
                    return None
                results.append(f'  {module["name"]} - {version}')
    return results

3
Builder/requirements.txt Normal file
View file

@ -0,0 +1,3 @@
PyGithub
python-gitlab
xmltodict

243
Builder/sdsetup.json Normal file
View file

@ -0,0 +1,243 @@
[
{
"name": "Atmosphère",
"download_function_name": "download_atmosphere",
"git": {
"service": 0,
"org_name": "Atmosphere-NX",
"repo_name": "Atmosphere",
"asset_patterns": [
".*atmosphere.*\\.zip",
".*fusee.*\\.bin"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "must_have"
},
{
"name": "Hekate",
"download_function_name": "download_hekate",
"git": {
"service": 0,
"org_name": "CTCaer",
"repo_name": "hekate",
"asset_patterns": [
".*hekate.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "hekate"
},
{
"name": "Hekate Icons",
"download_function_name": "download_hekate_icons",
"git": {
"service": 0,
"org_name": "NicholeMattera",
"repo_name": "Hekate-Icons",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "hekate_icons"
},
{
"name": "Emuiibo",
"download_function_name": "download_emuiibo",
"git": {
"service": 0,
"org_name": "XorTroll",
"repo_name": "emuiibo",
"asset_patterns": [
".*emuiibo.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "emuiibo"
},
{
"name": "Kosmos Cleaner",
"download_function_name": "download_kosmos_cleaner",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Cleaner",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "kosmos_cleaner"
},
{
"name": "Kosmos Toolbox",
"download_function_name": "download_kosmos_toolbox",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Toolbox",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "kosmos_toolbox"
},
{
"name": "Kosmos Updater",
"download_function_name": "download_kosmos_updater",
"git": {
"service": 0,
"org_name": "AtlasNX",
"repo_name": "Kosmos-Updater",
"asset_patterns": [
".*\\.nro"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "kosmos_updater"
},
{
"name": "ldn_mitm",
"download_function_name": "download_ldn_mitm",
"git": {
"service": 0,
"org_name": "spacemeowx2",
"repo_name": "ldn_mitm",
"asset_patterns": [
".*ldn_mitm.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "ldn_mitm"
},
{
"name": "Lockpick_RCM",
"download_function_name": "download_lockpick_rcm",
"git": {
"service": 0,
"org_name": "shchmue",
"repo_name": "Lockpick_RCM",
"asset_patterns": [
".*\\.bin"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "lockpick_rcm"
},
{
"name": "nx-ovlloader",
"download_function_name": "download_nx_ovlloader",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "nx-ovlloader",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "nx_ovlloader"
},
{
"name": "ovlSysModules",
"download_function_name": "download_ovl_sysmodules",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "ovl-sysmodules",
"asset_patterns": [
".*\\.ovl"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "ovl_sysmodules"
},
{
"name": "Status Monitor Overlay",
"download_function_name": "download_status_monitor_overlay",
"git": {
"service": 0,
"org_name": "masagrator",
"repo_name": "Status-Monitor-Overlay",
"asset_patterns": [
".*\\.ovl"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "status_monitor_overlay"
},
{
"name": "sys-clk",
"download_function_name": "download_sys_clk",
"git": {
"service": 0,
"org_name": "retronx-team",
"repo_name": "sys-clk",
"asset_patterns": [
".*sys-clk-.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "sys_clk"
},
{
"name": "sys-con",
"download_function_name": "download_sys_con",
"git": {
"service": 0,
"org_name": "cathery",
"repo_name": "sys-con",
"asset_patterns": [
".*sys-con-.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "sys_con"
},
{
"name": "sys-ftpd-light",
"download_function_name": "download_sys_ftpd_light",
"git": {
"service": 0,
"org_name": "cathery",
"repo_name": "sys-ftpd-light",
"asset_patterns": [
".*sys-ftpd-light.*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "sys_ftpd"
},
{
"name": "Tesla-Menu",
"download_function_name": "download_tesla_menu",
"git": {
"service": 0,
"org_name": "WerWolv",
"repo_name": "Tesla-Menu",
"asset_patterns": [
".*\\.zip"
],
"group": null,
"version_pattern": null
},
"sdsetup_module_name": "tesla_menu"
}
]

View file

@ -1,18 +0,0 @@
Licenses
========
* Atmosphère is licensed under [GPLv2](https://github.com/Atmosphere-NX/Atmosphere/blob/master/LICENSE)
* Switch Homebrew Menu is licensed under [ISC](https://github.com/switchbrew/nx-hbmenu/blob/master/LICENSE.md)
* Switch Homebrew Loader is licensed under [ISC](https://github.com/switchbrew/nx-hbloader/blob/master/LICENSE.md)
* AppstoreNX is licensed under [GPLv3](https://github.com/vgmoose/appstorenx/blob/master/LICENSE)
* EdiZon is licensed under [GPLv2](https://github.com/WerWolv/EdiZon/blob/master/LICENSE)
* SDFilesUpdater is licensed under [GPLv2](https://github.com/StevenMattera/SDFilesUpdater/blob/master/LICENSE)
* Sys-FTPD is licensed under [GPLv3](https://github.com/jakibaki/sys-ftpd/blob/master/LICENSE)
* Goldleaf is licensed under [GPLv3](https://github.com/XorTroll/Goldleaf/blob/master/LICENSE)
* Sys-Netcheat is licensed under [GPLv3](https://github.com/jakibaki/sys-netcheat/blob/master/LICENSE)
* Hekate is licensed under [GPLv2](https://github.com/CTCaer/hekate/blob/master/LICENSE)
* Noexes is licensed under [GPLv3](https://github.com/mdbell/Noexes/blob/master/LICENSE)
* Lockpick is licensed under [GPLv2](https://github.com/shchmue/Lockpick/blob/master/LICENSE)
* sys-clk is licensed under [Beerware](https://github.com/retronx-team/sys-clk/blob/master/LICENSE)
All patches made to the original software respect their original license.

View file

@ -1,14 +0,0 @@
folder name | function
------------|-----------
must_have | Files that are **required** to make this work
bootlogo | The boot logo; disable this when somebody wants to add their own custom boot logo, **otherwise enable it!**
es_patches | ES patches, part of the signature patches
hekate_payload | Just the hekate payload
sys-ftpd | FTPD sys-module by jakibaki
sys-netcheat | netcheat sys-module by jakibaki
edizon | EdiZon
appstore | AppstoreNX
KosmosUpdater | A homebrew for directly updating Kosmos
hbmenu | The homebrew menu
kosmos_toolkit | Homebrew for modifying CFW settings
lockpick | This allows dumping all keys used by the Switch

File diff suppressed because it is too large Load diff

View file

@ -1,619 +0,0 @@
{
"author": "Ac_K",
"scriptLanguage": "py",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "userdata.dat",
"filetype": "mk8",
"items": [
{
"name": "Coins",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "195C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Drifts",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1968"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Mini-Turbos",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1970"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Super Mini-Turbos",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1974"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Ultra Mini-Turbos",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1978"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Jump Boosts",
"category": "Main Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1964"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Race Rating",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1E94"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Battle Rating",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1E98"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Total Battles",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1E9C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Friend Battles",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EA0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Tournament Battles",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EA4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Racing (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EA8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EAC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Silver)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EB0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Bronze)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EB4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EB8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EBC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Silver)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EC0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Bronze)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EC4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Renegade Roundup (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EC8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Renegade Roundup (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1ECC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1ED8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EDC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Silver)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EE0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Bronze)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EE4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EE8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EEC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Silver)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EF0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Bronze)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EF4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (All)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EF8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Gold)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1EFC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Silver)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F00"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Bronze)",
"category": "Online Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F04"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F08"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F0C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Silver)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F10"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Racing (Bronze)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F14"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F18"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F1C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Silver)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F20"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Balloon Battle (Bronze)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F24"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Renegade Roundup (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F28"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Renegade Roundup (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F2C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F38"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F3C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Silver)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F40"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Bob-omb Blast (Bronze)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F44"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F48"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F4C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Silver)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F50"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Coin Runners (Bronze)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F54"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (All)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F58"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Gold)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F5C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Silver)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F60"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Shine Thief (Bronze)",
"category": "Wireless Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "1F64"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
}
]
}
]
}

View file

@ -1,197 +0,0 @@
{
"author": "WerWolv",
"scriptLanguage": "py",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "[0-9]\\.celeste",
"filetype": "xmls",
"items": [
{
"name": "Total Deaths",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalDeaths"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Total Strawberries",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalStrawberries"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Total Golden Strawberries",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalGoldenStrawberries"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Total Jumps",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalJumps"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Total Wall Jumps",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalWallJumps"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Total Dashes",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "TotalDashes"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Respawn point X (Dangerous)",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "CurrentSession", "RespawnPoint", "X"],
"widget": {
"type": "int",
"minValue": -50000,
"maxValue": 50000
}
},
{
"name": "Respawn point Y (Dangerous)",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "CurrentSession", "RespawnPoint", "Y"],
"widget": {
"type": "int",
"minValue": -50000,
"maxValue": 50000
}
},
{
"name": "Number of dashes",
"category": "Player stats",
"intArgs": [0],
"strArgs": ["SaveData", "CurrentSession", "Inventory", "Dashes"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 100
}
},
{
"name": "Cheat Mode",
"category": "Assists",
"intArgs": [1],
"strArgs": ["SaveData", "CheatMode"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Assist Mode",
"category": "Assists",
"intArgs": [1],
"strArgs": ["SaveData", "AssistMode"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Game Speed (Default: 10)",
"category": "Assists",
"intArgs": [0],
"strArgs": ["SaveData", "Assists", "GameSpeed"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 20
}
},
{
"name": "Invincible",
"category": "Assists",
"intArgs": [1],
"strArgs": ["SaveData", "Assists", "Invincible"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Infinite Stamina",
"category": "Assists",
"intArgs": [1],
"strArgs": ["SaveData", "Assists", "InfiniteStamina"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
}
]
},
{
"saveFilePaths": [""],
"files": "settings\\.celeste",
"filetype": "xmls",
"items": [
{
"name": "Pico8 on main menu",
"category": "Settings",
"intArgs": [1],
"strArgs": ["Settings", "Pico8OnMainMenu"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Debug mode",
"category": "Settings",
"intArgs": [1],
"strArgs": ["Settings", "LaunchInDebugMode"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
}
]
}
]
}

View file

@ -1,26 +0,0 @@
{
"author": "WerWolv",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "buffer_save\\.dat",
"filetype": "bin",
"items": [
{
"name": "Party Points",
"category": "Collectibles",
"intArgs": [2, 4],
"strArgs": ["0000", "000C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1000000,
"stepSize": 100
}
}
]
}
]
}

View file

@ -1,168 +0,0 @@
{
"author": "Jojo",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": ["save"],
"files": "DRAKS\\d+\\.sl2",
"filetype": "darksouls",
"items": [
{
"name": "HP",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "0070"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Max HP",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "0078"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Stamina",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "008C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Max Stamina",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "0094"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Vitality",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "009C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Attunement",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00A4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Endurance",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00AC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Strength",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00B4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Dexterity",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00BC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Intelligence",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00C4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Faith",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00CC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Resistance",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00E4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Level",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00EC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Souls",
"category": "Save 1",
"intArgs": [4, 4],
"strArgs": ["0028", "00F0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
}
]
}
]
}

View file

@ -1,597 +0,0 @@
{
"author": "findonovan95",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "save.001",
"filetype": "bin",
"items": [
{
"name": "HL",
"category": "Money",
"intArgs": [0, 8],
"strArgs": ["0000", "0290"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999999999
}
},
{
"name": "HP",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "1184"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "Mana",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "11E4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
},
{
"name": "SP",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "1180"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "ATT",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "118C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "DEF",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "1190"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "HIT",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "119C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "INT",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "1184"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "SPD",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "1198"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "RES",
"category": "Laharl",
"intArgs": [0, 4],
"strArgs": ["0000", "11A8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 10000000
}
},
{
"name": "Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C00"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 268435455
}
},
{
"name": "HP",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C08"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "SP",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C0C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "ATT",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C10"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "DEF",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C14"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "INT",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C18"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "SPD",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C1C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "HIT",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C20"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "RES",
"category": "Laharl's Main Weapon",
"intArgs": [0, 4],
"strArgs": ["0000", "0C24"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 2000000
}
},
{
"name": "Specialist 1 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BC0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 2 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BC4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 3 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BC8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 4 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BCC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 5 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BD0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 6 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BD4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 7 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BD8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 8 - Value",
"category": "Laharl's Main Weapon",
"intArgs": [0, 2],
"strArgs": ["0000", "0BDC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 1 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BC2"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 2 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BC6"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 3 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BCA"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 4 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BCE"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 5 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BD2"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 6 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BD6"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 7 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BDA"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 8 - Type",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0BDE"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "POP",
"category": "Laharl's Main Weapon",
"intArgs": [0, 1],
"strArgs": ["0000", "0C41"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 8
}
},
{
"name": "Specialist 1 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C50"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 2 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C54"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 3 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C58"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 4 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C5C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 5 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C60"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 6 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C64"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 7 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C68"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 8 - Type",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C6C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65
}
},
{
"name": "Specialist 1 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C4E"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 2 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C52"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 3 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C56"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 4 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C5A"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 5 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C5E"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 6 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C62"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 7 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C66"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "Specialist 8 - Value",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0C6A"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 19999
}
},
{
"name": "POP",
"category": "Laharl's Item - Other #1",
"intArgs": [0, 1],
"strArgs": ["0000", "0CCF"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 8
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "trueicecold",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": ["Managers"],
"files": "PlayerManager.bs",
"filetype": "bin",
"items": [
{
"name": "Levels Unlocked (All or nothing)",
"category": "Unlockables",
"intArgs": [2, 4],
"strArgs": ["0000", "0086"],
"widget": {
"type": "bool",
"onValue": 400000,
"offValue": 0
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "Jojo",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "data.bin",
"filetype": "puyopuyo",
"items": [
{
"name": "Credits",
"category": "Game",
"intArgs": [2, 2],
"strArgs": ["0000", "06C4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
}
]
}
]
}

View file

@ -1,3 +0,0 @@
{
"useInstead": "0100E66006406000.json"
}

View file

@ -1,630 +0,0 @@
{
"author": "WerWolv",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "user\\d\\.dat",
"filetype": "json",
"items": [
{
"name": "Geo count",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["playerData", "geo"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Dream orb count",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["playerData", "dreamOrbs"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "Simple keys count",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["playerData", "simpleKeys"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 100
}
},
{
"name": "Pale ore count",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["playerData", "ore"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1000
}
},
{
"name": "Nail Damage",
"category": "Stats",
"intArgs": [0],
"strArgs": ["playerData", "nailDamage"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 100
}
},
{
"name": "Max Health",
"category": "Stats",
"intArgs": [0],
"strArgs": ["playerData", "maxHealthBase"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 100
}
},
{
"name": "Charm slot count",
"category": "Stats",
"intArgs": [0],
"strArgs": ["playerData", "charmSlots"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Gathering Swarm enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_1"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Wayward Compass enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_2"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Grubsong enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_3"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Stalwart Shell enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_4"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Baldur Shell enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_5"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Fury of the Fallen enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_6"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Quick Focus enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_7"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Lifeblood Heart enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_8"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Lifeblood Core enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_9"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Defender's Crest enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_10"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Flukenest enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_11"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Thorns of Agony enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_12"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Mark of Pride enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_13"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Steady Body enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_14"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Heavy Blow enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_15"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Sharp Shadow enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_16"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Spore Shroom enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_17"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Longnail enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_18"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Shaman Stone enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_19"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Soul Catcher enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_20"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Soul Eater enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_21"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Glowing Womb enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_22"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Fragile Heart enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_23"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Fragile Greed enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_24"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Fragile Strength enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_25"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Nailmaster's Glory enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_26"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Joni's Blessing enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_27"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Shape of Unn enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_28"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Hiveblood enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_29"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dream Wielder enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_30"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dashmaster enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_31"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Quick Slash enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_32"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Spell Twister enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_33"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Deep Focus enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_34"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Grubberfly's Elegy enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_35"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Void Soul enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_36"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Sprintmaster enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_37"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dreamshield enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_38"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Weaversong enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_39"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Grimmchild / Carefree Melody enabled",
"category": "Charms",
"intArgs": [1],
"strArgs": ["playerData", "gotCharm_40"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Mothwing Cloak enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasDash"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Shadow Mothwing Cloak enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasShadowDash"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Mantis Claw enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasWalljump"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Crystal Dash enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasSuperDash"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Monarch Wings enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasDoubleJump"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dreamnail enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasDreamNail"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dream Gate enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasDreamGate"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Dreamnail Upgrade enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "dreamNailUpgraded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Isma's Tear enabled",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["playerData", "hasAcidArmour"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
}
]
}
]
}

View file

@ -1,300 +0,0 @@
{
"author": "mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": ["save_data"],
"files": "system_data.bin",
"filetype": "smash",
"items": [
{
"name": "Gold",
"category": "1. General",
"intArgs": [3, 3],
"strArgs": ["000000", "5506DC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
},
{
"name": "Spirit Points (SP)",
"category": "1. General",
"intArgs": [3, 3],
"strArgs": ["000000", "4831E4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
},
{
"name": "Hammers",
"category": "1. General",
"intArgs": [3, 1],
"strArgs": ["000000", "555E5C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Skill Points",
"category": "1. General",
"intArgs": [3, 3],
"strArgs": ["000000", "4C40D8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
},
{
"name": "Tickets",
"category": "1. General",
"intArgs": [3, 1],
"strArgs": ["000000", "5506CC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Shuffle All",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "All Primaries",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C1"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "All Supports",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C2"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Filler",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C3"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Rematch",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Damage 50%",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C6"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Slow FS Charging",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C7"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Weaken Minions",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Health Drain",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831C9"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Disable Items",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831CA"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Shield Spacer",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831CB"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Sluggish Shield",
"category": "2. Support Items",
"intArgs": [3, 1],
"strArgs": ["000000", "4831CC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 255
}
},
{
"name": "Snack (S)",
"category": "2. Support Items",
"intArgs": [3, 2],
"strArgs": ["000000", "4831CE"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65535
}
},
{
"name": "Snack (M)",
"category": "2. Support Items",
"intArgs": [3, 2],
"strArgs": ["000000", "4831D0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65535
}
},
{
"name": "Snack (L)",
"category": "2. Support Items",
"intArgs": [3, 2],
"strArgs": ["000000", "4831D2"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 65535
}
},
{
"name": "Unlock all characters (All or Nothing)",
"category": "3. Characters",
"intArgs": [3, 1],
"strArgs": ["53FF60", "540260"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 1
}
},
{
"name": "Get all cores (All or Nothing)",
"category": "4. Cores",
"intArgs": [3, 1],
"strArgs": ["426C77", "434720"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 255
}
},
{
"name": "Unlock all inventory spirits (All or Nothing)",
"category": "5. Spirits",
"intArgs": [3, 2],
"strArgs": ["426C76", "42FA46"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 65293
}
},
{
"name": "Unlock all activities spirits (All or Nothing)",
"category": "5. Spirits",
"intArgs": [3, 2],
"strArgs": ["434750", "444EA5"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 65294
}
},
{
"name": "Unlock All challenges (All or Nothing)",
"category": "6. Challenges",
"intArgs": [3, 1],
"strArgs": ["555BB8", "555BE8"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 255
}
},
{
"name": "Unlock All Stores, Explore, Gym, Dojos, Music",
"category": "7. Misc",
"intArgs": [2, 1],
"strArgs": ["00D0", "0378"],
"widget": {
"type": "bool",
"offValue": 0,
"onValue": 255
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "Jojo",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "data.bin",
"filetype": "puyopuyo",
"items": [
{
"name": "Credits",
"category": "Game",
"intArgs": [2, 2],
"strArgs": ["0000", "06C4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "MySaveData\\d",
"filetype": "bin",
"items": [
{
"name": "Cash (Leaves)",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0014"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
}
]
}
]
}

View file

@ -1,201 +0,0 @@
{
"author": "madhatter",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "data00",
"filetype": "bin",
"items": [
{
"name": "Halos",
"category": "Halos",
"intArgs": [2, 4],
"strArgs": ["0000", "EF64"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
},
{
"name": "Compound Green",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF7C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Compound Yellow",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF78"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Compound Red",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF80"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Green Herb",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF84"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Green Herb",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF88"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Purple Magic",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF8C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Purple Magic",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF90"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Bloody Rose",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF94"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Bloody Rose",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF98"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Yellow Moon",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EF9C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Yellow Moon",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFA0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Broken Heart",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFA4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Broken Pearl",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFA8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Arcade Bullet",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFAC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Red Hot Shot",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFB0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Magic Flute",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "EFB4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "trueicecold",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "File[ABC]",
"filetype": "bin",
"items": [
{
"name": "Stat Points",
"category": "Stats",
"intArgs": [2, 4],
"strArgs": ["0000", "0906"],
"widget": {
"type": "int",
"minValue": 10,
"maxValue": 255
}
}
]
}
]
}

View file

@ -1,124 +0,0 @@
{
"author": "mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "SAVEDATA\\d\\d",
"filetype": "lostsphear",
"items": [
{
"name": "Gold",
"category": "01. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0524"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Level",
"category": "02. Kanata",
"intArgs": [2, 1],
"strArgs": ["0000", "1C3E6"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "03. Lumina",
"intArgs": [2, 1],
"strArgs": ["0000", "1C518"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "04. Locke",
"intArgs": [2, 1],
"strArgs": ["0000", "1C8AE"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "05. Van",
"intArgs": [2, 1],
"strArgs": ["0000", "1C47F"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "06. Obaro",
"intArgs": [2, 1],
"strArgs": ["0000", "1C5B1"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "07. Galdra",
"intArgs": [2, 1],
"strArgs": ["0000", "1C815"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "08. Sherra",
"intArgs": [2, 1],
"strArgs": ["0000", "1C64A"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "09. Dianto",
"intArgs": [2, 1],
"strArgs": ["0000", "1C6E3"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Level",
"category": "10. Gears",
"intArgs": [2, 1],
"strArgs": ["0000", "1C77C"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
}
]
}
]
}

View file

@ -1,245 +0,0 @@
{
"author": "madhatter",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "save\\d\\d\\d\\d\\.sav",
"filetype": "bin",
"items": [
{
"name": "Halos",
"category": "Halos",
"intArgs": [2, 4],
"strArgs": ["0000", "A3B0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Mandragora Root",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3E4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Baked Gecko",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3DC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Unicorn Horn",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3D4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Yellow Moon Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3EC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Bloody Rose Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3F4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Bloody Rose Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A3FC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Purple Magic Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A404"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Purple Magic Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A40C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Mega Green Herb Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A414"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Green Herb Lollipop",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A41C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 11",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A424"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 12",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A42C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Witch's Recipes",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A434"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 14",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A43C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 15",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A444"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 16",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A44C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 17",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A454"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 18",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A45C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 19",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A464"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "item 20",
"category": "Items",
"intArgs": [2, 2],
"strArgs": ["0000", "A46C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
}
]
}
]
}

View file

@ -1,146 +0,0 @@
{
"author": "Jojo",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "systemData",
"filetype": "bin",
"items": [
{
"name": "Coins",
"category": "1. Main",
"intArgs": [2, 4],
"strArgs": ["0000", "0458"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Unlock Levels",
"category": "1. Main",
"intArgs": [2, 3],
"strArgs": ["0000", "0450"],
"widget": {
"type": "list",
"listItemNames": ["Reset", "Unlock All"],
"listItemValues": [0, 16777215]
}
},
{
"name": "Accessories 01-02",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0440"],
"widget": {
"type": "list",
"listItemNames": ["None", "1 Full", "2 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 03-04",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0441"],
"widget": {
"type": "list",
"listItemNames": ["None", "3 Full", "4 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 05-06",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0442"],
"widget": {
"type": "list",
"listItemNames": ["None", "5 Full", "6 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 07-08",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0443"],
"widget": {
"type": "list",
"listItemNames": ["None", "7 Full", "8 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 09-10",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0444"],
"widget": {
"type": "list",
"listItemNames": ["None", "9 Full", "10 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 11-12",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0445"],
"widget": {
"type": "list",
"listItemNames": ["None", "11 Full", "12 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 13-14",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0446"],
"widget": {
"type": "list",
"listItemNames": ["None", "13 Full", "14 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 15-16",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0447"],
"widget": {
"type": "list",
"listItemNames": ["None", "15 Full", "16 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 17-18",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0448"],
"widget": {
"type": "list",
"listItemNames": ["None", "17 Full", "18 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
},
{
"name": "Accessories 19-20",
"category": "2. Accessories",
"intArgs": [2, 1],
"strArgs": ["0000", "0449"],
"widget": {
"type": "list",
"listItemNames": ["None", "19 Full", "20 Full", "All Full"],
"listItemValues": [0, 128, 8, 136]
}
}
]
}
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,210 +0,0 @@
{
"author": "borntohonk & macia10",
"scriptLanguage": "lua",
"beta": false,
"1.5.0": [
{
"saveFilePaths": ["\\d"],
"files": "game_data\\.sav",
"filetype": "bin",
"items": [
{
"name": "Playtime (Seconds)",
"category": "Other",
"intArgs": [4, 4],
"strArgs": ["0000", "75998"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
},
{
"name": "Rupee",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "EAF8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
},
{
"name": "Mons",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "D7FA8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
},
{
"name": "Gerudo Relic uses remaining",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "89968"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Goron Relic uses remaining",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "E7BA0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Rito Relic uses remaining",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "F9CC8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99
}
},
{
"name": "Korok Seeds",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "84908"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 900
}
},
{
"name": "Motorcycle",
"category": "Collectibles",
"intArgs": [4, 4],
"strArgs": ["0000", "D2660"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Hearts",
"category": "Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "10970"],
"widget": {
"type": "list",
"listItemNames": [
"1 Heart",
"2 Hearts",
"3 Hearts",
"4 Hearts",
"5 Hearts",
"6 Hearts",
"7 Hearts",
"8 Hearts",
"9 Hearts",
"10 Hearts",
"11 Hearts",
"12 Hearts",
"13 Hearts",
"14 Hearts",
"15 Hearts",
"16 Hearts",
"17 Hearts",
"18 Hearts",
"19 Hearts",
"20 Hearts",
"21 Hearts",
"22 Hearts",
"23 Hearts",
"24 Hearts",
"25 Hearts",
"26 Hearts",
"27 Hearts",
"28 Hearts",
"29 Hearts",
"30 Hearts"
],
"listItemValues": [
4,
8,
12,
16,
20,
24,
28,
32,
36,
40,
44,
48,
52,
56,
60,
64,
68,
72,
76,
80,
84,
88,
92,
96,
100,
104,
108,
112,
116,
120
]
}
},
{
"name": "Stamina",
"category": "Stats",
"intArgs": [4, 4],
"strArgs": ["0000", "0503C8"],
"widget": {
"type": "list",
"listItemNames": [
"1 Wheel of Stamina",
"1 & 1/5 Wheels of Stamina",
"1 & 2/5 Wheels of Stamina",
"1 & 3/5 Wheels of Stamina",
"1 & 4/5 Wheels of Stamina",
"2 Wheels of Stamina",
"2 & 1/5 Wheels of Stamina",
"2 & 2/5 Wheels of Stamina",
"2 & 3/5 Wheels of Stamina",
"2 & 4/5 Wheels of Stamina",
"3 Wheels of Stamina"
],
"listItemValues": [
1148846080,
1150681088,
1152319488,
1153957888,
1155596288,
1157234688,
1158250496,
1159069696,
1159888896,
1160708096,
1161527296
]
}
}
]
}
]
}

View file

@ -1,606 +0,0 @@
{
"author": "Jojo & mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "setsuna\\d\\d\\.dat",
"filetype": "setsuna",
"items": [
{
"name": "Gold",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "00CC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Level",
"category": "2. Endir",
"intArgs": [2, 1],
"strArgs": ["0000", "72FE6"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FEE"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FE2"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FF2"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FE8"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FEA"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "2. Endir",
"intArgs": [2, 2],
"strArgs": ["0000", "72FEC"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "3. Setsuna",
"intArgs": [2, 1],
"strArgs": ["0000", "7303C"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "73044"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "73038"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "73048"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "7303E"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "73040"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "3. Setsuna",
"intArgs": [2, 2],
"strArgs": ["0000", "73042"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "4. Aeterna",
"intArgs": [2, 1],
"strArgs": ["0000", "73092"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "7309A"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "7308E"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "7309E"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "73094"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "73096"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "4. Aeterna",
"intArgs": [2, 2],
"strArgs": ["0000", "73098"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "5. Nidr",
"intArgs": [2, 1],
"strArgs": ["0000", "730E8"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730F0"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730E4"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730F4"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730EA"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730EC"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "5. Nidr",
"intArgs": [2, 2],
"strArgs": ["0000", "730EE"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "6. Kir",
"intArgs": [2, 1],
"strArgs": ["0000", "7313E"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "73146"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "7313A"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "7314A"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "73140"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "73142"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "6. Kir",
"intArgs": [2, 2],
"strArgs": ["0000", "73144"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "7. Julienne",
"intArgs": [2, 1],
"strArgs": ["0000", "73194"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "7319C"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "73190"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "731A0"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "73196"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "73198"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "7. Julienne",
"intArgs": [2, 2],
"strArgs": ["0000", "7319A"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Level",
"category": "8. Fides",
"intArgs": [2, 1],
"strArgs": ["0000", "731EA"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Current HP",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731F2"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max HP",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731E6"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Current MP",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731F6"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Max MP",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731EC"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 999,
"stepSize": 10
}
},
{
"name": "Strength",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731EE"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
},
{
"name": "Intelligence",
"category": "8. Fides",
"intArgs": [2, 2],
"strArgs": ["0000", "731F0"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 9999,
"stepSize": 100
}
}
]
}
]
}

View file

@ -1,3 +0,0 @@
{
"useInstead": "0100E66006406000.json"
}

View file

@ -1,3 +0,0 @@
{
"useInstead": "01004AB00A260000.json"
}

View file

@ -1,124 +0,0 @@
{
"author": "mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "MySaveData\\d",
"filetype": "bin",
"items": [
{
"name": "Money",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0010"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Blue Karma",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0004"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Purple Karma",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0008"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Red Karma",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "000C"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
},
{
"name": "Level",
"category": "2. Hayato",
"intArgs": [3, 4],
"strArgs": ["000000", "0146D4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "HP",
"category": "2. Hayato",
"intArgs": [3, 4],
"strArgs": ["000000", "0146D8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 640019
}
},
{
"name": "MP",
"category": "2. Hayato",
"intArgs": [3, 4],
"strArgs": ["000000", "0146DC"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
},
{
"name": "Level",
"category": "3. Lua",
"intArgs": [3, 4],
"strArgs": ["000000", "01497B"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999
}
},
{
"name": "HP",
"category": "3. Lua",
"intArgs": [3, 4],
"strArgs": ["000000", "01497F"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 544016
}
},
{
"name": "MP",
"category": "3. Lua",
"intArgs": [3, 4],
"strArgs": ["000000", "014983"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
}
]
}
]
}

View file

@ -1,3 +0,0 @@
{
"useInstead": "0100E66006406000.json"
}

File diff suppressed because it is too large Load diff

View file

@ -1,113 +0,0 @@
{
"author": "jonyluke",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "GAMEDATA.bin",
"filetype": "bin",
"items": [
{
"name": "Bolt",
"category": "Money",
"intArgs": [2, 4],
"strArgs": ["0000", "0B00"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999999
}
},
{
"name": "1-UP",
"category": "Items",
"intArgs": [2, 1],
"strArgs": ["0000", "0CB0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9
}
},
{
"name": "Energy Tank",
"category": "Items",
"intArgs": [2, 1],
"strArgs": ["0000", "0CB8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 6
}
},
{
"name": "Weapon Tank",
"category": "Items",
"intArgs": [2, 1],
"strArgs": ["0000", "0CC8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 6
}
},
{
"name": "Mystery Tank",
"category": "Items",
"intArgs": [2, 1],
"strArgs": ["0000", "0CC0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1
}
},
{
"name": "Super Guard",
"category": "Support",
"intArgs": [2, 1],
"strArgs": ["0000", "0CE0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1
}
},
{
"name": "Eddie Call",
"category": "Support",
"intArgs": [2, 1],
"strArgs": ["0000", "0CF0"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1
}
},
{
"name": "Pierce Protector",
"category": "Support",
"intArgs": [2, 1],
"strArgs": ["0000", "0CD8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 3
}
},
{
"name": "Beat Call",
"category": "Support",
"intArgs": [2, 1],
"strArgs": ["0000", "0CF8"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "cubex",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "save.dat",
"filetype": "bin",
"items": [
{
"name": "Character Level",
"category": "Adventure Mode",
"intArgs": [2, 4],
"strArgs": ["0000", "40B0"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
}
]
}
]
}

View file

@ -1,136 +0,0 @@
{
"author": "kingofblues",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "save.bin",
"filetype": "dktf",
"items": [
{
"name": "Red Balloon",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "0873"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Banana Coins",
"category": "Inventory",
"intArgs": [2, 2],
"strArgs": ["0000", "086E"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 999,
"stepSize": 100
}
},
{
"name": "Squawks",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "0897"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Crash Guard",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "089B"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Heart Boost",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "089F"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Blue Balloon",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08A3"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Green Balloon",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08A7"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Banana Juice",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08AB"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Diddy Kong Barrel",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08AF"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Dixie Kong Barrel",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08B3"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
},
{
"name": "Cranky Kong Barrel",
"category": "Inventory",
"intArgs": [2, 1],
"strArgs": ["0000", "08B7"],
"widget": {
"type": "int",
"minValue": 1,
"maxValue": 99
}
}
]
}
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,25 +0,0 @@
{
"author": "macia10",
"scriptLanguage": "py",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "irasavefile\\.sav",
"filetype": "torna",
"items": [
{
"name": "Money",
"category": "Items",
"intArgs": [2, 4],
"strArgs": ["0000", "10"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
}
]
}
]
}

View file

@ -1,109 +0,0 @@
{
"author": "echo000 & WerWolv",
"scriptLanguage": "py",
"beta": true,
"all": [
{
"saveFilePaths": ["[a-zA-Z0-9]+_\\d+"],
"files": "[a-zA-Z0-9]+_\\d+",
"filetype": "xmls",
"items": [
{
"name": "Money",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "money"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Total Money Earned",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "totalMoneyEarned"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 999999999
}
},
{
"name": "Fishing Level",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "fishingLevel"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 10
}
},
{
"name": "Farming Level",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "farmingLevel"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 10
}
},
{
"name": "Mining Level",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "miningLevel"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 10
}
},
{
"name": "Combat Level",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "combatLevel"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 10
}
},
{
"name": "Foraging Level",
"category": "Player",
"intArgs": [0],
"strArgs": ["SaveGame", "player", "foragingLevel"],
"widget": {
"type": "int",
"stepSize": 1,
"minValue": 0,
"maxValue": 10
}
},
{
"name": "Has Greenhouse",
"category": "Player",
"intArgs": [1],
"strArgs": ["SaveGame", "player", "hasGreenhouse"],
"widget": {
"type": "bool",
"onValue": 0,
"offValue": 1
}
}
]
}
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,25 +0,0 @@
{
"author": "madhatter edit by: macia10",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "bf2savefile\\.sav",
"filetype": "bin",
"items": [
{
"name": "Money",
"category": "Items",
"intArgs": [2, 4],
"strArgs": ["0000", "10"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 99999999
}
}
]
}
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,345 +0,0 @@
{
"author": "WerWolv",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "slot\\d\\.dat",
"encoding": "ascii",
"filetype": "json",
"items": [
{
"name": "Arcade Token Count",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["player", "arcadeTokenCount"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 5
}
},
{
"name": "Health Extender Tokens",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["player", "healthExtenderTokenCount"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 100
}
},
{
"name": "Special Extender Tokens",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["player", "specialExtenderTokenCount"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 100
}
},
{
"name": "Unspent Pagies",
"category": "Collectibles",
"intArgs": [0],
"strArgs": ["player", "unspentPagies"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 1000
}
},
{
"name": "Tribalstack Tropics Unlocked",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\2", "worldUnlocked"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Tribalstack Tropics Expanded",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\2", "worldExpanded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Glitterglaze Glacier Unlocked",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\5", "worldUnlocked"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Glitterglaze Glacier Expanded",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\5", "worldExpanded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Galleon Galaxy Unlocked",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\3", "worldUnlocked"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Galleon Galaxy Expanded",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\3", "worldExpanded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Capital Cashino Unlocked",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\4", "worldUnlocked"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Capital Cashino Expanded",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\4", "worldExpanded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Moodymaze Marsh Unlocked",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\6", "worldUnlocked"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Moodymaze Marsh Expanded",
"category": "Worlds",
"intArgs": [1],
"strArgs": ["worlds", "\\6", "worldExpanded"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Camo Cloak Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\0"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Sonar Shot Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\1"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Sonar 'Splosion Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\2"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Sonar Shield Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\3"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Roll Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\4"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Slurp Shot Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\5"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Slurp State Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\6"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Reptile Rush Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\8"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Glide Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\9"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Flappy Flight Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\10"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Buddy Slam Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\11"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Lizard Leap Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\12"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Buddy Bubble Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\13"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Lizard Lash Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\14"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Ground Attack (Tail Twirl) Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\15"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
},
{
"name": "Air Attack (Tail Twirl) Unlocked",
"category": "Abilities",
"intArgs": [1],
"strArgs": ["player", "moveEnabled", "\\16"],
"widget": {
"type": "bool",
"onValue": 1,
"offValue": 0
}
}
]
}
]
}

File diff suppressed because it is too large Load diff

View file

@ -1,25 +0,0 @@
{
"author": "Jojo",
"scriptLanguage": "lua",
"beta": false,
"all": [
{
"saveFilePaths": [""],
"files": "data.bin",
"filetype": "puyopuyo",
"items": [
{
"name": "Credits",
"category": "Game",
"intArgs": [2, 2],
"strArgs": ["0000", "06C4"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999
}
}
]
}
]
}

View file

@ -1,25 +0,0 @@
{
"author": "mrLewisFC",
"scriptLanguage": "lua",
"beta": true,
"all": [
{
"saveFilePaths": [""],
"files": "MySaveData\\d",
"filetype": "bin",
"items": [
{
"name": "Cash (Leaves)",
"category": "1. Inventory",
"intArgs": [2, 4],
"strArgs": ["0000", "0014"],
"widget": {
"type": "int",
"minValue": 0,
"maxValue": 9999999
}
}
]
}
]
}

View file

@ -1,53 +0,0 @@
-- bin --
-- Generic EdiZon accessor for flat little-endian binary save files.
-- Contract (per the calls below): edizon.getStrArgs() supplies
-- { indirectAddress(hex string), address(hex string) } and
-- edizon.getIntArgs() supplies { addressSize, valueSize }.
-- State is deliberately exchanged through globals; the host reads the
-- buffer back via getModifiedSaveFile().
saveFileBuffer = edizon.getSaveFileBuffer()

-- Decode valueSize bytes (little-endian) at `address`, optionally relocated
-- by a pointer stored at `indirectAddress`, and return the integer.
function getValueFromSaveFile()
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    value = 0
    -- 0 means "no indirection"; otherwise read an addressSize-byte
    -- little-endian pointer from the buffer.
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << i * 8)
        end
    end
    -- `+ 1` converts the zero-based file offset to Lua's 1-based table index.
    for i = 0, valueSize - 1 do
        value = value | (saveFileBuffer[offset + address + i + 1] << i * 8)
    end
    return value
end

-- Encode `value` back into the buffer at the same (possibly indirected)
-- address, little-endian, valueSize bytes wide.
function setValueInSaveFile(value)
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << (i * 8))
        end
    end
    -- Mask out byte i of `value` and store it at the matching buffer slot.
    for i = 0, valueSize - 1 do
        saveFileBuffer[offset + address + i + 1] = (value & (0xFF << i * 8)) >> (i * 8)
    end
end

-- Hand the (possibly mutated) buffer back to EdiZon for writing to disk.
function getModifiedSaveFile()
    return saveFileBuffer
end

View file

@ -1,46 +0,0 @@
## bin ##
# Generic EdiZon accessor for flat little-endian binary save files.
# strArgs = [indirectAddress(hex), address(hex)]; intArgs = [addressSize, valueSize].
import edizon

saveFileBuffer = edizon.getSaveFileBuffer()


def _fetch_args():
    """Parse the EdiZon arguments shared by both accessors.

    Returns (indirect_address, address, address_size, value_size).
    """
    str_args = edizon.getStrArgs()
    int_args = edizon.getIntArgs()
    return int(str_args[0], 16), int(str_args[1], 16), int_args[0], int_args[1]


def _read_le(start, count):
    """Decode `count` bytes of saveFileBuffer at `start` as a little-endian int."""
    result = 0
    for shift in range(count):
        result |= saveFileBuffer[start + shift] << (shift * 8)
    return result


def getValueFromSaveFile():
    """Return the integer stored at the configured save-file address.

    When the indirect address is non-zero, it is first dereferenced as a
    little-endian pointer and used to relocate the target address.
    """
    indirect, address, addr_size, value_size = _fetch_args()
    offset = _read_le(indirect, addr_size) if indirect != 0 else 0
    return _read_le(offset + address, value_size)


def setValueInSaveFile(value):
    """Write `value` back into saveFileBuffer, little-endian, in place."""
    indirect, address, addr_size, value_size = _fetch_args()
    offset = _read_le(indirect, addr_size) if indirect != 0 else 0
    base = offset + address
    for shift in range(value_size):
        saveFileBuffer[base + shift] = (value >> (shift * 8)) & 0xFF


def getModifiedSaveFile():
    """Hand the (possibly mutated) buffer back to EdiZon."""
    return saveFileBuffer

View file

@ -1,80 +0,0 @@
-- darksoulsremastered (modified bin.lua) --
-- Same little-endian read/write logic as bin.lua, plus an MD5 checksum that
-- must be refreshed over the active save slot before the file is written back.
md5 = require("lib.md5")
saveFileBuffer = edizon.getSaveFileBuffer()

-- Decode valueSize bytes (little-endian) at `address`, optionally relocated
-- via a pointer stored at `indirectAddress`.  Buffer indices are 1-based.
function getValueFromSaveFile()
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    value = 0
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << i * 8)
        end
    end
    for i = 0, valueSize - 1 do
        value = value | (saveFileBuffer[offset + address + i + 1] << i * 8)
    end
    return value
end

-- Encode `value` back into the buffer at the same (possibly indirected)
-- address, little-endian.
function setValueInSaveFile(value)
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << (i * 8))
        end
    end
    for i = 0, valueSize - 1 do
        saveFileBuffer[offset + address + i + 1] = (value & (0xFF << i * 8)) >> (i * 8)
    end
end

-- Read a 32-bit little-endian value from buffer indices 41..44 (file offset
-- 0x28); presumably the location of the active slot's checksum block --
-- TODO confirm against the save format.
function getOffset()
    offset = 0
    for i = 0, 3 do
        offset = offset | (saveFileBuffer[41 + i] << i * 8)
    end
    return offset
end

-- MD5 over a fixed 393216-byte (0x60000) window starting at saveDataOffset.
-- The window is copied into a fresh 1-based table so it can be turned into a
-- string for the md5 library.
function calcChecksum(saveDataOffset)
    checksumFileBuffer = {}
    for i = saveDataOffset, saveDataOffset + 393215 do
        checksumFileBuffer[i - saveDataOffset + 1] = saveFileBuffer[i]
    end
    return md5.hash(string.char(table.unpack(checksumFileBuffer)))
end

-- Recompute the slot checksum: data starts 17 bytes (16, zero-based) past the
-- checksum offset, and the 16-byte MD5 digest is written immediately at the
-- checksum offset.
function setChecksum()
    checksumOffset = getOffset()
    md5hash = calcChecksum(checksumOffset + 17)
    checksum = table.pack(md5hash:byte(1, 16))
    for i = 1, 16 do
        saveFileBuffer[checksumOffset + i] = checksum[i]
    end
end

-- Refresh the checksum, then hand the buffer back to EdiZon.
function getModifiedSaveFile()
    setChecksum()
    return saveFileBuffer
end

View file

@ -1,73 +0,0 @@
-- Donkey Kong Tropical Freeze (modified bin) --
-- IMPORTANT:
-- Save file is big-endian!
-- getValueFromSaveFile & setValueInSaveFile have been modified accordingly
-- Save file is always 0x40000 bytes in size, where the first 4 bytes are
-- the CRC32 of [0x04 .. 0x40000]
checksum = require("lib.checksum")
saveFileBuffer = edizon.getSaveFileBuffer()

-- Decode valueSize bytes (BIG-endian) at `address`, optionally relocated via
-- a pointer at `indirectAddress`.  Note the shift `(valueSize - i - 1) * 8`:
-- the first byte read is the most significant one.
function getValueFromSaveFile()
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    value = 0
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << ((addressSize - i - 1) * 8))
        end
    end
    for i = 0, valueSize - 1 do
        value = value | (saveFileBuffer[offset + address + i + 1] << ((valueSize - i - 1) * 8))
    end
    return value
end

-- Encode `value` back into the buffer at the same address, big-endian.
function setValueInSaveFile(value)
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    indirectAddress = tonumber(strArgs[1], 16)
    address = tonumber(strArgs[2], 16)
    addressSize = intArgs[1]
    valueSize = intArgs[2]
    offset = 0
    if indirectAddress ~= 0 then
        for i = 0, addressSize - 1 do
            offset = offset | (saveFileBuffer[indirectAddress + i + 1] << ((addressSize - i - 1) * 8))
        end
    end
    for i = 0, valueSize - 1 do
        saveFileBuffer[offset + address + i + 1] = (value & (0xFF << (valueSize - i - 1) * 8)) >> ((valueSize - i - 1) * 8)
    end
end

-- Recompute the CRC32 of bytes [0x04 .. 0x40000) (262140 = 0x3FFFC bytes,
-- matching the header comment) and store it big-endian in the first 4 bytes.
function setChecksum()
    gameFileBuffer = {}
    for i = 1, 262140 do
        gameFileBuffer[i] = saveFileBuffer[i + 4]
    end
    crc = checksum.crc32(string.char(table.unpack(gameFileBuffer)))
    -- checksum.crc32 returns the standard (final-complemented) CRC-32; the
    -- XOR undoes that final inversion, so the game apparently stores the
    -- un-complemented register value -- NOTE(review): confirm against a
    -- known-good save.
    crc = crc ~ 0xFFFFFFFF
    for i = 0,3 do
        saveFileBuffer[i + 1] = (crc & (0xFF000000 >> (i * 8))) >> (24 - i * 8)
    end
end

-- Refresh the header CRC, then hand the buffer back to EdiZon.
function getModifiedSaveFile()
    setChecksum()
    return saveFileBuffer
end

View file

@ -1,79 +0,0 @@
-- json --
-- EdiZon accessor for JSON save files.  strArgs is a key path into the
-- decoded document; a component of the form "\N" addresses array index N
-- (zero-based in the config, converted to Lua's 1-based indexing here).
-- intArgs[1] == 0 means the value is used as-is; any other value means the
-- target is a boolean mapped to/from the integers 1 (true) and 0 (false).
json = require("lib.json")
saveFileString = edizon.getSaveFileString()
-- Empty JSON objects would decode to empty Lua tables and then re-encode as
-- empty ARRAYS ("[]"); tag them so they round-trip as objects, and strip the
-- tag again in getModifiedSaveFile().
saveFileString = saveFileString:gsub('{%s*}', '{"edizon":true}')
saveFileBuffer = json.decode(saveFileString)

-- Walk saveFileBuffer along the strArgs path and return the addressed value
-- (booleans coerced to 1/0 when intArgs[1] ~= 0).  Returns 0 for a malformed
-- "\N" path component.
function getValueFromSaveFile()
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    item = saveFileBuffer
    for i, tag in pairs(strArgs) do
        if type(item) ~= "table" then break end
        if string.sub(tag, 1, 1) == "\\" then
            -- BUG FIX: the original computed `tonumber(...) + 1` BEFORE its
            -- nil check, so a non-numeric index raised an arithmetic error
            -- and the `return 0` guard was unreachable.  Convert first, test,
            -- then adjust to 1-based.
            local index = tonumber(tag:sub(2))
            if index == nil then return 0 end
            tag = index + 1
        end
        item = item[tag]
    end
    if intArgs[1] == 0 then
        return item
    else
        return item and 1 or 0
    end
end

-- Walk the same path and assign `value` at the leaf (mapping 1 -> true,
-- anything else -> false when intArgs[1] ~= 0).
function setValueInSaveFile(value)
    local items = saveFileBuffer
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    local ref = items
    for i, tag in ipairs(strArgs) do
        if string.sub(tag, 1, 1) == "\\" then
            tag = tonumber(tag:sub(2)) + 1
        end
        if i == #strArgs then
            if intArgs[1] == 0 then
                ref[tag] = value
            else
                ref[tag] = (value == 1)
            end
        else
            ref = ref[tag]
        end
    end
end

-- Convert a Lua string into a 1-based table of byte values (the buffer shape
-- EdiZon expects back).
local function convertToTable(s)
    -- `local` added: the original leaked `t` into the global environment.
    local t = {}
    for i = 1, #s do
        t[i] = string.byte(s:sub(i, i))
    end
    return t
end

-- Re-encode the document, restore genuine empty objects, and return the
-- result as a byte table.
function getModifiedSaveFile()
    encoded = json.encode(saveFileBuffer)
    encoded = encoded:gsub('{"edizon":true}', '{}')
    convertedTable = convertToTable(encoded)
    return convertedTable
end

View file

@ -1,85 +0,0 @@
# kirbysa.py - by Ac_K
# EdiZon accessor for Kirby Star Allies saves.  The file holds three 1 MiB
# save slots at 0x100000, 0x200000 and 0x300000; values are located by
# scanning a slot for a section name and then an item name within it.
import edizon

saveFileBuffer = edizon.getSaveFileBuffer()
# Currently selected slot (0..2), switched via setDummyValue().
save_slot_id = 0
# One entry per slot.  NOTE(review): item assignment in setValueInSaveFile
# assumes these slices are mutable (e.g. bytearray, not bytes) -- confirm what
# edizon.getSaveFileBuffer() returns.
save_slots = [saveFileBuffer[0x100000:0x200000], saveFileBuffer[0x200000:0x300000], saveFileBuffer[0x300000:0x400000]]


def find_offset(section_name, item_name):
    """Return the slot-relative offset of item_name, searched within the
    0x1000 bytes following section_name in the current slot.

    Raises ValueError (from .index) if either name is not found.
    """
    section_offset = save_slots[save_slot_id].index(section_name.encode())
    section_buffer = save_slots[save_slot_id][section_offset:section_offset + 0x1000]
    item_offset = section_buffer.index(item_name.encode())
    return section_offset + item_offset


def check_slot_exist(index):
    """Return True if slot `index` (0..2) carries the "meta" marker at
    offset +2 of its 1 MiB region, i.e. the slot holds a save."""
    if index == 0:
        meta_buffer = saveFileBuffer[0x100002:0x100006].decode()
    elif index == 1:
        meta_buffer = saveFileBuffer[0x200002:0x200006].decode()
    elif index == 2:
        meta_buffer = saveFileBuffer[0x300002:0x300006].decode()
    else:
        meta_buffer = ""
    if meta_buffer == "meta":
        return True
    else:
        return False


def getDummyValue():
    # EdiZon "dummy" widget: expose the active slot as 1-based.
    return save_slot_id + 1


def setDummyValue(value):
    # Switch the active slot (1-based from the UI); fall back to slot 0 when
    # the requested slot holds no save.
    global save_slot_id
    value -= 1
    if check_slot_exist(value):
        save_slot_id = value
    else:
        save_slot_id = 0


def getValueFromSaveFile():
    """Decode valueSize bytes, little-endian, at the named section/item
    location (plus a hex padding offset) inside the active slot."""
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    sectionId = strArgs[0]
    itemId = strArgs[1]
    padding = int(strArgs[2], 16)
    valueSize = intArgs[0]
    item_offset = find_offset(sectionId, itemId) + padding
    value = 0
    for i in range(0, valueSize):
        value = value | (save_slots[save_slot_id][item_offset + i] << i * 8)
    return value


def setValueInSaveFile(value):
    """Encode `value` back into the active slot at the same location,
    little-endian, valueSize bytes wide."""
    global save_slots
    strArgs = edizon.getStrArgs()
    intArgs = edizon.getIntArgs()
    sectionId = strArgs[0]
    itemId = strArgs[1]
    padding = int(strArgs[2], 16)
    valueSize = intArgs[0]
    item_offset = find_offset(sectionId, itemId) + padding
    for i in range(0, valueSize):
        save_slots[save_slot_id][item_offset + i] = (value & (0xFF << i * 8)) >> (i * 8)


def getModifiedSaveFile():
    # Reassemble: untouched 1 MiB header followed by the three slots.
    new_save_buffer = saveFileBuffer[0:0x100000] + save_slots[0] + save_slots[1] + save_slots[2]
    return new_save_buffer

View file

@ -1,82 +0,0 @@
-- Copyright (c) 2015 Phil Leblanc -- see LICENSE file
------------------------------------------------------------
-- checksums: adler-32, crc-32
-- Vendored third-party library (pure Lua, requires 64-bit
-- integer bitwise operators, i.e. Lua 5.3+).
------------------------------------------------------------
local byte = string.byte
------------------------------------------------------------
-- adler32
local function adler32(s)
    -- return adler32 checksum (uint32)
    -- adler32 is a checksum defined by Mark Adler for zlib
    -- (based on the Fletcher checksum used in ITU X.224)
    -- implementation based on RFC 1950 (zlib format spec), 1996
    local prime = 65521 --largest prime smaller than 2^16
    local s1, s2 = 1, 0
    -- limit s size to ensure that modulo prime can be done only at end
    -- 2^40 bytes should be enough for pure Lua with 64-bit integers...
    if #s > (1 << 40) then error("adler32: string too large") end
    for i = 1,#s do
        local b = byte(s, i)
        s1 = s1 + b
        s2 = s2 + s1
        -- no need to test or compute mod prime every turn.
    end
    s1 = s1 % prime
    s2 = s2 % prime
    return (s2 << 16) + s1
end --adler32()
local function crc32_nt(s)
    -- return crc32 checksum of string s as an integer
    -- uses no lookup table
    -- inspired by crc32b at
    -- http://www.hackersdelight.org/hdcodetxt/crc.c.txt
    local b, crc, mask
    crc = 0xffffffff
    for i = 1, #s do
        b = byte(s, i)
        crc = crc ~ b
        for _ = 1, 8 do --eight times
            mask = -(crc & 1)
            crc = (crc >> 1) ~ (0xedb88320 & mask)
        end
    end--for
    -- final complement, masked to 32 bits (standard CRC-32 finalization)
    return (~crc) & 0xffffffff
end --crc32_nt()
local function crc32(s, lt)
    -- return crc32 checksum of string as an integer
    -- use lookup table lt if provided or create one on the fly
    -- if lt is empty, it is initialized.
    lt = lt or {}
    local b, crc, mask
    if not lt[1] then -- setup table
        for i = 1, 256 do
            crc = i - 1
            for _ = 1, 8 do --eight times
                mask = -(crc & 1)
                crc = (crc >> 1) ~ (0xedb88320 & mask)
            end
            lt[i] = crc
        end--for
    end--if
    -- compute the crc
    crc = 0xffffffff
    for i = 1, #s do
        b = byte(s, i)
        crc = (crc >> 8) ~ lt[((crc ~ b) & 0xFF) + 1]
    end
    return (~crc) & 0xffffffff
end --crc32()
return {
    -- module
    adler32 = adler32,
    crc32_nt = crc32_nt,
    crc32 = crc32,
}

View file

@ -1,400 +0,0 @@
--
-- json.lua
--
-- Copyright (c) 2018 rxi
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy of
-- this software and associated documentation files (the "Software"), to deal in
-- the Software without restriction, including without limitation the rights to
-- use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-- of the Software, and to permit persons to whom the Software is furnished to do
-- so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
-- copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
--
-- Vendored json.lua (rxi, v0.1.1) -- encode half of the module.
local json = { _version = "0.1.1" }
-------------------------------------------------------------------------------
-- Encode
-------------------------------------------------------------------------------
local encode
-- JSON escape sequences for characters that cannot appear raw in a string.
local escape_char_map = {
  [ "\\" ] = "\\\\",
  [ "\"" ] = "\\\"",
  [ "\b" ] = "\\b",
  [ "\f" ] = "\\f",
  [ "\n" ] = "\\n",
  [ "\r" ] = "\\r",
  [ "\t" ] = "\\t",
}
-- Inverse map (used by the decoder); "\/" additionally unescapes to "/".
local escape_char_map_inv = { [ "\\/" ] = "/" }
for k, v in pairs(escape_char_map) do
  escape_char_map_inv[v] = k
end
local function escape_char(c)
  -- Fall back to \uXXXX for control characters not in the map.
  return escape_char_map[c] or string.format("\\u%04x", c:byte())
end
local function encode_nil(val)
  return "null"
end
local function encode_table(val, stack)
  local res = {}
  stack = stack or {}
  -- Circular reference?
  if stack[val] then error("circular reference") end
  stack[val] = true
  -- Heuristic: a table with entry [1] (or no entries at all) is an array;
  -- anything else is an object with string keys.
  if val[1] ~= nil or next(val) == nil then
    -- Treat as array -- check keys are valid and it is not sparse
    local n = 0
    for k in pairs(val) do
      if type(k) ~= "number" then
        error("invalid table: mixed or invalid key types")
      end
      n = n + 1
    end
    if n ~= #val then
      error("invalid table: sparse array")
    end
    -- Encode
    for i, v in ipairs(val) do
      table.insert(res, encode(v, stack))
    end
    stack[val] = nil
    return "[" .. table.concat(res, ",") .. "]"
  else
    -- Treat as an object
    for k, v in pairs(val) do
      if type(k) ~= "string" then
        error("invalid table: mixed or invalid key types")
      end
      table.insert(res, encode(k, stack) .. ":" .. encode(v, stack))
    end
    stack[val] = nil
    return "{" .. table.concat(res, ",") .. "}"
  end
end
local function encode_string(val)
  return '"' .. val:gsub('[%z\1-\31\\"]', escape_char) .. '"'
end
local function encode_number(val)
  -- Check for NaN, -inf and inf
  if val ~= val or val <= -math.huge or val >= math.huge then
    error("unexpected number value '" .. tostring(val) .. "'")
  end
  return string.format("%.14g", val)
end
-- Dispatch table: Lua type name -> encoder function.
local type_func_map = {
  [ "nil" ] = encode_nil,
  [ "table" ] = encode_table,
  [ "string" ] = encode_string,
  [ "number" ] = encode_number,
  [ "boolean" ] = tostring,
}
encode = function(val, stack)
  local t = type(val)
  local f = type_func_map[t]
  if f then
    return f(val, stack)
  end
  error("unexpected type '" .. t .. "'")
end
-- Public entry point: serialize a Lua value to a JSON string.
function json.encode(val)
  return ( encode(val) )
end
-------------------------------------------------------------------------------
-- Decode
-------------------------------------------------------------------------------
local parse
-- Build a lookup set ({value = true, ...}) from the given arguments.
local function create_set(...)
  local set = {}
  local count = select("#", ...)
  for i = 1, count do
    set[select(i, ...)] = true
  end
  return set
end
-- Character/word classes used by the scanner.
local space_chars = create_set(" ", "\t", "\r", "\n")
local delim_chars = create_set(" ", "\t", "\r", "\n", "]", "}", ",")
local escape_chars = create_set("\\", "/", '"', "b", "f", "n", "r", "t", "u")
local literals = create_set("true", "false", "null")

-- Decoded value for each literal; "null" intentionally maps to nil.
local literal_map = {
  [ "true" ] = true,
  [ "false" ] = false,
  [ "null" ] = nil,
}
-- Return the index of the first character at or after idx that is in
-- 'set' (or, when negate is true, the first NOT in 'set').
-- Returns #str + 1 when no such character exists.
local function next_char(str, idx, set, negate)
  for i = idx, #str do
    if set[str:sub(i, i)] ~= negate then
      return i
    end
  end
  return #str + 1
end
-- Abort decoding: translate byte offset 'idx' into a 1-based line/column
-- pair and raise an error with that location appended to 'msg'.
local function decode_error(str, idx, msg)
  local line, col = 1, 1
  for i = 1, idx - 1 do
    col = col + 1
    if str:sub(i, i) == "\n" then
      line = line + 1
      col = 1
    end
  end
  error(string.format("%s at line %d col %d", msg, line, col))
end
-- Convert a Unicode code point to its UTF-8 byte sequence (1-4 bytes).
-- http://scripts.sil.org/cms/scripts/page.php?site_id=nrsi&id=iws-appendixa
local function codepoint_to_utf8(n)
  local f = math.floor
  if n <= 0x7f then
    return string.char(n)                                -- 1 byte (ASCII)
  elseif n <= 0x7ff then
    return string.char(f(n / 64) + 192, n % 64 + 128)    -- 2 bytes
  elseif n <= 0xffff then
    return string.char(f(n / 4096) + 224, f(n % 4096 / 64) + 128, n % 64 + 128)
  elseif n <= 0x10ffff then
    return string.char(f(n / 262144) + 240, f(n % 262144 / 4096) + 128,
                       f(n % 4096 / 64) + 128, n % 64 + 128)
  end
  error( string.format("invalid unicode codepoint '%x'", n) )
end
-- Decode a "\uXXXX" (or surrogate pair "\uXXXX\uXXXX") escape text into
-- its UTF-8 bytes. s is the raw matched escape including the backslashes.
local function parse_unicode_escape(s)
  local n1 = tonumber( s:sub(3, 6), 16 )   -- first code unit
  local n2 = tonumber( s:sub(9, 12), 16 )  -- second unit, if a pair matched
  -- Surrogate pair?
  if n2 then
    return codepoint_to_utf8((n1 - 0xd800) * 0x400 + (n2 - 0xdc00) + 0x10000)
  else
    return codepoint_to_utf8(n1)
  end
end
-- Parse a JSON string starting at the opening quote at index i.
-- Returns the decoded Lua string and the index just past the closing quote.
--
-- Fix: \uXXXX validation previously sliced five characters and ran an
-- unanchored "%x%x%x%x" match, so a malformed escape such as "\ug123"
-- passed validation and later crashed parse_unicode_escape with a raw
-- Lua error. The four hex digits are now checked exactly (anchored).
local function parse_string(str, i)
  local has_unicode_escape = false
  local has_surrogate_escape = false
  local has_escape = false
  local last
  for j = i + 1, #str do
    local x = str:byte(j)
    if x < 32 then
      decode_error(str, j, "control character in string")
    end
    if last == 92 then -- "\\" (escape char)
      if x == 117 then -- "u" (unicode escape sequence)
        local hex = str:sub(j + 1, j + 4)
        if not hex:find("^%x%x%x%x") then
          decode_error(str, j, "invalid unicode escape in string")
        end
        if hex:find("^[dD][89aAbB]") then
          -- High surrogate: decoded later together with its partner.
          has_surrogate_escape = true
        else
          has_unicode_escape = true
        end
      else
        local c = string.char(x)
        if not escape_chars[c] then
          decode_error(str, j, "invalid escape char '" .. c .. "' in string")
        end
        has_escape = true
      end
      last = nil
    elseif x == 34 then -- '"' (end of string)
      local s = str:sub(i + 1, j - 1)
      -- Substitutions are deferred so escape-free strings take a single
      -- substring with no gsub passes.
      if has_surrogate_escape then
        s = s:gsub("\\u[dD][89aAbB]..\\u....", parse_unicode_escape)
      end
      if has_unicode_escape then
        s = s:gsub("\\u....", parse_unicode_escape)
      end
      if has_escape then
        s = s:gsub("\\.", escape_char_map_inv)
      end
      return s, j + 1
    else
      last = x
    end
  end
  decode_error(str, i, "expected closing quote for string")
end
-- Parse a number token starting at i; the token runs until the next
-- delimiter and tonumber() does the actual validation.
local function parse_number(str, i)
  local tail = next_char(str, i, delim_chars)
  local token = str:sub(i, tail - 1)
  local value = tonumber(token)
  if not value then
    decode_error(str, i, "invalid number '" .. token .. "'")
  end
  return value, tail
end
-- Parse a bare word (true/false/null) starting at i.
local function parse_literal(str, i)
  local tail = next_char(str, i, delim_chars)
  local word = str:sub(i, tail - 1)
  if not literals[word] then
    decode_error(str, i, "invalid literal '" .. word .. "'")
  end
  -- literal_map["null"] is nil, so JSON null decodes to Lua nil.
  return literal_map[word], tail
end
-- Parse a JSON array starting at the "[" at index i.
-- Returns the Lua array table and the index just past the "]".
local function parse_array(str, i)
  local res = {}
  local n = 1
  i = i + 1
  while 1 do
    local x
    i = next_char(str, i, space_chars, true)
    -- Empty / end of array?
    if str:sub(i, i) == "]" then
      i = i + 1
      break
    end
    -- Read token
    x, i = parse(str, i)
    -- Assign by explicit index (n tracked separately) rather than
    -- appending, so a decoded null (nil) still advances the slot.
    res[n] = x
    n = n + 1
    -- Next token
    i = next_char(str, i, space_chars, true)
    local chr = str:sub(i, i)
    i = i + 1
    if chr == "]" then break end
    if chr ~= "," then decode_error(str, i, "expected ']' or ','") end
  end
  return res, i
end
-- Parse a JSON object starting at the "{" at index i.
-- Returns the Lua table and the index just past the "}".
local function parse_object(str, i)
  local res = {}
  i = i + 1
  while 1 do
    local key, val
    i = next_char(str, i, space_chars, true)
    -- Empty / end of object?
    if str:sub(i, i) == "}" then
      i = i + 1
      break
    end
    -- Read key (must be a quoted string)
    if str:sub(i, i) ~= '"' then
      decode_error(str, i, "expected string for key")
    end
    key, i = parse(str, i)
    -- Read ':' delimiter
    i = next_char(str, i, space_chars, true)
    if str:sub(i, i) ~= ":" then
      decode_error(str, i, "expected ':' after key")
    end
    i = next_char(str, i + 1, space_chars, true)
    -- Read value
    val, i = parse(str, i)
    -- Set
    res[key] = val
    -- Next token: either the closing brace or a comma before the next pair
    i = next_char(str, i, space_chars, true)
    local chr = str:sub(i, i)
    i = i + 1
    if chr == "}" then break end
    if chr ~= "," then decode_error(str, i, "expected '}' or ','") end
  end
  return res, i
end
-- Dispatch on a value's first character to the matching parser.
local char_func_map = {
  [ '"' ] = parse_string,
  [ "0" ] = parse_number,
  [ "1" ] = parse_number,
  [ "2" ] = parse_number,
  [ "3" ] = parse_number,
  [ "4" ] = parse_number,
  [ "5" ] = parse_number,
  [ "6" ] = parse_number,
  [ "7" ] = parse_number,
  [ "8" ] = parse_number,
  [ "9" ] = parse_number,
  [ "-" ] = parse_number,
  [ "t" ] = parse_literal,
  [ "f" ] = parse_literal,
  [ "n" ] = parse_literal,
  [ "[" ] = parse_array,
  [ "{" ] = parse_object,
}
-- Parse the single JSON value beginning at idx (forward-declared above).
parse = function(str, idx)
  local chr = str:sub(idx, idx)
  local f = char_func_map[chr]
  if f then
    return f(str, idx)
  end
  decode_error(str, idx, "unexpected character '" .. chr .. "'")
end
-- Public entry point: decode a JSON document held in a string.
function json.decode(str)
  if type(str) ~= "string" then
    error("expected argument of type string, got " .. type(str))
  end
  -- Skip leading whitespace, parse exactly one value, then require only
  -- whitespace up to the end of the input.
  local res, idx = parse(str, next_char(str, 1, space_chars, true))
  idx = next_char(str, idx, space_chars, true)
  if idx <= #str then
    decode_error(str, idx, "trailing garbage")
  end
  return res
end
return json

View file

@ -1,168 +0,0 @@
-- Copyright (c) 2017 Phil Leblanc -- see LICENSE file
------------------------------------------------------------------------
-- md5 hash - see RFC 1321 - https://www.ietf.org/rfc/rfc1321.txt
local spack, sunpack = string.pack, string.unpack
------------------------------------------------------------------------
-- MD5 round-1 step: F(b,c,d) = (b AND c) OR (NOT b AND d); add, rotate
-- left by s, add b — all modulo 2^32.
local function FF(a, b, c, d, x, s, ac)
  local sum = (a + ((b & c) | ((~b) & d)) + x + ac) & 0xffffffff
  local rot = ((sum << s) | (sum >> (32 - s))) & 0xffffffff
  return (rot + b) & 0xffffffff
end
-- MD5 round-2 step: G(b,c,d) = (b AND d) OR (c AND NOT d).
local function GG(a, b, c, d, x, s, ac)
  local sum = (a + ((b & d) | (c & (~d))) + x + ac) & 0xffffffff
  local rot = ((sum << s) | (sum >> (32 - s))) & 0xffffffff
  return (rot + b) & 0xffffffff
end
-- MD5 round-3 step: H(b,c,d) = b XOR c XOR d.
local function HH(a, b, c, d, x, s, ac)
  local sum = (a + (b ~ c ~ d) + x + ac) & 0xffffffff
  local rot = ((sum << s) | (sum >> (32 - s))) & 0xffffffff
  return (rot + b) & 0xffffffff
end
-- MD5 round-4 step: I(b,c,d) = c XOR (b OR NOT d).
local function II(a, b, c, d, x, s, ac)
  local sum = (a + (c ~ (b | ~d)) + x + ac) & 0xffffffff
  local rot = ((sum << s) | (sum >> (32 - s))) & 0xffffffff
  return (rot + b) & 0xffffffff
end
-- MD5 compression function: fold the 64-byte block of 'input' starting
-- at offset 'i' into 'state' (four uint32 words). Shift amounts and
-- additive constants follow RFC 1321.
local function transform(state, input, i, t)
  -- process the 64-byte input block in string 'input' at offset 'i'
  -- t is a uint32[16] array. It is passed as a parameter
  -- for performance reasons
  --
  local a, b, c, d = state[1], state[2], state[3], state[4]
  -- load array: 16 little-endian uint32 words from the block
  for j = 1, 16 do
    t[j] = sunpack("<I4", input, i)
    i = i + 4
  end
  -- Round 1
  a = FF (a, b, c, d, t[ 1], 7, 0xd76aa478)
  d = FF (d, a, b, c, t[ 2], 12, 0xe8c7b756)
  c = FF (c, d, a, b, t[ 3], 17, 0x242070db)
  b = FF (b, c, d, a, t[ 4], 22, 0xc1bdceee)
  a = FF (a, b, c, d, t[ 5], 7, 0xf57c0faf)
  d = FF (d, a, b, c, t[ 6], 12, 0x4787c62a)
  c = FF (c, d, a, b, t[ 7], 17, 0xa8304613)
  b = FF (b, c, d, a, t[ 8], 22, 0xfd469501)
  a = FF (a, b, c, d, t[ 9], 7, 0x698098d8)
  d = FF (d, a, b, c, t[10], 12, 0x8b44f7af)
  c = FF (c, d, a, b, t[11], 17, 0xffff5bb1)
  b = FF (b, c, d, a, t[12], 22, 0x895cd7be)
  a = FF (a, b, c, d, t[13], 7, 0x6b901122)
  d = FF (d, a, b, c, t[14], 12, 0xfd987193)
  c = FF (c, d, a, b, t[15], 17, 0xa679438e)
  b = FF (b, c, d, a, t[16], 22, 0x49b40821)
  -- Round 2
  a = GG (a, b, c, d, t[ 2], 5, 0xf61e2562)
  d = GG (d, a, b, c, t[ 7], 9, 0xc040b340)
  c = GG (c, d, a, b, t[12], 14, 0x265e5a51)
  b = GG (b, c, d, a, t[ 1], 20, 0xe9b6c7aa)
  a = GG (a, b, c, d, t[ 6], 5, 0xd62f105d)
  d = GG (d, a, b, c, t[11], 9, 0x2441453)
  c = GG (c, d, a, b, t[16], 14, 0xd8a1e681)
  b = GG (b, c, d, a, t[ 5], 20, 0xe7d3fbc8)
  a = GG (a, b, c, d, t[10], 5, 0x21e1cde6)
  d = GG (d, a, b, c, t[15], 9, 0xc33707d6)
  c = GG (c, d, a, b, t[ 4], 14, 0xf4d50d87)
  b = GG (b, c, d, a, t[ 9], 20, 0x455a14ed)
  a = GG (a, b, c, d, t[14], 5, 0xa9e3e905)
  d = GG (d, a, b, c, t[ 3], 9, 0xfcefa3f8)
  c = GG (c, d, a, b, t[ 8], 14, 0x676f02d9)
  b = GG (b, c, d, a, t[13], 20, 0x8d2a4c8a)
  -- Round 3
  a = HH (a, b, c, d, t[ 6], 4, 0xfffa3942)
  d = HH (d, a, b, c, t[ 9], 11, 0x8771f681)
  c = HH (c, d, a, b, t[12], 16, 0x6d9d6122)
  b = HH (b, c, d, a, t[15], 23, 0xfde5380c)
  a = HH (a, b, c, d, t[ 2], 4, 0xa4beea44)
  d = HH (d, a, b, c, t[ 5], 11, 0x4bdecfa9)
  c = HH (c, d, a, b, t[ 8], 16, 0xf6bb4b60)
  b = HH (b, c, d, a, t[11], 23, 0xbebfbc70)
  a = HH (a, b, c, d, t[14], 4, 0x289b7ec6)
  d = HH (d, a, b, c, t[ 1], 11, 0xeaa127fa)
  c = HH (c, d, a, b, t[ 4], 16, 0xd4ef3085)
  b = HH (b, c, d, a, t[ 7], 23, 0x4881d05)
  a = HH (a, b, c, d, t[10], 4, 0xd9d4d039)
  d = HH (d, a, b, c, t[13], 11, 0xe6db99e5)
  c = HH (c, d, a, b, t[16], 16, 0x1fa27cf8)
  b = HH (b, c, d, a, t[ 3], 23, 0xc4ac5665)
  -- Round 4
  a = II (a, b, c, d, t[ 1], 6, 0xf4292244)
  d = II (d, a, b, c, t[ 8], 10, 0x432aff97)
  c = II (c, d, a, b, t[15], 15, 0xab9423a7)
  b = II (b, c, d, a, t[ 6], 21, 0xfc93a039)
  a = II (a, b, c, d, t[13], 6, 0x655b59c3)
  d = II (d, a, b, c, t[ 4], 10, 0x8f0ccc92)
  c = II (c, d, a, b, t[11], 15, 0xffeff47d)
  b = II (b, c, d, a, t[ 2], 21, 0x85845dd1)
  a = II (a, b, c, d, t[ 9], 6, 0x6fa87e4f)
  d = II (d, a, b, c, t[16], 10, 0xfe2ce6e0)
  c = II (c, d, a, b, t[ 7], 15, 0xa3014314)
  b = II (b, c, d, a, t[14], 21, 0x4e0811a1)
  a = II (a, b, c, d, t[ 5], 6, 0xf7537e82)
  d = II (d, a, b, c, t[12], 10, 0xbd3af235)
  c = II (c, d, a, b, t[ 3], 15, 0x2ad7d2bb)
  b = II (b, c, d, a, t[10], 21, 0xeb86d391)
  -- Add this block's result back into the running state (mod 2^32).
  state[1] = (state[1] + a) & 0xffffffff
  state[2] = (state[2] + b) & 0xffffffff
  state[3] = (state[3] + c) & 0xffffffff
  state[4] = (state[4] + d) & 0xffffffff
end --transform()
-- Compute the MD5 digest of 'input'. Returns the raw 16-byte digest
-- string (not hex-encoded).
local function md5(input)
  -- initialize state (the four RFC 1321 magic constants)
  local state = { 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476 }
  local inputlen = #input
  local inputbits = inputlen * 8 -- input length in bits
  local r = inputlen -- number of unprocessed bytes
  local i = 1 -- index in input string
  local ibt = {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0} -- input block uint32[16]
  -- process as many 64-byte blocks as possible
  while r >= 64 do
    -- process block
    transform(state, input, i, ibt)
    i = i + 64 -- update input index
    r = r - 64 -- update number of unprocessed bytes
  end
  -- finalize. must append to input a mandatory 0x80 byte, some
  -- padding, and the input bit-length ('inputbits')
  local lastblock -- the rest of input .. some padding .. inputbits
  local padlen -- padding length in bytes
  -- Pad so remainder + 1 marker byte + 8 length bytes hits a 64-byte
  -- boundary; if fewer than 8 bytes remain free, spill into a 2nd block.
  if r < 56 then padlen = 55 - r else padlen = 119 - r end
  lastblock = input:sub(i) -- remaining input
    .. '\x80' .. ('\0'):rep(padlen) --padding
    .. spack("<I8", inputbits) -- length in bits
  assert(#lastblock == 64 or #lastblock == 128)
  transform(state, lastblock, 1, ibt)
  if #lastblock == 128 then
    transform(state, lastblock, 65, ibt)
  end
  -- return the digest (state words serialized little-endian)
  local digest = spack("<I4I4I4I4", state[1], state[2], state[3], state[4])
  return digest
end --md5()
--~ bin = require "plc.bin"
--~ print(bin.stohex(md5'abc'))
--~ print(bin.stohex(md5""))
return { -- md5 module
hash = md5,
}

View file

@ -1,140 +0,0 @@
"""Record of phased-in incompatible language changes.
Each line is of the form:
FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease ","
CompilerFlag ")"
where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples
of the same form as sys.version_info:
(PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int
PY_MINOR_VERSION, # the 1; an int
PY_MICRO_VERSION, # the 0; an int
PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string
PY_RELEASE_SERIAL # the 3; an int
)
OptionalRelease records the first release in which
from __future__ import FeatureName
was accepted.
In the case of MandatoryReleases that have not yet occurred,
MandatoryRelease predicts the release in which the feature will become part
of the language.
Else MandatoryRelease records when the feature became part of the language;
in releases at or after that, modules no longer need
from __future__ import FeatureName
to use the feature in question, but may continue to use such imports.
MandatoryRelease may also be None, meaning that a planned feature got
dropped.
Instances of class _Feature have two corresponding methods,
.getOptionalRelease() and .getMandatoryRelease().
CompilerFlag is the (bitfield) flag that should be passed in the fourth
argument to the builtin function compile() to enable the feature in
dynamically compiled code. This flag is stored in the .compiler_flag
attribute on _Future instances. These values must match the appropriate
#defines of CO_xxx flags in Include/compile.h.
No feature line is ever to be deleted from this file.
"""
# Every feature name ever defined, in the order the features were added.
all_feature_names = [
    "nested_scopes",
    "generators",
    "division",
    "absolute_import",
    "with_statement",
    "print_function",
    "unicode_literals",
    "barry_as_FLUFL",
    "generator_stop",
]

__all__ = ["all_feature_names"] + all_feature_names

# The CO_xxx symbols are defined here under the same names used by
# compile.h, so that an editor search will find them here.  However,
# they're not exported in __all__, because they don't really belong to
# this module.
CO_NESTED = 0x0010                      # nested_scopes
CO_GENERATOR_ALLOWED = 0                # generators (obsolete, was 0x1000)
CO_FUTURE_DIVISION = 0x2000             # division
CO_FUTURE_ABSOLUTE_IMPORT = 0x4000      # perform absolute imports by default
CO_FUTURE_WITH_STATEMENT = 0x8000       # with statement
CO_FUTURE_PRINT_FUNCTION = 0x10000      # print function
CO_FUTURE_UNICODE_LITERALS = 0x20000    # unicode string literals
CO_FUTURE_BARRY_AS_BDFL = 0x40000       # barry_as_FLUFL
CO_FUTURE_GENERATOR_STOP = 0x80000      # StopIteration becomes RuntimeError in generators
class _Feature:
def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
self.optional = optionalRelease
self.mandatory = mandatoryRelease
self.compiler_flag = compiler_flag
def getOptionalRelease(self):
"""Return first release in which this feature was recognized.
This is a 5-tuple, of the same form as sys.version_info.
"""
return self.optional
def getMandatoryRelease(self):
"""Return release in which this feature will become mandatory.
This is a 5-tuple, of the same form as sys.version_info, or, if
the feature was dropped, is None.
"""
return self.mandatory
def __repr__(self):
return "_Feature" + repr((self.optional,
self.mandatory,
self.compiler_flag))
# One _Feature per feature: (introduced in, mandatory in, compiler flag).
nested_scopes = _Feature((2, 1, 0, "beta", 1),
                         (2, 2, 0, "alpha", 0),
                         CO_NESTED)

generators = _Feature((2, 2, 0, "alpha", 1),
                      (2, 3, 0, "final", 0),
                      CO_GENERATOR_ALLOWED)

division = _Feature((2, 2, 0, "alpha", 2),
                    (3, 0, 0, "alpha", 0),
                    CO_FUTURE_DIVISION)

absolute_import = _Feature((2, 5, 0, "alpha", 1),
                           (3, 0, 0, "alpha", 0),
                           CO_FUTURE_ABSOLUTE_IMPORT)

with_statement = _Feature((2, 5, 0, "alpha", 1),
                          (2, 6, 0, "alpha", 0),
                          CO_FUTURE_WITH_STATEMENT)

print_function = _Feature((2, 6, 0, "alpha", 2),
                          (3, 0, 0, "alpha", 0),
                          CO_FUTURE_PRINT_FUNCTION)

unicode_literals = _Feature((2, 6, 0, "alpha", 2),
                            (3, 0, 0, "alpha", 0),
                            CO_FUTURE_UNICODE_LITERALS)

barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
                          (3, 9, 0, "alpha", 0),
                          CO_FUTURE_BARRY_AS_BDFL)

generator_stop = _Feature((3, 5, 0, "beta", 1),
                          (3, 7, 0, "alpha", 0),
                          CO_FUTURE_GENERATOR_STOP)

View file

@ -1 +0,0 @@
# This file exists as a helper for the test.test_frozen module.

View file

@ -1,34 +0,0 @@
"""A minimal subset of the locale module used at interpreter startup
(imported by the _io module), in order to reduce startup time.
Don't import directly from third-party code; use the `locale` module instead!
"""
import sys
import _locale
# Pick the cheapest correct getpreferredencoding() implementation for the
# current platform at import time.
if sys.platform.startswith("win"):
    def getpreferredencoding(do_setlocale=True):
        # Windows: encoding of the default locale.
        return _locale._getdefaultlocale()[1]
else:
    try:
        _locale.CODESET
    except AttributeError:
        def getpreferredencoding(do_setlocale=True):
            # This path for legacy systems needs the more complex
            # getdefaultlocale() function, import the full locale module.
            import locale
            return locale.getpreferredencoding(do_setlocale)
    else:
        def getpreferredencoding(do_setlocale=True):
            # Startup caller never asks for setlocale; enforce that.
            assert not do_setlocale
            result = _locale.nl_langinfo(_locale.CODESET)
            if not result and sys.platform == 'darwin':
                # nl_langinfo can return an empty string
                # when the setting has an invalid value.
                # Default to UTF-8 in that case because
                # UTF-8 is the default charset on OSX and
                # returning nothing will crash the
                # interpreter.
                result = 'UTF-8'
            return result

View file

@ -1,941 +0,0 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
# Public API of collections.abc.
__all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator",
           "Hashable", "Iterable", "Iterator", "Generator",
           "Sized", "Container", "Callable",
           "Set", "MutableSet",
           "Mapping", "MutableMapping",
           "MappingView", "KeysView", "ItemsView", "ValuesView",
           "Sequence", "MutableSequence",
           "ByteString",
           ]

# This module has been renamed from collections.abc to _collections_abc to
# speed up interpreter startup. Some of the types such as MutableMapping are
# required early but collections module imports a lot of other modules.
# See issue #19218
__name__ = "collections.abc"
# Private list of types that we want to register with the various ABCs
# so that they will pass tests like:
#     it = iter(somebytearray)
#     assert isinstance(it, Iterable)
# Note: in other implementations, these types might not be distinct
# and they may have their own implementation specific types that
# are not included on this list.
bytes_iterator = type(iter(b''))
bytearray_iterator = type(iter(bytearray()))
#callable_iterator = ???
dict_keyiterator = type(iter({}.keys()))
dict_valueiterator = type(iter({}.values()))
dict_itemiterator = type(iter({}.items()))
list_iterator = type(iter([]))
list_reverseiterator = type(iter(reversed([])))
range_iterator = type(iter(range(0)))
longrange_iterator = type(iter(range(1 << 1000)))
set_iterator = type(iter(set()))
str_iterator = type(iter(""))
tuple_iterator = type(iter(()))
zip_iterator = type(iter(zip()))
## views ##
dict_keys = type({}.keys())
dict_values = type({}.values())
dict_items = type({}.items())
## misc ##
mappingproxy = type(type.__dict__)
# Capture the generator type from a throwaway generator expression.
generator = type((lambda: (yield))())
## coroutine ##
async def _coro(): pass
_coro = _coro()
coroutine = type(_coro)
_coro.close()  # Prevent ResourceWarning
del _coro
### ONE-TRICK PONIES ###
class Hashable(metaclass=ABCMeta):
    """ABC for classes providing a working __hash__."""

    __slots__ = ()

    @abstractmethod
    def __hash__(self):
        return 0

    @classmethod
    def __subclasshook__(cls, C):
        # Only answer for Hashable itself; subclasses get default logic.
        if cls is Hashable:
            for base in C.__mro__:
                if "__hash__" in base.__dict__:
                    # An explicit __hash__ = None marks a class unhashable.
                    return True if base.__dict__["__hash__"] else NotImplemented
        return NotImplemented
class Awaitable(metaclass=ABCMeta):
    """ABC for objects usable in an ``await`` expression (__await__)."""

    __slots__ = ()

    @abstractmethod
    def __await__(self):
        yield

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not Awaitable:
            return NotImplemented
        for base in C.__mro__:
            if "__await__" in base.__dict__:
                # A falsy entry means "explicitly not awaitable".
                return True if base.__dict__["__await__"] else NotImplemented
        return NotImplemented
class Coroutine(Awaitable):
    """ABC for coroutine objects: awaitables driven via send/throw/close."""

    __slots__ = ()

    @abstractmethod
    def send(self, value):
        """Send a value into the coroutine.
        Return next yielded value or raise StopIteration.
        """
        raise StopIteration

    @abstractmethod
    def throw(self, typ, val=None, tb=None):
        """Raise an exception in the coroutine.
        Return next yielded value or raise StopIteration.
        """
        if val is None:
            if tb is None:
                raise typ
            val = typ()
        if tb is not None:
            val = val.with_traceback(tb)
        raise val

    def close(self):
        """Raise GeneratorExit inside coroutine.
        """
        try:
            self.throw(GeneratorExit)
        except (GeneratorExit, StopIteration):
            pass
        else:
            # A well-behaved coroutine must stop when asked to.
            raise RuntimeError("coroutine ignored GeneratorExit")

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Coroutine:
            mro = C.__mro__
            # All four coroutine methods must appear somewhere in the MRO.
            for method in ('__await__', 'send', 'throw', 'close'):
                for base in mro:
                    if method in base.__dict__:
                        break
                else:
                    return NotImplemented
            return True
        return NotImplemented

Coroutine.register(coroutine)
class AsyncIterable(metaclass=ABCMeta):
    """ABC for classes that define __aiter__."""

    __slots__ = ()

    @abstractmethod
    def __aiter__(self):
        return AsyncIterator()

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not AsyncIterable:
            return NotImplemented
        found = any("__aiter__" in base.__dict__ for base in C.__mro__)
        return True if found else NotImplemented
class AsyncIterator(AsyncIterable):
    """ABC for async iterators: __aiter__ plus __anext__."""

    __slots__ = ()

    @abstractmethod
    async def __anext__(self):
        """Return the next item or raise StopAsyncIteration when exhausted."""
        raise StopAsyncIteration

    def __aiter__(self):
        # An async iterator is its own async iterable.
        return self

    @classmethod
    def __subclasshook__(cls, C):
        if cls is AsyncIterator:
            if (any("__anext__" in B.__dict__ for B in C.__mro__) and
                    any("__aiter__" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented
class Iterable(metaclass=ABCMeta):
    """ABC for classes that define __iter__."""

    __slots__ = ()

    @abstractmethod
    def __iter__(self):
        # Yields nothing; concrete subclasses override.
        while False:
            yield None

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not Iterable:
            return NotImplemented
        found = any("__iter__" in base.__dict__ for base in C.__mro__)
        return True if found else NotImplemented
class Iterator(Iterable):
    """ABC for iterators: __iter__ plus __next__."""

    __slots__ = ()

    @abstractmethod
    def __next__(self):
        'Return the next item from the iterator. When exhausted, raise StopIteration'
        raise StopIteration

    def __iter__(self):
        # An iterator is its own iterable.
        return self

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Iterator:
            if (any("__next__" in B.__dict__ for B in C.__mro__) and
                    any("__iter__" in B.__dict__ for B in C.__mro__)):
                return True
        return NotImplemented

# Register the concrete built-in iterator types captured above.
Iterator.register(bytes_iterator)
Iterator.register(bytearray_iterator)
#Iterator.register(callable_iterator)
Iterator.register(dict_keyiterator)
Iterator.register(dict_valueiterator)
Iterator.register(dict_itemiterator)
Iterator.register(list_iterator)
Iterator.register(list_reverseiterator)
Iterator.register(range_iterator)
Iterator.register(longrange_iterator)
Iterator.register(set_iterator)
Iterator.register(str_iterator)
Iterator.register(tuple_iterator)
Iterator.register(zip_iterator)
class Generator(Iterator):
    """ABC for generators: iterators that also support send/throw/close."""

    __slots__ = ()

    def __next__(self):
        """Return the next item from the generator.
        When exhausted, raise StopIteration.
        """
        return self.send(None)

    @abstractmethod
    def send(self, value):
        """Send a value into the generator.
        Return next yielded value or raise StopIteration.
        """
        raise StopIteration

    @abstractmethod
    def throw(self, typ, val=None, tb=None):
        """Raise an exception in the generator.
        Return next yielded value or raise StopIteration.
        """
        if val is None:
            if tb is None:
                raise typ
            val = typ()
        if tb is not None:
            val = val.with_traceback(tb)
        raise val

    def close(self):
        """Raise GeneratorExit inside generator.
        """
        try:
            self.throw(GeneratorExit)
        except (GeneratorExit, StopIteration):
            pass
        else:
            # A well-behaved generator must stop when asked to.
            raise RuntimeError("generator ignored GeneratorExit")

    @classmethod
    def __subclasshook__(cls, C):
        if cls is Generator:
            mro = C.__mro__
            # All five generator methods must appear somewhere in the MRO.
            for method in ('__iter__', '__next__', 'send', 'throw', 'close'):
                for base in mro:
                    if method in base.__dict__:
                        break
                else:
                    return NotImplemented
            return True
        return NotImplemented

Generator.register(generator)
class Sized(metaclass=ABCMeta):
    """ABC for classes that define __len__."""

    __slots__ = ()

    @abstractmethod
    def __len__(self):
        return 0

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not Sized:
            return NotImplemented
        found = any("__len__" in base.__dict__ for base in C.__mro__)
        return True if found else NotImplemented
class Container(metaclass=ABCMeta):
    """ABC for classes that define __contains__."""

    __slots__ = ()

    @abstractmethod
    def __contains__(self, x):
        return False

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not Container:
            return NotImplemented
        found = any("__contains__" in base.__dict__ for base in C.__mro__)
        return True if found else NotImplemented
class Callable(metaclass=ABCMeta):
    """ABC for objects invocable with ()."""

    __slots__ = ()

    @abstractmethod
    def __call__(self, *args, **kwds):
        return False

    @classmethod
    def __subclasshook__(cls, C):
        if cls is not Callable:
            return NotImplemented
        defines_call = any("__call__" in base.__dict__ for base in C.__mro__)
        return True if defines_call else NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
    """A set is a finite, iterable container.

    This class provides concrete generic implementations of all
    methods except for __contains__, __iter__ and __len__.

    To override the comparisons (presumably for speed, as the
    semantics are fixed), redefine __le__ and __ge__,
    then the other operations will automatically follow suit.
    """

    __slots__ = ()

    def __le__(self, other):
        # Subset test: every element of self is in other.
        if not isinstance(other, Set):
            return NotImplemented
        if len(self) > len(other):
            return False
        for elem in self:
            if elem not in other:
                return False
        return True

    def __lt__(self, other):
        # Proper subset: strictly smaller and still a subset.
        if not isinstance(other, Set):
            return NotImplemented
        return len(self) < len(other) and self.__le__(other)

    def __gt__(self, other):
        if not isinstance(other, Set):
            return NotImplemented
        return len(self) > len(other) and self.__ge__(other)

    def __ge__(self, other):
        # Superset test: every element of other is in self.
        if not isinstance(other, Set):
            return NotImplemented
        if len(self) < len(other):
            return False
        for elem in other:
            if elem not in self:
                return False
        return True

    def __eq__(self, other):
        if not isinstance(other, Set):
            return NotImplemented
        return len(self) == len(other) and self.__le__(other)

    @classmethod
    def _from_iterable(cls, it):
        '''Construct an instance of the class from any iterable input.

        Must override this method if the class constructor signature
        does not accept an iterable for an input.
        '''
        return cls(it)

    def __and__(self, other):
        if not isinstance(other, Iterable):
            return NotImplemented
        return self._from_iterable(value for value in other if value in self)

    __rand__ = __and__

    def isdisjoint(self, other):
        'Return True if two sets have a null intersection.'
        for value in other:
            if value in self:
                return False
        return True

    def __or__(self, other):
        if not isinstance(other, Iterable):
            return NotImplemented
        chain = (e for s in (self, other) for e in s)
        return self._from_iterable(chain)

    __ror__ = __or__

    def __sub__(self, other):
        # Coerce a plain iterable so membership tests are well-defined.
        if not isinstance(other, Set):
            if not isinstance(other, Iterable):
                return NotImplemented
            other = self._from_iterable(other)
        return self._from_iterable(value for value in self
                                   if value not in other)

    def __rsub__(self, other):
        if not isinstance(other, Set):
            if not isinstance(other, Iterable):
                return NotImplemented
            other = self._from_iterable(other)
        return self._from_iterable(value for value in other
                                   if value not in self)

    def __xor__(self, other):
        if not isinstance(other, Set):
            if not isinstance(other, Iterable):
                return NotImplemented
            other = self._from_iterable(other)
        return (self - other) | (other - self)

    __rxor__ = __xor__

    def _hash(self):
        """Compute the hash value of a set.

        Note that we don't define __hash__: not all sets are hashable.
        But if you define a hashable set type, its __hash__ should
        call this function.

        This must be compatible __eq__.

        All sets ought to compare equal if they contain the same
        elements, regardless of how they are implemented, and
        regardless of the order of the elements; so there's not much
        freedom for __eq__ or __hash__.  We match the algorithm used
        by the built-in frozenset type.
        """
        MAX = sys.maxsize
        MASK = 2 * MAX + 1
        n = len(self)
        h = 1927868237 * (n + 1)
        h &= MASK
        for x in self:
            hx = hash(x)
            h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
            h &= MASK
        h = h * 69069 + 907133923
        h &= MASK
        if h > MAX:
            h -= MASK + 1
        if h == -1:
            h = 590923713
        return h

Set.register(frozenset)
class MutableSet(Set):
    """A mutable set is a finite, iterable container.

    This class provides concrete generic implementations of all
    methods except for __contains__, __iter__, __len__,
    add(), and discard().

    To override the comparisons (presumably for speed, as the
    semantics are fixed), all you have to do is redefine __le__ and
    then the other operations will automatically follow suit.
    """

    __slots__ = ()

    @abstractmethod
    def add(self, value):
        """Add an element."""
        raise NotImplementedError

    @abstractmethod
    def discard(self, value):
        """Remove an element.  Do not raise an exception if absent."""
        raise NotImplementedError

    def remove(self, value):
        """Remove an element. If not a member, raise a KeyError."""
        if value not in self:
            raise KeyError(value)
        self.discard(value)

    def pop(self):
        """Return the popped value.  Raise KeyError if empty."""
        it = iter(self)
        try:
            value = next(it)
        except StopIteration:
            raise KeyError
        self.discard(value)
        return value

    def clear(self):
        """This is slow (creates N new iterators!) but effective."""
        try:
            while True:
                self.pop()
        except KeyError:
            pass

    def __ior__(self, it):
        for value in it:
            self.add(value)
        return self

    def __iand__(self, it):
        # Drop everything not in 'it'; (self - it) is computed first so we
        # never mutate while iterating self.
        for value in (self - it):
            self.discard(value)
        return self

    def __ixor__(self, it):
        if it is self:
            # x ^= x is always empty.
            self.clear()
        else:
            if not isinstance(it, Set):
                it = self._from_iterable(it)
            for value in it:
                if value in self:
                    self.discard(value)
                else:
                    self.add(value)
        return self

    def __isub__(self, it):
        if it is self:
            self.clear()
        else:
            for value in it:
                self.discard(value)
        return self

MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):

    __slots__ = ()

    """A Mapping is a generic container for associating key/value
    pairs.

    This class provides concrete generic implementations of all
    methods except for __getitem__, __iter__, and __len__.

    """

    @abstractmethod
    def __getitem__(self, key):
        raise KeyError

    def get(self, key, default=None):
        'D.get(k[,d]) -> D[k] if k in D, else d.  d defaults to None.'
        try:
            return self[key]
        except KeyError:
            return default

    def __contains__(self, key):
        # Membership is defined by lookup success, not by iteration.
        try:
            self[key]
        except KeyError:
            return False
        else:
            return True

    def keys(self):
        "D.keys() -> a set-like object providing a view on D's keys"
        return KeysView(self)

    def items(self):
        "D.items() -> a set-like object providing a view on D's items"
        return ItemsView(self)

    def values(self):
        "D.values() -> an object providing a view on D's values"
        return ValuesView(self)

    def __eq__(self, other):
        if not isinstance(other, Mapping):
            return NotImplemented
        return dict(self.items()) == dict(other.items())

Mapping.register(mappingproxy)
class MappingView(Sized):
    """Base class for the dynamic views returned by Mapping.keys/items/values."""

    # Views keep a reference to the mapping they present.
    __slots__ = '_mapping',

    def __init__(self, mapping):
        self._mapping = mapping

    def __len__(self):
        return len(self._mapping)

    def __repr__(self):
        return '{0.__class__.__name__}({0._mapping!r})'.format(self)
class KeysView(MappingView, Set):
    """Set-like view of a mapping's keys."""

    __slots__ = ()

    @classmethod
    def _from_iterable(self, it):
        # Set operations on views produce plain sets.
        return set(it)

    def __contains__(self, key):
        return key in self._mapping

    def __iter__(self):
        yield from self._mapping

KeysView.register(dict_keys)
class ItemsView(MappingView, Set):
    """Set-like view of a mapping's (key, value) pairs."""

    __slots__ = ()

    @classmethod
    def _from_iterable(self, it):
        # Set operations on views produce plain sets.
        return set(it)

    def __contains__(self, item):
        key, value = item
        try:
            v = self._mapping[key]
        except KeyError:
            return False
        else:
            return v == value

    def __iter__(self):
        for key in self._mapping:
            yield (key, self._mapping[key])

ItemsView.register(dict_items)
class ValuesView(MappingView):
    """View of a mapping's values (not set-like: values may repeat)."""

    __slots__ = ()

    def __contains__(self, value):
        for key in self._mapping:
            if value == self._mapping[key]:
                return True
        return False

    def __iter__(self):
        for key in self._mapping:
            yield self._mapping[key]

ValuesView.register(dict_values)
class MutableMapping(Mapping):

    __slots__ = ()

    """A MutableMapping is a generic container for associating
    key/value pairs.

    This class provides concrete generic implementations of all
    methods except for __getitem__, __setitem__, __delitem__,
    __iter__, and __len__.

    """

    @abstractmethod
    def __setitem__(self, key, value):
        raise KeyError

    @abstractmethod
    def __delitem__(self, key):
        raise KeyError

    # Private sentinel so pop() can tell "no default given" from
    # "default is None".
    __marker = object()

    def pop(self, key, default=__marker):
        '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
          If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def popitem(self):
        '''D.popitem() -> (k, v), remove and return some (key, value) pair
           as a 2-tuple; but raise KeyError if D is empty.
        '''
        try:
            key = next(iter(self))
        except StopIteration:
            raise KeyError
        value = self[key]
        del self[key]
        return key, value

    def clear(self):
        'D.clear() -> None.  Remove all items from D.'
        try:
            while True:
                self.popitem()
        except KeyError:
            pass

    # 'self' is taken out of *args by hand so that a positional mapping
    # argument named 'self' in **kwds cannot collide with it.
    def update(*args, **kwds):
        ''' D.update([E, ]**F) -> None.  Update D from mapping/iterable E and F.
        If E present and has a .keys() method, does:     for k in E: D[k] = E[k]
        If E present and lacks .keys() method, does:     for (k, v) in E: D[k] = v
        In either case, this is followed by: for k, v in F.items(): D[k] = v
        '''
        if not args:
            raise TypeError("descriptor 'update' of 'MutableMapping' object "
                            "needs an argument")
        self, *args = args
        if len(args) > 1:
            raise TypeError('update expected at most 1 arguments, got %d' %
                            len(args))
        if args:
            other = args[0]
            if isinstance(other, Mapping):
                for key in other:
                    self[key] = other[key]
            elif hasattr(other, "keys"):
                for key in other.keys():
                    self[key] = other[key]
            else:
                for key, value in other:
                    self[key] = value
        for key, value in kwds.items():
            self[key] = value

    def setdefault(self, key, default=None):
        'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D'
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default

MutableMapping.register(dict)
### SEQUENCES ###

class Sequence(Sized, Iterable, Container):
    """All the operations on a read-only sequence.

    Concrete subclasses must override __new__ or __init__,
    __getitem__, and __len__.
    """

    __slots__ = ()

    @abstractmethod
    def __getitem__(self, index):
        raise IndexError

    def __iter__(self):
        # Generic iteration: probe successive indices until __getitem__
        # signals the end of the sequence by raising IndexError.
        i = 0
        try:
            while True:
                v = self[i]
                yield v
                i += 1
        except IndexError:
            return

    def __contains__(self, value):
        for v in self:
            if v == value:
                return True
        return False

    def __reversed__(self):
        for i in reversed(range(len(self))):
            yield self[i]

    def index(self, value, start=0, stop=None):
        '''S.index(value, [start, [stop]]) -> integer -- return first index of value.
        Raises ValueError if the value is not present.
        '''
        # Negative bounds are interpreted relative to the end, as with slices;
        # stop=None means "search to the end of the sequence".
        if start is not None and start < 0:
            start = max(len(self) + start, 0)
        if stop is not None and stop < 0:
            stop += len(self)

        i = start
        while stop is None or i < stop:
            try:
                if self[i] == value:
                    return i
            except IndexError:
                # Ran off the end of the sequence without finding the value.
                break
            i += 1
        raise ValueError

    def count(self, value):
        'S.count(value) -> integer -- return number of occurrences of value'
        return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(str)
Sequence.register(range)
Sequence.register(memoryview)
class ByteString(Sequence):
    """Common abstract base class unifying bytes and bytearray.

    XXX Should add all their methods.
    """

    __slots__ = ()
ByteString.register(bytes)
ByteString.register(bytearray)
class MutableSequence(Sequence):
    """All the operations on a read-write sequence.

    Concrete subclasses must provide __new__ or __init__,
    __getitem__, __setitem__, __delitem__, __len__, and insert().
    """

    __slots__ = ()

    @abstractmethod
    def __setitem__(self, index, value):
        raise IndexError

    @abstractmethod
    def __delitem__(self, index):
        raise IndexError

    @abstractmethod
    def insert(self, index, value):
        'S.insert(index, value) -- insert value before index'
        raise IndexError

    def append(self, value):
        'S.append(value) -- append value to the end of the sequence'
        self.insert(len(self), value)

    def clear(self):
        'S.clear() -> None -- remove all items from S'
        # Pop from the end until the sequence signals emptiness.
        try:
            while True:
                self.pop()
        except IndexError:
            pass

    def reverse(self):
        'S.reverse() -- reverse *IN PLACE*'
        size = len(self)
        for left in range(size // 2):
            right = size - left - 1
            self[left], self[right] = self[right], self[left]

    def extend(self, values):
        'S.extend(iterable) -- extend sequence by appending elements from the iterable'
        for item in values:
            self.append(item)

    def pop(self, index=-1):
        '''S.pop([index]) -> item -- remove and return item at index (default last).
        Raise IndexError if list is empty or index is out of range.
        '''
        item = self[index]
        del self[index]
        return item

    def remove(self, value):
        '''S.remove(value) -- remove first occurrence of value.
        Raise ValueError if the value is not present.
        '''
        del self[self.index(value)]

    def __iadd__(self, values):
        self.extend(values)
        return self
MutableSequence.register(list)
MutableSequence.register(bytearray)  # Multiply inheriting, see ByteString

View file

@ -1,244 +0,0 @@
# This module is used to map the old Python 2 names to the new names used in
# Python 3 for the pickle module. This needed to make pickle streams
# generated with Python 2 loadable by Python 3.

# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import
# lib2to3 and use the mapping defined there, because lib2to3 uses pickle.
# Thus, this could cause the module to be imported recursively.
IMPORT_MAPPING = {
    '__builtin__' : 'builtins',
    'copy_reg': 'copyreg',
    'Queue': 'queue',
    'SocketServer': 'socketserver',
    'ConfigParser': 'configparser',
    'repr': 'reprlib',
    'tkFileDialog': 'tkinter.filedialog',
    'tkSimpleDialog': 'tkinter.simpledialog',
    'tkColorChooser': 'tkinter.colorchooser',
    'tkCommonDialog': 'tkinter.commondialog',
    'Dialog': 'tkinter.dialog',
    'Tkdnd': 'tkinter.dnd',
    'tkFont': 'tkinter.font',
    'tkMessageBox': 'tkinter.messagebox',
    'ScrolledText': 'tkinter.scrolledtext',
    'Tkconstants': 'tkinter.constants',
    'Tix': 'tkinter.tix',
    'ttk': 'tkinter.ttk',
    'Tkinter': 'tkinter',
    'markupbase': '_markupbase',
    '_winreg': 'winreg',
    'thread': '_thread',
    'dummy_thread': '_dummy_thread',
    'dbhash': 'dbm.bsd',
    'dumbdbm': 'dbm.dumb',
    'dbm': 'dbm.ndbm',
    'gdbm': 'dbm.gnu',
    'xmlrpclib': 'xmlrpc.client',
    'SimpleXMLRPCServer': 'xmlrpc.server',
    'httplib': 'http.client',
    'htmlentitydefs' : 'html.entities',
    'HTMLParser' : 'html.parser',
    'Cookie': 'http.cookies',
    'cookielib': 'http.cookiejar',
    'BaseHTTPServer': 'http.server',
    'test.test_support': 'test.support',
    'commands': 'subprocess',
    'urlparse' : 'urllib.parse',
    'robotparser' : 'urllib.robotparser',
    'urllib2': 'urllib.request',
    'anydbm': 'dbm',
    '_abcoll' : 'collections.abc',
}


# This contains rename rules that are easy to handle. We ignore the more
# complex stuff (e.g. mapping the names in the urllib and types modules).
# These rules should be run before import names are fixed.
NAME_MAPPING = {
    ('__builtin__', 'xrange'): ('builtins', 'range'),
    ('__builtin__', 'reduce'): ('functools', 'reduce'),
    ('__builtin__', 'intern'): ('sys', 'intern'),
    ('__builtin__', 'unichr'): ('builtins', 'chr'),
    ('__builtin__', 'unicode'): ('builtins', 'str'),
    ('__builtin__', 'long'): ('builtins', 'int'),
    ('itertools', 'izip'): ('builtins', 'zip'),
    ('itertools', 'imap'): ('builtins', 'map'),
    ('itertools', 'ifilter'): ('builtins', 'filter'),
    ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'),
    ('itertools', 'izip_longest'): ('itertools', 'zip_longest'),
    ('UserDict', 'IterableUserDict'): ('collections', 'UserDict'),
    ('UserList', 'UserList'): ('collections', 'UserList'),
    ('UserString', 'UserString'): ('collections', 'UserString'),
    ('whichdb', 'whichdb'): ('dbm', 'whichdb'),
    ('_socket', 'fromfd'): ('socket', 'fromfd'),
    ('_multiprocessing', 'Connection'): ('multiprocessing.connection', 'Connection'),
    ('multiprocessing.process', 'Process'): ('multiprocessing.context', 'Process'),
    ('multiprocessing.forking', 'Popen'): ('multiprocessing.popen_fork', 'Popen'),
    ('urllib', 'ContentTooShortError'): ('urllib.error', 'ContentTooShortError'),
    ('urllib', 'getproxies'): ('urllib.request', 'getproxies'),
    ('urllib', 'pathname2url'): ('urllib.request', 'pathname2url'),
    ('urllib', 'quote_plus'): ('urllib.parse', 'quote_plus'),
    ('urllib', 'quote'): ('urllib.parse', 'quote'),
    ('urllib', 'unquote_plus'): ('urllib.parse', 'unquote_plus'),
    ('urllib', 'unquote'): ('urllib.parse', 'unquote'),
    ('urllib', 'url2pathname'): ('urllib.request', 'url2pathname'),
    ('urllib', 'urlcleanup'): ('urllib.request', 'urlcleanup'),
    ('urllib', 'urlencode'): ('urllib.parse', 'urlencode'),
    ('urllib', 'urlopen'): ('urllib.request', 'urlopen'),
    ('urllib', 'urlretrieve'): ('urllib.request', 'urlretrieve'),
    ('urllib2', 'HTTPError'): ('urllib.error', 'HTTPError'),
    ('urllib2', 'URLError'): ('urllib.error', 'URLError'),
}

# Exception names that exist under the Python 2 'exceptions' module and map
# directly onto a same-named builtin in Python 3 (mapping built in the loop
# below).
PYTHON2_EXCEPTIONS = (
    "ArithmeticError",
    "AssertionError",
    "AttributeError",
    "BaseException",
    "BufferError",
    "BytesWarning",
    "DeprecationWarning",
    "EOFError",
    "EnvironmentError",
    "Exception",
    "FloatingPointError",
    "FutureWarning",
    "GeneratorExit",
    "IOError",
    "ImportError",
    "ImportWarning",
    "IndentationError",
    "IndexError",
    "KeyError",
    "KeyboardInterrupt",
    "LookupError",
    "MemoryError",
    "NameError",
    "NotImplementedError",
    "OSError",
    "OverflowError",
    "PendingDeprecationWarning",
    "ReferenceError",
    "RuntimeError",
    "RuntimeWarning",
    # StandardError is gone in Python 3, so we map it to Exception
    "StopIteration",
    "SyntaxError",
    "SyntaxWarning",
    "SystemError",
    "SystemExit",
    "TabError",
    "TypeError",
    "UnboundLocalError",
    "UnicodeDecodeError",
    "UnicodeEncodeError",
    "UnicodeError",
    "UnicodeTranslateError",
    "UnicodeWarning",
    "UserWarning",
    "ValueError",
    "Warning",
    "ZeroDivisionError",
)

# WindowsError only exists on Windows builds; include it when present.
try:
    WindowsError
except NameError:
    pass
else:
    PYTHON2_EXCEPTIONS += ("WindowsError",)

for excname in PYTHON2_EXCEPTIONS:
    NAME_MAPPING[("exceptions", excname)] = ("builtins", excname)

MULTIPROCESSING_EXCEPTIONS = (
    'AuthenticationError',
    'BufferTooShort',
    'ProcessError',
    'TimeoutError',
)

for excname in MULTIPROCESSING_EXCEPTIONS:
    NAME_MAPPING[("multiprocessing", excname)] = ("multiprocessing.context", excname)

# Same, but for 3.x to 2.x
REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items())
# The asserts guarantee the mappings are one-to-one, so inverting them above
# loses no entries.
assert len(REVERSE_IMPORT_MAPPING) == len(IMPORT_MAPPING)
REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items())
assert len(REVERSE_NAME_MAPPING) == len(NAME_MAPPING)

# Non-mutual mappings.
# (Added after the reverse tables are built, so these entries apply in one
# direction only.)

IMPORT_MAPPING.update({
    'cPickle': 'pickle',
    '_elementtree': 'xml.etree.ElementTree',
    'FileDialog': 'tkinter.filedialog',
    'SimpleDialog': 'tkinter.simpledialog',
    'DocXMLRPCServer': 'xmlrpc.server',
    'SimpleHTTPServer': 'http.server',
    'CGIHTTPServer': 'http.server',
    # For compatibility with broken pickles saved in old Python 3 versions
    'UserDict': 'collections',
    'UserList': 'collections',
    'UserString': 'collections',
    'whichdb': 'dbm',
    'StringIO':  'io',
    'cStringIO': 'io',
})

REVERSE_IMPORT_MAPPING.update({
    '_bz2': 'bz2',
    '_dbm': 'dbm',
    '_functools': 'functools',
    '_gdbm': 'gdbm',
    '_pickle': 'pickle',
})

NAME_MAPPING.update({
    ('__builtin__', 'basestring'): ('builtins', 'str'),
    ('exceptions', 'StandardError'): ('builtins', 'Exception'),
    ('UserDict', 'UserDict'): ('collections', 'UserDict'),
    ('socket', '_socketobject'): ('socket', 'SocketType'),
})

REVERSE_NAME_MAPPING.update({
    ('_functools', 'reduce'): ('__builtin__', 'reduce'),
    ('tkinter.filedialog', 'FileDialog'): ('FileDialog', 'FileDialog'),
    ('tkinter.filedialog', 'LoadFileDialog'): ('FileDialog', 'LoadFileDialog'),
    ('tkinter.filedialog', 'SaveFileDialog'): ('FileDialog', 'SaveFileDialog'),
    ('tkinter.simpledialog', 'SimpleDialog'): ('SimpleDialog', 'SimpleDialog'),
    ('xmlrpc.server', 'ServerHTMLDoc'): ('DocXMLRPCServer', 'ServerHTMLDoc'),
    ('xmlrpc.server', 'XMLRPCDocGenerator'):
        ('DocXMLRPCServer', 'XMLRPCDocGenerator'),
    ('xmlrpc.server', 'DocXMLRPCRequestHandler'):
        ('DocXMLRPCServer', 'DocXMLRPCRequestHandler'),
    ('xmlrpc.server', 'DocXMLRPCServer'):
        ('DocXMLRPCServer', 'DocXMLRPCServer'),
    ('xmlrpc.server', 'DocCGIXMLRPCRequestHandler'):
        ('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'),
    ('http.server', 'SimpleHTTPRequestHandler'):
        ('SimpleHTTPServer', 'SimpleHTTPRequestHandler'),
    ('http.server', 'CGIHTTPRequestHandler'):
        ('CGIHTTPServer', 'CGIHTTPRequestHandler'),
    ('_socket', 'socket'): ('socket', '_socketobject'),
})

# New Python-3-only OSError subclasses all downgrade to plain OSError when
# pickling for Python 2.
PYTHON3_OSERROR_EXCEPTIONS = (
    'BrokenPipeError',
    'ChildProcessError',
    'ConnectionAbortedError',
    'ConnectionError',
    'ConnectionRefusedError',
    'ConnectionResetError',
    'FileExistsError',
    'FileNotFoundError',
    'InterruptedError',
    'IsADirectoryError',
    'NotADirectoryError',
    'PermissionError',
    'ProcessLookupError',
    'TimeoutError',
)

for excname in PYTHON3_OSERROR_EXCEPTIONS:
    REVERSE_NAME_MAPPING[('builtins', excname)] = ('exceptions', 'OSError')

View file

@ -1,152 +0,0 @@
"""Internal classes used by the gzip, lzma and bz2 modules"""
import io
BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE # Compressed data read chunk size
class BaseStream(io.BufferedIOBase):
    """Mode-checking helper functions shared by gzip, lzma and bz2 streams."""

    def _check_can_read(self):
        # Guard: reject reads on write-only streams.
        if not self.readable():
            raise io.UnsupportedOperation("File not open for reading")

    def _check_can_write(self):
        # Guard: reject writes on read-only streams.
        if not self.writable():
            raise io.UnsupportedOperation("File not open for writing")

    def _check_can_seek(self):
        # Seeking requires read mode AND a seekable underlying file.
        if not self.readable():
            raise io.UnsupportedOperation(
                "Seeking is only supported on files open for reading")
        if not self.seekable():
            raise io.UnsupportedOperation(
                "The underlying file object does not support seeking")

    def _check_not_closed(self):
        if self.closed:
            raise ValueError("I/O operation on closed file")
class DecompressReader(io.RawIOBase):
    """Adapts the decompressor API to a RawIOBase reader API"""

    def readable(self):
        return True

    def __init__(self, fp, decomp_factory, trailing_error=(), **decomp_args):
        self._fp = fp
        self._eof = False
        self._pos = 0  # Current offset in decompressed stream

        # Set to size of decompressed stream once it is known, for SEEK_END
        self._size = -1

        # Save the decompressor factory and arguments.
        # If the file contains multiple compressed streams, each
        # stream will need a separate decompressor object. A new decompressor
        # object is also needed when implementing a backwards seek().
        self._decomp_factory = decomp_factory
        self._decomp_args = decomp_args
        self._decompressor = self._decomp_factory(**self._decomp_args)

        # Exception class to catch from decompressor signifying invalid
        # trailing data to ignore
        self._trailing_error = trailing_error

    def close(self):
        # Drop the decompressor so its internal buffers can be freed promptly.
        self._decompressor = None
        return super().close()

    def seekable(self):
        return self._fp.seekable()

    def readinto(self, b):
        # Implemented in terms of read(); the cast("B") view accepts any
        # writable buffer regardless of its original format/shape.
        with memoryview(b) as view, view.cast("B") as byte_view:
            data = self.read(len(byte_view))
            byte_view[:len(data)] = data
        return len(data)

    def read(self, size=-1):
        if size < 0:
            return self.readall()

        if not size or self._eof:
            return b""
        data = None  # Default if EOF is encountered
        # Depending on the input data, our call to the decompressor may not
        # return any data. In this case, try again after reading another block.
        while True:
            if self._decompressor.eof:
                # Previous stream finished; any leftover bytes may start a
                # new concatenated stream.
                rawblock = (self._decompressor.unused_data or
                            self._fp.read(BUFFER_SIZE))
                if not rawblock:
                    break
                # Continue to next stream.
                self._decompressor = self._decomp_factory(
                    **self._decomp_args)
                try:
                    data = self._decompressor.decompress(rawblock, size)
                except self._trailing_error:
                    # Trailing data isn't a valid compressed stream; ignore it.
                    break
            else:
                if self._decompressor.needs_input:
                    rawblock = self._fp.read(BUFFER_SIZE)
                    if not rawblock:
                        raise EOFError("Compressed file ended before the "
                                       "end-of-stream marker was reached")
                else:
                    rawblock = b""
                data = self._decompressor.decompress(rawblock, size)
            if data:
                break
        if not data:
            # Genuine end of stream: record total size for SEEK_END seeks.
            self._eof = True
            self._size = self._pos
            return b""
        self._pos += len(data)
        return data

    # Rewind the file to the beginning of the data stream.
    def _rewind(self):
        self._fp.seek(0)
        self._eof = False
        self._pos = 0
        self._decompressor = self._decomp_factory(**self._decomp_args)

    def seek(self, offset, whence=io.SEEK_SET):
        # Recalculate offset as an absolute file position.
        if whence == io.SEEK_SET:
            pass
        elif whence == io.SEEK_CUR:
            offset = self._pos + offset
        elif whence == io.SEEK_END:
            # Seeking relative to EOF - we need to know the file's size.
            if self._size < 0:
                # Decompress everything once to learn the total size.
                while self.read(io.DEFAULT_BUFFER_SIZE):
                    pass
            offset = self._size + offset
        else:
            raise ValueError("Invalid value for whence: {}".format(whence))

        # Make it so that offset is the number of bytes to skip forward.
        if offset < self._pos:
            # Backwards seek: decompressors cannot go backwards, so restart.
            self._rewind()
        else:
            offset -= self._pos

        # Read and discard data until we reach the desired position.
        while offset > 0:
            data = self.read(min(io.DEFAULT_BUFFER_SIZE, offset))
            if not data:
                break
            offset -= len(data)

        return self._pos

    def tell(self):
        """Return the current file position."""
        return self._pos

View file

@ -1,163 +0,0 @@
"""Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
           'interrupt_main', 'LockType']

# A dummy value
TIMEOUT_MAX = 2**31

# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).

# All errors raised by this module are reported via RuntimeError, the same
# object the real _thread module exposes as _thread.error.
error = RuntimeError
def start_new_thread(function, args, kwargs=None):
    """Dummy implementation of _thread.start_new_thread().

    Compatibility is maintained by making sure that ``args`` is a
    tuple and ``kwargs`` is a dictionary. If an exception is raised
    and it is SystemExit (which can be done by _thread.exit()) it is
    caught and nothing is done; all other exceptions are printed out
    by using traceback.print_exc().

    If the executed function calls interrupt_main the KeyboardInterrupt will be
    raised when the function returns.

    Raises TypeError if args is not a tuple or kwargs is not a dict.
    """
    if kwargs is None:
        # Use a None sentinel instead of a mutable {} default argument.
        kwargs = {}
    # isinstance() (rather than an exact type() comparison) also accepts
    # tuple/dict subclasses, matching the real _thread module's checks.
    if not isinstance(args, tuple):
        raise TypeError("2nd arg must be a tuple")
    if not isinstance(kwargs, dict):
        raise TypeError("3rd arg must be a dict")
    global _main
    _main = False
    try:
        function(*args, **kwargs)
    except SystemExit:
        pass
    except:
        # Mirror real thread behaviour: report the traceback but never
        # propagate an exception out of the "thread".
        import traceback
        traceback.print_exc()
    _main = True
    global _interrupt
    if _interrupt:
        # interrupt_main() was called while the function ran; deliver it now.
        _interrupt = False
        raise KeyboardInterrupt
def exit():
    """Dummy implementation of _thread.exit(); simply raises SystemExit."""
    raise SystemExit()
def get_ident():
    """Dummy implementation of _thread.get_ident().

    This module is only used when the real _thread module is missing, so
    the process is guaranteed to be single-threaded and a fixed identifier
    can be returned for the one and only "thread".
    """
    return -1
def allocate_lock():
    """Dummy implementation of _thread.allocate_lock(); returns a new LockType."""
    return LockType()
def stack_size(size=None):
    """Dummy implementation of _thread.stack_size()."""
    if size is None:
        # Querying is allowed; 0 means "platform default".
        return 0
    raise error("setting thread stack size not supported")
def _set_sentinel():
    """Dummy implementation of _thread._set_sentinel(); returns a fresh lock."""
    return LockType()
class LockType(object):
    """Class implementing dummy implementation of _thread.LockType.

    Compatibility is maintained by maintaining self.locked_status
    which is a boolean that stores the state of the lock. Pickling of
    the lock, though, should not be done since if the _thread module is
    then used with an unpickled ``lock()`` from here problems could
    occur from this class not having atomic methods.
    """

    def __init__(self):
        # The lock starts out released.
        self.locked_status = False

    def acquire(self, waitflag=None, timeout=-1):
        """Dummy implementation of acquire().

        A blocking acquire (waitflag omitted or truthy) always "succeeds"
        immediately, since there is no other thread to hold the lock.  A
        non-blocking acquire succeeds only if the lock is currently free;
        otherwise a positive timeout is slept off and False is returned.
        This keeps threading.Condition's assertions satisfied.
        """
        if waitflag is None or waitflag:
            # Blocking call: grab the lock unconditionally.
            self.locked_status = True
            return True
        if not self.locked_status:
            # Non-blocking call on a free lock.
            self.locked_status = True
            return True
        # Non-blocking call on a held lock: honour the timeout, then fail.
        if timeout > 0:
            import time
            time.sleep(timeout)
        return False

    __enter__ = acquire

    def __exit__(self, typ, val, tb):
        self.release()

    def release(self):
        """Release the dummy lock."""
        # XXX Perhaps shouldn't actually bother to test? Could lead
        # to problems for complex, threaded code.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        return self.locked_status

    def __repr__(self):
        state = "locked" if self.locked_status else "unlocked"
        return "<%s %s.%s object at %s>" % (
            state,
            self.__class__.__module__,
            self.__class__.__qualname__,
            hex(id(self)),
        )
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True

def interrupt_main():
    """Set _interrupt flag to True to have start_new_thread raise
    KeyboardInterrupt upon exiting."""
    global _interrupt
    if _main:
        # Not inside a simulated thread: deliver the interrupt immediately.
        raise KeyboardInterrupt
    _interrupt = True

View file

@ -1,395 +0,0 @@
"""Shared support for scanning document type declarations in HTML and XHTML.
This module is used as a foundation for the html.parser module. It has no
documented public API and should not be used directly.
"""
import re
# Pre-compiled matchers for the pieces of declaration syntax; compiled once
# at import time and bound as plain callables.
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')

# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf

_msmarkedsectionclose = re.compile(r']\s*>')

# Keep the module namespace clean; the compiled matchers above are all we need.
del re
class ParserBase:
    """Parser base class which provides some common support methods used
    by the SGML/HTML and XHTML parsers.

    Convention used throughout: scanning methods take a buffer offset and
    return the offset just past what they consumed, or -1 when the buffer
    ends before the construct is complete ("incomplete").
    """

    def __init__(self):
        if self.__class__ is ParserBase:
            # This class is abstract; it must be subclassed to be useful.
            raise RuntimeError(
                "_markupbase.ParserBase must be subclassed")

    def error(self, message):
        # Subclasses are expected to override this with something that raises;
        # callers below rely on error() not returning normally.
        raise NotImplementedError(
            "subclasses of ParserBase must override error()")

    def reset(self):
        self.lineno = 1
        self.offset = 0

    def getpos(self):
        """Return current line number and offset."""
        return self.lineno, self.offset

    # Internal -- update line number and offset.  This should be
    # called for each piece of data exactly once, in order -- in other
    # words the concatenation of all the input strings to this
    # function should be exactly the entire input.
    def updatepos(self, i, j):
        if i >= j:
            return j
        rawdata = self.rawdata
        nlines = rawdata.count("\n", i, j)
        if nlines:
            self.lineno = self.lineno + nlines
            pos = rawdata.rindex("\n", i, j) # Should not fail
            self.offset = j-(pos+1)
        else:
            self.offset = self.offset + j-i
        return j

    _decl_otherchars = ''

    # Internal -- parse declaration (for use by subclasses).
    def parse_declaration(self, i):
        # This is some sort of declaration; in "HTML as
        # deployed," this should only be the document type
        # declaration ("<!DOCTYPE html...>").
        # ISO 8879:1986, however, has more complex
        # declaration syntax for elements in <!...>, including:
        # --comment--
        # [marked section]
        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
        # ATTLIST, NOTATION, SHORTREF, USEMAP,
        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
        rawdata = self.rawdata
        j = i + 2
        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
        if rawdata[j:j+1] == ">":
            # the empty comment <!>
            return j + 1
        if rawdata[j:j+1] in ("-", ""):
            # Start of comment followed by buffer boundary,
            # or just a buffer boundary.
            return -1
        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
        n = len(rawdata)
        if rawdata[j:j+2] == '--': #comment
            # Locate --.*-- as the body of the comment
            return self.parse_comment(i)
        elif rawdata[j] == '[': #marked section
            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
            # Note that this is extended by Microsoft Office "Save as Web" function
            # to include [if...] and [endif].
            return self.parse_marked_section(i)
        else: #all other declaration elements
            decltype, j = self._scan_name(j, i)
        if j < 0:
            return j
        if decltype == "doctype":
            self._decl_otherchars = ''
        while j < n:
            c = rawdata[j]
            if c == ">":
                # end of declaration syntax
                data = rawdata[i+2:j]
                if decltype == "doctype":
                    self.handle_decl(data)
                else:
                    # According to the HTML5 specs sections "8.2.4.44 Bogus
                    # comment state" and "8.2.4.45 Markup declaration open
                    # state", a comment token should be emitted.
                    # Calling unknown_decl provides more flexibility though.
                    self.unknown_decl(data)
                return j + 1
            if c in "\"'":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1 # incomplete
                j = m.end()
            elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
                name, j = self._scan_name(j, i)
            elif c in self._decl_otherchars:
                j = j + 1
            elif c == "[":
                # this could be handled in a separate doctype parser
                if decltype == "doctype":
                    j = self._parse_doctype_subset(j + 1, i)
                elif decltype in {"attlist", "linktype", "link", "element"}:
                    # must tolerate []'d groups in a content model in an element declaration
                    # also in data attribute specifications of attlist declaration
                    # also link type declaration subsets in linktype declarations
                    # also link attribute specification lists in link declarations
                    self.error("unsupported '[' char in %s declaration" % decltype)
                else:
                    self.error("unexpected '[' char in declaration")
            else:
                self.error(
                    "unexpected %r char in declaration" % rawdata[j])
            if j < 0:
                return j
        return -1 # incomplete

    # Internal -- parse a marked section
    # Override this to handle MS-word extension syntax <![if word]>content<![endif]>
    def parse_marked_section(self, i, report=1):
        rawdata= self.rawdata
        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
        sectName, j = self._scan_name( i+3, i )
        if j < 0:
            return j
        if sectName in {"temp", "cdata", "ignore", "include", "rcdata"}:
            # look for standard ]]> ending
            match= _markedsectionclose.search(rawdata, i+3)
        elif sectName in {"if", "else", "endif"}:
            # look for MS Office ]> ending
            match= _msmarkedsectionclose.search(rawdata, i+3)
        else:
            self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.unknown_decl(rawdata[i+3: j])
        return match.end(0)

    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i, report=1):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            self.error('unexpected call to parse_comment()')
        match = _commentclose.search(rawdata, i+4)
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.handle_comment(rawdata[i+4: j])
        return match.end(0)

    # Internal -- scan past the internal subset in a <!DOCTYPE declaration,
    # returning the index just past any whitespace following the trailing ']'.
    def _parse_doctype_subset(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        j = i
        while j < n:
            c = rawdata[j]
            if c == "<":
                s = rawdata[j:j+2]
                if s == "<":
                    # end of buffer; incomplete
                    return -1
                if s != "<!":
                    self.updatepos(declstartpos, j + 1)
                    self.error("unexpected char in internal subset (in %r)" % s)
                if (j + 2) == n:
                    # end of buffer; incomplete
                    return -1
                if (j + 4) > n:
                    # end of buffer; incomplete
                    return -1
                if rawdata[j:j+4] == "<!--":
                    j = self.parse_comment(j, report=0)
                    if j < 0:
                        return j
                    continue
                name, j = self._scan_name(j + 2, declstartpos)
                if j == -1:
                    return -1
                if name not in {"attlist", "element", "entity", "notation"}:
                    self.updatepos(declstartpos, j + 2)
                    self.error(
                        "unknown declaration %r in internal subset" % name)
                # handle the individual names
                meth = getattr(self, "_parse_doctype_" + name)
                j = meth(j, declstartpos)
                if j < 0:
                    return j
            elif c == "%":
                # parameter entity reference
                if (j + 1) == n:
                    # end of buffer; incomplete
                    return -1
                s, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                if rawdata[j] == ";":
                    j = j + 1
            elif c == "]":
                j = j + 1
                while j < n and rawdata[j].isspace():
                    j = j + 1
                if j < n:
                    if rawdata[j] == ">":
                        return j
                    self.updatepos(declstartpos, j)
                    self.error("unexpected char after internal subset")
                else:
                    return -1
            elif c.isspace():
                j = j + 1
            else:
                self.updatepos(declstartpos, j)
                self.error("unexpected char %r in internal subset" % c)
        # end of buffer reached
        return -1

    # Internal -- scan past <!ELEMENT declarations
    def _parse_doctype_element(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j == -1:
            return -1
        # style content model; just skip until '>'
        rawdata = self.rawdata
        if '>' in rawdata[j:]:
            return rawdata.find(">", j) + 1
        return -1

    # Internal -- scan past <!ATTLIST declarations
    def _parse_doctype_attlist(self, i, declstartpos):
        rawdata = self.rawdata
        name, j = self._scan_name(i, declstartpos)
        c = rawdata[j:j+1]
        if c == "":
            return -1
        if c == ">":
            return j + 1
        while 1:
            # scan a series of attribute descriptions; simplified:
            #   name type [value] [#constraint]
            name, j = self._scan_name(j, declstartpos)
            if j < 0:
                return j
            c = rawdata[j:j+1]
            if c == "":
                return -1
            if c == "(":
                # an enumerated type; look for ')'
                if ")" in rawdata[j:]:
                    j = rawdata.find(")", j) + 1
                else:
                    return -1
                while rawdata[j:j+1].isspace():
                    j = j + 1
                if not rawdata[j:]:
                    # end of buffer, incomplete
                    return -1
            else:
                name, j = self._scan_name(j, declstartpos)
            c = rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == "#":
                if rawdata[j:] == "#":
                    # end of buffer
                    return -1
                name, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == '>':
                # all done
                return j + 1

    # Internal -- scan past <!NOTATION declarations
    def _parse_doctype_notation(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j < 0:
            return j
        rawdata = self.rawdata
        while 1:
            c = rawdata[j:j+1]
            if not c:
                # end of buffer; incomplete
                return -1
            if c == '>':
                return j + 1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1
                j = m.end()
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan past <!ENTITY declarations
    def _parse_doctype_entity(self, i, declstartpos):
        rawdata = self.rawdata
        if rawdata[i:i+1] == "%":
            # Parameter entity form: skip whitespace after the '%'.
            j = i + 1
            while 1:
                c = rawdata[j:j+1]
                if not c:
                    return -1
                if c.isspace():
                    j = j + 1
                else:
                    break
        else:
            j = i
        name, j = self._scan_name(j, declstartpos)
        if j < 0:
            return j
        while 1:
            c = self.rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1 # incomplete
            elif c == ">":
                return j + 1
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan a name token and the new position and the token, or
    # return -1 if we've reached the end of the buffer.
    def _scan_name(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = _declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1 # end of buffer
            return name.lower(), m.end()
        else:
            self.updatepos(declstartpos, i)
            self.error("expected name token at %r"
                       % rawdata[declstartpos:declstartpos+20])

    # To be overridden -- handlers for unknown objects
    def unknown_decl(self, data):
        pass

View file

@ -1,502 +0,0 @@
"""Shared OS X support functions."""
import os
import re
import sys
__all__ = [
    'compiler_fixup',
    'customize_config_vars',
    'customize_compiler',
    'get_platform_osx',
]

# configuration variables that may contain universal build flags,
# like "-arch" or "-isdkroot", that may need customization for
# the user environment
_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS',
                          'BLDSHARED', 'LDSHARED', 'CC', 'CXX',
                          'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
                          'PY_CORE_CFLAGS')

# configuration variables that may contain compiler calls
_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX')

# prefix added to original configuration variable names
# (used to stash pristine values before modification; see _save_modified_value)
_INITPRE = '_OSX_SUPPORT_INITIAL_'
def _find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
return executable
def _read_output(commandstring):
    """Output from successful command execution or None"""
    # Similar to os.popen(commandstring, "r").read(),
    # but without actually using os.popen because that
    # function is not usable during python bootstrap.
    # tempfile is also not available then.
    import contextlib
    try:
        import tempfile
        fp = tempfile.NamedTemporaryFile()
    except ImportError:
        # Bootstrap fallback: tempfile may not be importable yet, so use a
        # pid-stamped file under /tmp instead.
        fp = open("/tmp/_osx_support.%s"%(
            os.getpid(),), "w+b")

    with contextlib.closing(fp) as fp:
        # Run the command with stdout redirected into the temp file;
        # os.system() returns 0 on success, in which case we read it back.
        cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
        return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
def _find_build_tool(toolname):
    """Find a build tool on current path or using xcrun"""
    # Try the shell search path first, then ask xcrun to locate the tool
    # inside a selected Xcode; return '' when neither works.
    located = _find_executable(toolname)
    if not located:
        located = _read_output("/usr/bin/xcrun -find %s" % (toolname,))
    return located or ''
# Cached result of _get_system_version(); None means "not determined yet",
# '' means "could not be determined".
_SYSTEM_VERSION = None

def _get_system_version():
    """Return the OS X system version as a string"""
    # Reading this plist is a documented way to get the system
    # version (see the documentation for the Gestalt Manager)
    # We avoid using platform.mac_ver to avoid possible bootstrap issues during
    # the build of Python itself (distutils is used to build standard library
    # extensions).
    global _SYSTEM_VERSION
    if _SYSTEM_VERSION is None:
        _SYSTEM_VERSION = ''
        try:
            f = open('/System/Library/CoreServices/SystemVersion.plist')
        except OSError:
            # We're on a plain darwin box, fall back to the default
            # behaviour.
            pass
        else:
            try:
                m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
                              r'<string>(.*?)</string>', f.read())
            finally:
                f.close()
            if m is not None:
                # Keep only the major.minor components, e.g. '10.15'.
                _SYSTEM_VERSION = '.'.join(m.group(1).split('.')[:2])
            # else: fall back to the default behaviour
    return _SYSTEM_VERSION
def _remove_original_values(_config_vars):
    """Remove original unmodified values for testing"""
    # This is needed for higher-level cross-platform tests of get_platform.
    # Snapshot the matching keys first so the dict is not mutated while
    # being scanned.
    saved = [name for name in _config_vars if name.startswith(_INITPRE)]
    for name in saved:
        del _config_vars[name]
def _save_modified_value(_config_vars, cv, newvalue):
    """Save modified and original unmodified value of configuration var"""
    original = _config_vars.get(cv, '')
    # Stash the pristine value exactly once, under the _INITPRE-prefixed key,
    # but only when the value actually changes.
    if original != newvalue and _INITPRE + cv not in _config_vars:
        _config_vars[_INITPRE + cv] = original
    _config_vars[cv] = newvalue
def _supports_universal_builds():
    """Returns True if universal builds are supported on this system"""
    # As an approximation, we assume that if we are running on 10.4 or above,
    # then we are running with an Xcode environment that supports universal
    # builds, in particular -isysroot and -arch arguments to the compiler. This
    # is in support of allowing 10.4 universal builds to run on 10.3.x systems.
    version_text = _get_system_version()
    if not version_text:
        return False
    try:
        version = tuple(int(part) for part in version_text.split('.'))
    except ValueError:
        # Unparseable version string: assume no universal support.
        return False
    return version >= (10, 4)
def _find_appropriate_compiler(_config_vars):
    """Find appropriate C compiler for extension module builds"""
    # Issue #13590:
    #    The OSX location for the compiler varies between OSX
    #    (or rather Xcode) releases.  With older releases (up-to 10.5)
    #    the compiler is in /usr/bin, with newer releases the compiler
    #    can only be found inside Xcode.app if the "Command Line Tools"
    #    are not installed.
    #
    #    Furthermore, the compiler that can be used varies between
    #    Xcode releases. Up to Xcode 4 it was possible to use 'gcc-4.2'
    #    as the compiler, after that 'clang' should be used because
    #    gcc-4.2 is either not present, or a copy of 'llvm-gcc' that
    #    miscompiles Python.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    # The CC config var might contain additional arguments.
    # Ignore them while searching.
    cc = oldcc = _config_vars['CC'].split()[0]
    if not _find_executable(cc):
        # Compiler is not found on the shell search PATH.
        # Now search for clang, first on PATH (if the Command LIne
        # Tools have been installed in / or if the user has provided
        # another location via CC).  If not found, try using xcrun
        # to find an uninstalled clang (within a selected Xcode).

        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself (and os.popen is
        # implemented on top of subprocess and is therefore not
        # usable as well)
        cc = _find_build_tool('clang')

    elif os.path.basename(cc).startswith('gcc'):
        # Compiler is GCC, check if it is LLVM-GCC
        # (the single quotes are shell-escaped so a CC path containing
        # quotes survives the os.system round-trip inside _read_output)
        data = _read_output("'%s' --version"
                             % (cc.replace("'", "'\"'\"'"),))
        if data and 'llvm-gcc' in data:
            # Found LLVM-GCC, fall back to clang
            cc = _find_build_tool('clang')

    if not cc:
        raise SystemError(
               "Cannot locate working compiler")

    if cc != oldcc:
        # Found a replacement compiler.
        # Modify config vars using new compiler, if not already explicitly
        # overridden by an env variable, preserving additional arguments.
        for cv in _COMPILER_CONFIG_VARS:
            if cv in _config_vars and cv not in os.environ:
                cv_split = _config_vars[cv].split()
                # CXX gets the C++ driver ('clang++'), the rest the C driver.
                cv_split[0] = cc if cv != 'CXX' else cc + '++'
                _save_modified_value(_config_vars, cv, ' '.join(cv_split))

    return _config_vars
def _remove_universal_flags(_config_vars):
    """Remove all universal build arguments from config vars.

    Strips '-arch XXX' and '-isysroot PATH' from every universal-build
    config variable, saving the original value via _save_modified_value.
    Returns the (mutated) mapping.
    """
    for cv in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var
        if cv in _config_vars and cv not in os.environ:
            flags = _config_vars[cv]
            # Bug fix: the original passed re.ASCII as re.sub()'s fourth
            # *positional* argument, which is `count`, not `flags` — it
            # silently limited the number of substitutions instead of
            # setting the ASCII flag. Pass it by keyword, and use raw
            # strings so '\s'/'\w' are not invalid string escapes.
            flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
            flags = re.sub(r'-isysroot [^ \t]*', ' ', flags)
            _save_modified_value(_config_vars, cv, flags)

    return _config_vars
def _remove_unsupported_archs(_config_vars):
    """Remove any unsupported archs from config vars"""
    # Different Xcode releases support different sets for '-arch'
    # flags. In particular, Xcode 4.x no longer supports the
    # PPC architectures.
    #
    # This code automatically removes '-arch ppc' and '-arch ppc64'
    # when these are not supported. That makes it possible to
    # build extensions on OSX 10.7 and later with the prebuilt
    # 32-bit installer on the python.org website.

    # skip checks if the compiler was overridden with a CC env variable
    if 'CC' in os.environ:
        return _config_vars

    if re.search('-arch\s+ppc', _config_vars['CFLAGS']) is not None:
        # NOTE: Cannot use subprocess here because of bootstrap
        # issues when building Python itself
        # Probe: try to compile a trivial translation unit for ppc; a
        # nonzero exit status means the toolchain rejected '-arch ppc'.
        status = os.system(
            """echo 'int main{};' | """
            """'%s' -c -arch ppc -x c -o /dev/null /dev/null 2>/dev/null"""
            %(_config_vars['CC'].replace("'", "'\"'\"'"),))
        if status:
            # The compile failed for some reason.  Because of differences
            # across Xcode and compiler versions, there is no reliable way
            # to be sure why it failed.  Assume here it was due to lack of
            # PPC support and remove the related '-arch' flags from each
            # config variables not explicitly overridden by an environment
            # variable.  If the error was for some other reason, we hope the
            # failure will show up again when trying to compile an extension
            # module.
            for cv in _UNIVERSAL_CONFIG_VARS:
                if cv in _config_vars and cv not in os.environ:
                    flags = _config_vars[cv]
                    flags = re.sub('-arch\s+ppc\w*\s', ' ', flags)
                    _save_modified_value(_config_vars, cv, flags)

    return _config_vars
def _override_all_archs(_config_vars):
    """Allow override of all archs with ARCHFLAGS env var.

    When ARCHFLAGS is set in the environment, every '-arch XXX' in each
    universal-build config var is replaced by the ARCHFLAGS value.
    Returns the (mutated) mapping.
    """
    # NOTE: This name was introduced by Apple in OSX 10.5 and
    # is used by several scripting languages distributed with
    # that OS release.
    if 'ARCHFLAGS' in os.environ:
        arch = os.environ['ARCHFLAGS']
        for cv in _UNIVERSAL_CONFIG_VARS:
            if cv in _config_vars and '-arch' in _config_vars[cv]:
                flags = _config_vars[cv]
                # Fix: use a raw string so '\s'/'\w' are regex escapes, not
                # invalid string escapes (DeprecationWarning since 3.6).
                flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                flags = flags + ' ' + arch
                _save_modified_value(_config_vars, cv, flags)

    return _config_vars
def _check_for_unavailable_sdk(_config_vars):
    """Remove references to any SDKs not available"""
    # If we're on OSX 10.5 or later and the user tries to
    # compile an extension using an SDK that is not present
    # on the current machine it is better to not use an SDK
    # than to fail.  This is particularly important with
    # the standalone Command Line Tools alternative to a
    # full-blown Xcode install since the CLT packages do not
    # provide SDKs.  If the SDK is not present, it is assumed
    # that the header files and dev libs have been installed
    # to /usr and /System/Library by either a standalone CLT
    # package or the CLT component within Xcode.
    sdk_match = re.search(r'-isysroot\s+(\S+)',
                          _config_vars.get('CFLAGS', ''))
    if sdk_match is None:
        return _config_vars

    sdk_path = sdk_match.group(1)
    if os.path.exists(sdk_path):
        return _config_vars

    # SDK is configured but missing: strip the -isysroot references.
    for cv in _UNIVERSAL_CONFIG_VARS:
        # Do not alter a config var explicitly overridden by env var
        if cv in _config_vars and cv not in os.environ:
            stripped = re.sub(r'-isysroot\s+\S+(?:\s|$)', ' ',
                              _config_vars[cv])
            _save_modified_value(_config_vars, cv, stripped)

    return _config_vars
def compiler_fixup(compiler_so, cc_args):
    """
    This function will strip '-isysroot PATH' and '-arch ARCH' from the
    compile flags if the user has specified one them in extra_compile_flags.

    This is needed because '-arch ARCH' adds another architecture to the
    build, without a way to remove an architecture. Furthermore GCC will
    barf if multiple '-isysroot' arguments are present.
    """
    stripArch = stripSysroot = False

    # Work on a copy so the caller's list is not mutated.
    compiler_so = list(compiler_so)

    if not _supports_universal_builds():
        # OSX before 10.4.0, these don't support -arch and -isysroot at
        # all.
        stripArch = stripSysroot = True
    else:
        stripArch = '-arch' in cc_args
        stripSysroot = '-isysroot' in cc_args

    if stripArch or 'ARCHFLAGS' in os.environ:
        # Remove every '-arch XXX' pair from the compiler command.
        while True:
            try:
                index = compiler_so.index('-arch')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    if 'ARCHFLAGS' in os.environ and not stripArch:
        # User specified different -arch flags in the environ,
        # see also distutils.sysconfig
        compiler_so = compiler_so + os.environ['ARCHFLAGS'].split()

    if stripSysroot:
        # Remove every '-isysroot PATH' pair from the compiler command.
        while True:
            try:
                index = compiler_so.index('-isysroot')
                # Strip this argument and the next one:
                del compiler_so[index:index+2]
            except ValueError:
                break

    # Check if the SDK that is used during compilation actually exists,
    # the universal build requires the usage of a universal SDK and not all
    # users have that installed by default.
    sysroot = None
    if '-isysroot' in cc_args:
        idx = cc_args.index('-isysroot')
        sysroot = cc_args[idx+1]
    elif '-isysroot' in compiler_so:
        idx = compiler_so.index('-isysroot')
        sysroot = compiler_so[idx+1]

    if sysroot and not os.path.isdir(sysroot):
        # Warn but proceed: the compile may still fail later.
        from distutils import log
        log.warn("Compiling with an SDK that doesn't seem to exist: %s",
                 sysroot)
        log.warn("Please check your Xcode installation")

    return compiler_so
def customize_config_vars(_config_vars):
    """Customize Python build configuration variables.

    Called internally from sysconfig with a mutable mapping
    containing name/value pairs parsed from the configured
    makefile used to build this interpreter.  Returns
    the mapping updated as needed to reflect the environment
    in which the interpreter is running; in the case of
    a Python from a binary installer, the installed
    environment may be very different from the build
    environment, i.e. different OS levels, different
    built tools, different available CPU architectures.

    This customization is performed whenever
    distutils.sysconfig.get_config_vars() is first
    called.  It may be used in environments where no
    compilers are present, i.e. when installing pure
    Python dists.  Customization of compiler paths
    and detection of unavailable archs is deferred
    until the first extension module build is
    requested (in distutils.sysconfig.customize_compiler).

    Currently called from distutils.sysconfig
    """
    # Pre-10.4 toolchains cannot handle -arch/-isysroot: strip them, which
    # is needed when a universal python build is used on a 10.3 system.
    if not _supports_universal_builds():
        _remove_universal_flags(_config_vars)

    # ARCHFLAGS in the environment overrides every configured -arch flag.
    _override_all_archs(_config_vars)

    # Drop -isysroot references to SDKs that are not installed here.
    _check_for_unavailable_sdk(_config_vars)

    return _config_vars
def customize_compiler(_config_vars):
    """Customize compiler path and configuration variables.

    This customization is performed when the first
    extension module build is requested
    in distutils.sysconfig.customize_compiler).
    """
    # Pick a working compiler (may substitute clang for a broken gcc).
    _find_appropriate_compiler(_config_vars)

    # Drop ppc -arch flags when the toolchain cannot target PPC.
    _remove_unsupported_archs(_config_vars)

    # ARCHFLAGS in the environment overrides every configured -arch flag.
    _override_all_archs(_config_vars)

    return _config_vars
def get_platform_osx(_config_vars, osname, release, machine):
    """Filter values for get_platform().

    Returns an (osname, release, machine) triple adjusted for macOS:
    'macosx' plus the deployment-target release and an architecture tag
    derived from the configured -arch flags (e.g. 'fat', 'intel',
    'universal', or a single arch name).
    """
    # called from get_platform() in sysconfig and distutils.util
    #
    # For our purposes, we'll assume that the system version from
    # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
    # to.  This makes the compatibility story a bit more sane because the
    # machine is going to compile and link as if it were
    # MACOSX_DEPLOYMENT_TARGET.
    macver = _config_vars.get('MACOSX_DEPLOYMENT_TARGET', '')
    macrelease = _get_system_version() or macver
    macver = macver or macrelease

    if macver:
        release = macver
        osname = "macosx"

        # Use the original CFLAGS value, if available, so that we
        # return the same machine type for the platform string.
        # Otherwise, distutils may consider this a cross-compiling
        # case and disallow installs.
        cflags = _config_vars.get(_INITPRE+'CFLAGS',
                                  _config_vars.get('CFLAGS', ''))
        if macrelease:
            try:
                macrelease = tuple(int(i) for i in macrelease.split('.')[0:2])
            except ValueError:
                macrelease = (10, 0)
        else:
            # assume no universal support
            macrelease = (10, 0)

        if (macrelease >= (10, 4)) and '-arch' in cflags.strip():
            # The universal build will build fat binaries, but not on
            # systems before 10.4
            machine = 'fat'

            # Fix: raw string so '\s'/'\S' are regex escapes, not invalid
            # string escapes (DeprecationWarning since 3.6).
            archs = re.findall(r'-arch\s+(\S+)', cflags)
            archs = tuple(sorted(set(archs)))

            if len(archs) == 1:
                machine = archs[0]
            elif archs == ('i386', 'ppc'):
                machine = 'fat'
            elif archs == ('i386', 'x86_64'):
                machine = 'intel'
            elif archs == ('i386', 'ppc', 'x86_64'):
                machine = 'fat3'
            elif archs == ('ppc64', 'x86_64'):
                machine = 'fat64'
            elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
                machine = 'universal'
            else:
                raise ValueError(
                   "Don't know machine value for archs=%r" % (archs,))

        elif machine == 'i386':
            # On OSX the machine type returned by uname is always the
            # 32-bit variant, even if the executable architecture is
            # the 64-bit variant
            if sys.maxsize >= 2**32:
                machine = 'x86_64'

        elif machine in ('PowerPC', 'Power_Macintosh'):
            # Pick a sane name for the PPC architecture.
            # See 'i386' case
            if sys.maxsize >= 2**32:
                machine = 'ppc64'
            else:
                machine = 'ppc'

    return (osname, release, machine)

File diff suppressed because it is too large Load diff

View file

@ -1,103 +0,0 @@
"""
The objects used by the site module to add custom builtins.
"""
# Those objects are almost immortal and they keep a reference to their module
# globals. Defining them in the site module would keep too many references
# alive.
# Note this means this module should also avoid keep things alive in its
# globals.
import sys
class Quitter(object):
    """Callable object bound to the 'exit'/'quit' builtins.

    Repr explains how to leave the interpreter; calling it raises
    SystemExit after closing stdin so wrapping shells notice.
    """

    def __init__(self, name, eof):
        self.name = name
        self.eof = eof

    def __repr__(self):
        return 'Use {}() or {} to exit'.format(self.name, self.eof)

    def __call__(self, code=None):
        # Shells like IDLE catch the SystemExit, but listen when their
        # stdin wrapper is closed.
        try:
            sys.stdin.close()
        except:
            pass
        raise SystemExit(code)
class _Printer(object):
"""interactive prompt objects for printing the license text, a list of
contributors and the copyright notice."""
MAXLINES = 23
def __init__(self, name, data, files=(), dirs=()):
import os
self.__name = name
self.__data = data
self.__lines = None
self.__filenames = [os.path.join(dir, filename)
for dir in dirs
for filename in files]
def __setup(self):
if self.__lines:
return
data = None
for filename in self.__filenames:
try:
with open(filename, "r") as fp:
data = fp.read()
break
except OSError:
pass
if not data:
data = self.__data
self.__lines = data.split('\n')
self.__linecnt = len(self.__lines)
def __repr__(self):
self.__setup()
if len(self.__lines) <= self.MAXLINES:
return "\n".join(self.__lines)
else:
return "Type %s() to see the full %s text" % ((self.__name,)*2)
def __call__(self):
self.__setup()
prompt = 'Hit Return for more, or q (and Return) to quit: '
lineno = 0
while 1:
try:
for i in range(lineno, lineno + self.MAXLINES):
print(self.__lines[i])
except IndexError:
break
else:
lineno += self.MAXLINES
key = None
while key is None:
key = input(prompt)
if key not in ('', 'q'):
key = None
if key == 'q':
break
class _Helper(object):
"""Define the builtin 'help'.
This is a wrapper around pydoc.help that provides a helpful message
when 'help' is typed at the Python interactive prompt.
Calling help() at the Python prompt starts an interactive help session.
Calling help(thing) prints help for the python object 'thing'.
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)

View file

@ -1,521 +0,0 @@
"""Strptime-related classes and functions.
CLASSES:
LocaleTime -- Discovers and stores locale-specific time information
TimeRE -- Creates regexes for pattern matching a string of text containing
time information
FUNCTIONS:
_getlang -- Figure out what language is being used for the locale
strptime -- Calculates the time struct represented by the passed-in string
"""
import time
import locale
import calendar
from re import compile as re_compile
from re import IGNORECASE
from re import escape as re_escape
from datetime import (date as datetime_date,
timedelta as datetime_timedelta,
timezone as datetime_timezone)
try:
from _thread import allocate_lock as _thread_allocate_lock
except ImportError:
from _dummy_thread import allocate_lock as _thread_allocate_lock
__all__ = []
def _getlang():
# Figure out what the current language is set to.
return locale.getlocale(locale.LC_TIME)
class LocaleTime(object):
    """Stores and handles locale-specific information related to time.

    ATTRIBUTES:
        f_weekday -- full weekday names (7-item list)
        a_weekday -- abbreviated weekday names (7-item list)
        f_month -- full month names (13-item list; dummy value in [0], which
                    is added by code)
        a_month -- abbreviated month names (13-item list, dummy value in
                    [0], which is added by code)
        am_pm -- AM/PM representation (2-item list)
        LC_date_time -- format string for date/time representation (string)
        LC_date -- format string for date representation (string)
        LC_time -- format string for time representation (string)
        timezone -- daylight- and non-daylight-savings timezone representation
                    (2-item list of sets)
        lang -- Language used by instance (2-item tuple)
    """

    def __init__(self):
        """Set all attributes.

        Order of methods called matters for dependency reasons.

        The locale language is set at the offset and then checked again before
        exiting.  This is to make sure that the attributes were not set with a
        mix of information from more than one locale.  This would most likely
        happen when using threads where one thread calls a locale-dependent
        function while another thread changes the locale while the function in
        the other thread is still running.  Proper coding would call for
        locks to prevent changing the locale while locale-dependent code is
        running.  The check here is done in case someone does not think about
        doing this.

        Only other possible issue is if someone changed the timezone and did
        not call tz.tzset .  That is an issue for the programmer, though,
        since changing the timezone is worthless without that call.
        """
        self.lang = _getlang()
        self.__calc_weekday()
        self.__calc_month()
        self.__calc_am_pm()
        self.__calc_timezone()
        self.__calc_date_time()
        # Re-check that neither the locale nor the timezone changed while
        # the attributes above were being computed.
        if _getlang() != self.lang:
            raise ValueError("locale changed during initialization")
        if time.tzname != self.tzname or time.daylight != self.daylight:
            raise ValueError("timezone changed during initialization")

    def __pad(self, seq, front):
        # Add '' to seq to either the front (is True), else the back.
        seq = list(seq)
        if front:
            seq.insert(0, '')
        else:
            seq.append('')
        return seq

    def __calc_weekday(self):
        # Set self.a_weekday and self.f_weekday using the calendar
        # module.
        a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
        f_weekday = [calendar.day_name[i].lower() for i in range(7)]
        self.a_weekday = a_weekday
        self.f_weekday = f_weekday

    def __calc_month(self):
        # Set self.f_month and self.a_month using the calendar module.
        # Index 0 is calendar's empty dummy entry, kept so month numbers
        # (1-12) index directly.
        a_month = [calendar.month_abbr[i].lower() for i in range(13)]
        f_month = [calendar.month_name[i].lower() for i in range(13)]
        self.a_month = a_month
        self.f_month = f_month

    def __calc_am_pm(self):
        # Set self.am_pm by using time.strftime().

        # The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
        # magical; just happened to have used it everywhere else where a
        # static date was needed.
        am_pm = []
        for hour in (1, 22):
            time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
            am_pm.append(time.strftime("%p", time_tuple).lower())
        self.am_pm = am_pm

    def __calc_date_time(self):
        # Set self.date_time, self.date, & self.time by using
        # time.strftime().

        # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
        # overloaded numbers is minimized.  The order in which searches for
        # values within the format string is very important; it eliminates
        # possible ambiguity for what something represents.
        time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
        date_time = [None, None, None]
        date_time[0] = time.strftime("%c", time_tuple).lower()
        date_time[1] = time.strftime("%x", time_tuple).lower()
        date_time[2] = time.strftime("%X", time_tuple).lower()
        replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
                    (self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
                    (self.a_month[3], '%b'), (self.am_pm[1], '%p'),
                    ('1999', '%Y'), ('99', '%y'), ('22', '%H'),
                    ('44', '%M'), ('55', '%S'), ('76', '%j'),
                    ('17', '%d'), ('03', '%m'), ('3', '%m'),
                    # '3' needed for when no leading zero.
                    ('2', '%w'), ('10', '%I')]
        replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
                                                for tz in tz_values])
        for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
            current_format = date_time[offset]
            for old, new in replacement_pairs:
                # Must deal with possible lack of locale info
                # manifesting itself as the empty string (e.g., Swedish's
                # lack of AM/PM info) or a platform returning a tuple of empty
                # strings (e.g., MacOS 9 having timezone as ('','')).
                if old:
                    current_format = current_format.replace(old, new)
            # If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
            # 2005-01-03 occurs before the first Monday of the year.  Otherwise
            # %U is used.
            time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
            if '00' in time.strftime(directive, time_tuple):
                U_W = '%W'
            else:
                U_W = '%U'
            date_time[offset] = current_format.replace('11', U_W)
        self.LC_date_time = date_time[0]
        self.LC_date = date_time[1]
        self.LC_time = date_time[2]

    def __calc_timezone(self):
        # Set self.timezone by using time.tzname.
        # Do not worry about possibility of time.tzname[0] == time.tzname[1]
        # and time.daylight; handle that in strptime.
        try:
            time.tzset()
        except AttributeError:
            # time.tzset is not available on all platforms (e.g. Windows).
            pass
        self.tzname = time.tzname
        self.daylight = time.daylight
        no_saving = frozenset({"utc", "gmt", self.tzname[0].lower()})
        if self.daylight:
            has_saving = frozenset({self.tzname[1].lower()})
        else:
            has_saving = frozenset()
        self.timezone = (no_saving, has_saving)
class TimeRE(dict):
    """Handle conversion from format directives to regexes."""

    def __init__(self, locale_time=None):
        """Create keys/values.

        Order of execution is important for dependency reasons.
        """
        if locale_time:
            self.locale_time = locale_time
        else:
            self.locale_time = LocaleTime()
        base = super()
        # Map each strftime directive to a named-group regex fragment.
        base.__init__({
            # The " \d" part of the regex is to make %c from ANSI C work
            'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
            'f': r"(?P<f>[0-9]{1,6})",
            'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
            'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
            'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
            'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
            'M': r"(?P<M>[0-5]\d|\d)",
            'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
            'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
            'w': r"(?P<w>[0-6])",
            # W is set below by using 'U'
            'y': r"(?P<y>\d\d)",
            #XXX: Does 'Y' need to worry about having less or more than
            #     4 digits?
            'Y': r"(?P<Y>\d\d\d\d)",
            'z': r"(?P<z>[+-]\d\d[0-5]\d)",
            'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
            'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
            'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
            'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
            'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
            'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
                                        for tz in tz_names),
                                'Z'),
            '%': '%'})
        # Composite directives are derived after the primitives exist.
        base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
        base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
        base.__setitem__('x', self.pattern(self.locale_time.LC_date))
        base.__setitem__('X', self.pattern(self.locale_time.LC_time))

    def __seqToRE(self, to_convert, directive):
        """Convert a list to a regex string for matching a directive.

        Want possible matching values to be from longest to shortest.  This
        prevents the possibility of a match occurring for a value that also
        a substring of a larger value that should have matched (e.g., 'abc'
        matching when 'abcdef' should have been the match).
        """
        to_convert = sorted(to_convert, key=len, reverse=True)
        # If every candidate is empty (missing locale info), there is
        # nothing to match on.
        for value in to_convert:
            if value != '':
                break
        else:
            return ''
        regex = '|'.join(re_escape(stuff) for stuff in to_convert)
        regex = '(?P<%s>%s' % (directive, regex)
        return '%s)' % regex

    def pattern(self, format):
        """Return regex pattern for the format string.

        Need to make sure that any characters that might be interpreted as
        regex syntax are escaped.
        """
        processed_format = ''
        # The sub() call escapes all characters that might be misconstrued
        # as regex syntax.  Cannot use re.escape since we have to deal with
        # format directives (%m, etc.).
        regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
        format = regex_chars.sub(r"\\\1", format)
        # Any run of whitespace in the format matches any run in the input.
        whitespace_replacement = re_compile(r'\s+')
        format = whitespace_replacement.sub(r'\\s+', format)
        # Splice in the regex fragment for each %X directive in turn.
        while '%' in format:
            directive_index = format.index('%')+1
            processed_format = "%s%s%s" % (processed_format,
                                           format[:directive_index-1],
                                           self[format[directive_index]])
            format = format[directive_index+1:]
        return "%s%s" % (processed_format, format)

    def compile(self, format):
        """Return a compiled re object for the format string."""
        return re_compile(self.pattern(format), IGNORECASE)
# Lock guarding the two module-level caches below; _strptime takes it
# before reading or rebuilding them.
_cache_lock = _thread_allocate_lock()
# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
# first!
_TimeRE_cache = TimeRE()
_CACHE_MAX_SIZE = 5     # Max number of regexes stored in _regex_cache
_regex_cache = {}       # maps format string -> compiled regex
def _calc_julian_from_U_or_W(year, week_of_year, day_of_week, week_starts_Mon):
"""Calculate the Julian day based on the year, week of the year, and day of
the week, with week_start_day representing whether the week of the year
assumes the week starts on Sunday or Monday (6 or 0)."""
first_weekday = datetime_date(year, 1, 1).weekday()
# If we are dealing with the %U directive (week starts on Sunday), it's
# easier to just shift the view to Sunday being the first day of the
# week.
if not week_starts_Mon:
first_weekday = (first_weekday + 1) % 7
day_of_week = (day_of_week + 1) % 7
# Need to watch out for a week 0 (when the first day of the year is not
# the same as that specified by %U or %W).
week_0_length = (7 - first_weekday) % 7
if week_of_year == 0:
return 1 + day_of_week - first_weekday
else:
days_to_week = week_0_length + (7 * (week_of_year - 1))
return 1 + days_to_week + day_of_week
def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Return a 2-tuple consisting of a time struct and an int containing
    the number of microseconds based on the input string and the
    format string."""
    for index, arg in enumerate([data_string, format]):
        if not isinstance(arg, str):
            msg = "strptime() argument {} must be str, not {}"
            raise TypeError(msg.format(index, type(arg)))
    global _TimeRE_cache, _regex_cache
    with _cache_lock:
        locale_time = _TimeRE_cache.locale_time
        # Invalidate the caches if the locale or timezone changed since
        # the cached TimeRE was built.
        if (_getlang() != locale_time.lang or
            time.tzname != locale_time.tzname or
            time.daylight != locale_time.daylight):
            _TimeRE_cache = TimeRE()
            _regex_cache.clear()
            locale_time = _TimeRE_cache.locale_time
        if len(_regex_cache) > _CACHE_MAX_SIZE:
            _regex_cache.clear()
        format_regex = _regex_cache.get(format)
        if not format_regex:
            try:
                format_regex = _TimeRE_cache.compile(format)
            # KeyError raised when a bad format is found; can be specified as
            # \\, in which case it was a stray % but with a space after it
            except KeyError as err:
                bad_directive = err.args[0]
                if bad_directive == "\\":
                    bad_directive = "%"
                del err
                raise ValueError("'%s' is a bad directive in format '%s'" %
                                 (bad_directive, format)) from None
            # IndexError only occurs when the format string is "%"
            except IndexError:
                raise ValueError("stray %% in format '%s'" % format) from None
            _regex_cache[format] = format_regex
    found = format_regex.match(data_string)
    if not found:
        raise ValueError("time data %r does not match format %r" %
                         (data_string, format))
    if len(data_string) != found.end():
        raise ValueError("unconverted data remains: %s" %
                          data_string[found.end():])

    # Defaults for fields the format may not supply.
    year = None
    month = day = 1
    hour = minute = second = fraction = 0
    tz = -1
    tzoffset = None
    # Default to -1 to signify that values not known; not critical to have,
    # though
    week_of_year = -1
    week_of_year_start = -1
    # weekday and julian defaulted to None so as to signal need to calculate
    # values
    weekday = julian = None
    found_dict = found.groupdict()
    for group_key in found_dict.keys():
        # Directives not explicitly handled below:
        #   c, x, X
        #      handled by making out of other directives
        #   U, W
        #      worthless without day of the week
        if group_key == 'y':
            year = int(found_dict['y'])
            # Open Group specification for strptime() states that a %y
            #value in the range of [00, 68] is in the century 2000, while
            #[69,99] is in the century 1900
            if year <= 68:
                year += 2000
            else:
                year += 1900
        elif group_key == 'Y':
            year = int(found_dict['Y'])
        elif group_key == 'm':
            month = int(found_dict['m'])
        elif group_key == 'B':
            month = locale_time.f_month.index(found_dict['B'].lower())
        elif group_key == 'b':
            month = locale_time.a_month.index(found_dict['b'].lower())
        elif group_key == 'd':
            day = int(found_dict['d'])
        elif group_key == 'H':
            hour = int(found_dict['H'])
        elif group_key == 'I':
            hour = int(found_dict['I'])
            ampm = found_dict.get('p', '').lower()
            # If there was no AM/PM indicator, we'll treat this like AM
            if ampm in ('', locale_time.am_pm[0]):
                # We're in AM so the hour is correct unless we're
                # looking at 12 midnight.
                # 12 midnight == 12 AM == hour 0
                if hour == 12:
                    hour = 0
            elif ampm == locale_time.am_pm[1]:
                # We're in PM so we need to add 12 to the hour unless
                # we're looking at 12 noon.
                # 12 noon == 12 PM == hour 12
                if hour != 12:
                    hour += 12
        elif group_key == 'M':
            minute = int(found_dict['M'])
        elif group_key == 'S':
            second = int(found_dict['S'])
        elif group_key == 'f':
            s = found_dict['f']
            # Pad to always return microseconds.
            s += "0" * (6 - len(s))
            fraction = int(s)
        elif group_key == 'A':
            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
        elif group_key == 'a':
            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
        elif group_key == 'w':
            weekday = int(found_dict['w'])
            # %w counts Sunday as 0; internally Monday is 0.
            if weekday == 0:
                weekday = 6
            else:
                weekday -= 1
        elif group_key == 'j':
            julian = int(found_dict['j'])
        elif group_key in ('U', 'W'):
            week_of_year = int(found_dict[group_key])
            if group_key == 'U':
                # U starts week on Sunday.
                week_of_year_start = 6
            else:
                # W starts week on Monday.
                week_of_year_start = 0
        elif group_key == 'z':
            z = found_dict['z']
            tzoffset = int(z[1:3]) * 60 + int(z[3:5])
            if z.startswith("-"):
                tzoffset = -tzoffset
        elif group_key == 'Z':
            # Since -1 is default value only need to worry about setting tz if
            # it can be something other than -1.
            found_zone = found_dict['Z'].lower()
            for value, tz_values in enumerate(locale_time.timezone):
                if found_zone in tz_values:
                    # Deal with bad locale setup where timezone names are the
                    # same and yet time.daylight is true; too ambiguous to
                    # be able to tell what timezone has daylight savings
                    if (time.tzname[0] == time.tzname[1] and
                       time.daylight and found_zone not in ("utc", "gmt")):
                        break
                    else:
                        tz = value
                        break
    leap_year_fix = False
    if year is None and month == 2 and day == 29:
        year = 1904  # 1904 is first leap year of 20th century
        leap_year_fix = True
    elif year is None:
        year = 1900
    # If we know the week of the year and what day of that week, we can figure
    # out the Julian day of the year.
    if julian is None and week_of_year != -1 and weekday is not None:
        week_starts_Mon = True if week_of_year_start == 0 else False
        julian = _calc_julian_from_U_or_W(year, week_of_year, weekday,
                                            week_starts_Mon)
        # A non-positive result means the day belongs to the previous year.
        if julian <= 0:
            year -= 1
            yday = 366 if calendar.isleap(year) else 365
            julian += yday
    # Cannot pre-calculate datetime_date() since can change in Julian
    # calculation and thus could have different value for the day of the week
    # calculation.
    if julian is None:
        # Need to add 1 to result since first day of the year is 1, not 0.
        julian = datetime_date(year, month, day).toordinal() - \
                  datetime_date(year, 1, 1).toordinal() + 1
    else:  # Assume that if they bothered to include Julian day it will
           # be accurate.
        datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
        year = datetime_result.year
        month = datetime_result.month
        day = datetime_result.day
    if weekday is None:
        weekday = datetime_date(year, month, day).weekday()
    # Add timezone info
    tzname = found_dict.get("Z")
    if tzoffset is not None:
        gmtoff = tzoffset * 60
    else:
        gmtoff = None

    if leap_year_fix:
        # the caller didn't supply a year but asked for Feb 29th. We couldn't
        # use the default of 1900 for computations. We set it back to ensure
        # that February 29th is smaller than March 1st.
        year = 1900

    return (year, month, day,
            hour, minute, second,
            weekday, julian, tz, tzname, gmtoff), fraction
def _strptime_time(data_string, format="%a %b %d %H:%M:%S %Y"):
    """Parse *data_string* according to *format* and return a
    time.struct_time."""
    parsed, _fraction = _strptime(data_string, format)
    return time.struct_time(parsed[:time._STRUCT_TM_ITEMS])
def _strptime_datetime(cls, data_string, format="%a %b %d %H:%M:%S %Y"):
    """Parse *data_string* according to *format* and return an instance
    of *cls* (a datetime-like class accepting the standard constructor
    arguments, plus an optional tzinfo)."""
    fields, fraction = _strptime(data_string, format)
    tzname, gmtoff = fields[-2:]
    ctor_args = fields[:6] + (fraction,)
    if gmtoff is not None:
        delta = datetime_timedelta(seconds=gmtoff)
        # A named offset produces a named timezone; otherwise just the offset.
        tz = datetime_timezone(delta, tzname) if tzname else datetime_timezone(delta)
        ctor_args += (tz,)
    return cls(*ctor_args)

View file

@ -1,246 +0,0 @@
"""Thread-local objects.
(Note that this module provides a Python version of the threading.local
class. Depending on the version of Python you're using, there may be a
faster one available. You should always import the `local` class from
`threading`.)
Thread-local objects support the management of thread-local data.
If you have data that you want to be local to a thread, simply create
a thread-local object and use its attributes:
>>> mydata = local()
>>> mydata.number = 42
>>> mydata.number
42
You can also access the local-object's dictionary:
>>> mydata.__dict__
{'number': 42}
>>> mydata.__dict__.setdefault('widgets', [])
[]
>>> mydata.widgets
[]
What's important about thread-local objects is that their data are
local to a thread. If we access the data in a different thread:
>>> log = []
>>> def f():
... items = sorted(mydata.__dict__.items())
... log.append(items)
... mydata.number = 11
... log.append(mydata.number)
>>> import threading
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[], 11]
we get different data. Furthermore, changes made in the other thread
don't affect data seen in this thread:
>>> mydata.number
42
Of course, values you get from a local object, including a __dict__
attribute, are for whatever thread was current at the time the
attribute was read. For that reason, you generally don't want to save
these values across threads, as they apply only to the thread they
came from.
You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
... initialized = False
... def __init__(self, **kw):
... if self.initialized:
... raise SystemError('__init__ called too many times')
... self.initialized = True
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
This can be useful to support default values, methods and
initialization. Note that if you define an __init__ method, it will be
called each time the local object is used in a separate thread. This
is necessary to initialize each thread's dictionary.
Now if we create a local object:
>>> mydata = MyLocal(color='red')
Now we have a default number:
>>> mydata.number
2
an initial color:
>>> mydata.color
'red'
>>> del mydata.color
And a method that operates on the data:
>>> mydata.squared()
4
As before, we can access the data in a separate thread:
>>> log = []
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
>>> log
[[('color', 'red'), ('initialized', True)], 11]
without affecting this thread's data:
>>> mydata.number
2
>>> mydata.color
Traceback (most recent call last):
...
AttributeError: 'MyLocal' object has no attribute 'color'
Note that subclasses can define slots, but they are not thread
local. They are shared across threads:
>>> class MyLocal(local):
... __slots__ = 'number'
>>> mydata = MyLocal()
>>> mydata.number = 42
>>> mydata.color = 'red'
So, the separate thread:
>>> thread = threading.Thread(target=f)
>>> thread.start()
>>> thread.join()
affects what we see:
>>> mydata.number
11
>>> del mydata
"""
from weakref import ref
from contextlib import contextmanager
__all__ = ["local"]
# We need to use objects from the threading module, but the threading
# module may also want to use our `local` class, if support for locals
# isn't compiled in to the `thread` module. This creates potential problems
# with circular imports. For that reason, we don't import `threading`
# until the bottom of this file (a hack sufficient to worm around the
# potential problems). Note that all platforms on CPython do have support
# for locals in the `thread` module, and there is no circular import problem
# then, so problems introduced by fiddling the order of imports here won't
# manifest.
class _localimpl:
    """A class managing thread-local dicts"""
    __slots__ = 'key', 'dicts', 'localargs', 'locallock', '__weakref__'

    def __init__(self):
        # The key used in the Thread objects' attribute dicts.
        # We keep it a string for speed but make it unlikely to clash with
        # a "real" attribute.
        self.key = '_threading_local._localimpl.' + str(id(self))
        # { id(Thread) -> (ref(Thread), thread-local dict) }
        self.dicts = {}

    def get_dict(self):
        """Return the dict for the current thread. Raises KeyError if none
        defined."""
        thread = current_thread()
        return self.dicts[id(thread)][1]

    def create_dict(self):
        """Create a new dict for the current thread, and return it."""
        localdict = {}
        key = self.key
        thread = current_thread()
        idt = id(thread)
        def local_deleted(_, key=key):
            # When the localimpl is deleted, remove the thread attribute.
            thread = wrthread()
            if thread is not None:
                del thread.__dict__[key]
        def thread_deleted(_, idt=idt):
            # When the thread is deleted, remove the local dict.
            # Note that this is suboptimal if the thread object gets
            # caught in a reference loop. We would like to be called
            # as soon as the OS-level thread ends instead.
            local = wrlocal()
            if local is not None:
                dct = local.dicts.pop(idt)
        # Two weak references with callbacks tie the lifetimes together:
        # whichever of (impl, thread) dies first cleans up its side.
        wrlocal = ref(self, local_deleted)
        wrthread = ref(thread, thread_deleted)
        thread.__dict__[key] = wrlocal
        self.dicts[idt] = wrthread, localdict
        return localdict
@contextmanager
def _patch(self):
    # Temporarily install the calling thread's dict as self.__dict__,
    # serialized by the impl lock, for the duration of one attribute access.
    impl = object.__getattribute__(self, '_local__impl')
    try:
        dct = impl.get_dict()
    except KeyError:
        # First access from this thread: create the per-thread dict and
        # re-run __init__ with the arguments given to the constructor.
        dct = impl.create_dict()
        args, kw = impl.localargs
        self.__init__(*args, **kw)
    with impl.locallock:
        object.__setattr__(self, '__dict__', dct)
        yield
class local:
    """Pure-Python thread-local data container (see module docstring)."""
    __slots__ = '_local__impl', '__dict__'

    def __new__(cls, *args, **kw):
        # Constructor arguments only make sense if a subclass defines
        # __init__ to consume them.
        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")
        self = object.__new__(cls)
        impl = _localimpl()
        impl.localargs = (args, kw)
        impl.locallock = RLock()
        object.__setattr__(self, '_local__impl', impl)
        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        impl.create_dict()
        return self

    def __getattribute__(self, name):
        # Every attribute access runs with this thread's dict patched in.
        with _patch(self):
            return object.__getattribute__(self, name)

    def __setattr__(self, name, value):
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__setattr__(self, name, value)

    def __delattr__(self, name):
        if name == '__dict__':
            raise AttributeError(
                "%r object attribute '__dict__' is read-only"
                % self.__class__.__name__)
        with _patch(self):
            return object.__delattr__(self, name)
from threading import current_thread, RLock

View file

@ -1,196 +0,0 @@
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard:
# This context manager registers itself in the current iterators of the
# weak container, such as to delay all removals until the context manager
# exits.
# This technique should be relatively thread-safe (since sets are).
def __init__(self, weakcontainer):
# Don't create cycles
self.weakcontainer = ref(weakcontainer)
def __enter__(self):
w = self.weakcontainer()
if w is not None:
w._iterating.add(self)
return self
def __exit__(self, e, t, b):
w = self.weakcontainer()
if w is not None:
s = w._iterating
s.remove(self)
if not s:
w._commit_removals()
class WeakSet:
    """A set of objects held through weak references.

    Elements are discarded automatically once no strong reference to
    them remains.  Removals triggered while an iteration is in progress
    are deferred (via _IterationGuard) and committed when the last
    active iterator finishes.
    """

    def __init__(self, data=None):
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Weakref callback invoked when an element dies; bound to a
            # weak reference to self so the callback creates no cycle.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)

    def _commit_removals(self):
        # Apply removals that were deferred while iterators were active.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())

    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    # Caveat: the iterator will keep a strong reference to
                    # `item` until it is resumed or closed.
                    yield item

    def __len__(self):
        # Dead-but-not-yet-removed entries are excluded from the count.
        return len(self.data) - len(self._pending_removals)

    def __contains__(self, item):
        try:
            wr = ref(item)
        except TypeError:
            # Objects that cannot be weakly referenced are never members.
            return False
        return wr in self.data

    def __reduce__(self):
        # Pickle as (class, live elements, instance dict if any).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))

    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.add(ref(item, self._remove))

    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def pop(self):
        """Remove and return an arbitrary live element; KeyError if empty."""
        if self._pending_removals:
            self._commit_removals()
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item

    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))

    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))

    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        for element in other:
            self.add(element)

    def __ior__(self, other):
        self.update(other)
        return self

    def difference(self, other):
        newset = self.copy()
        newset.difference_update(other)
        return newset
    __sub__ = difference

    def difference_update(self, other):
        self.__isub__(other)
    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self

    def intersection(self, other):
        return self.__class__(item for item in other if item in self)
    __and__ = intersection

    def intersection_update(self, other):
        self.__iand__(other)
    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self

    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    __le__ = issubset

    def __lt__(self, other):
        return self.data < set(ref(item) for item in other)

    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __ge__ = issuperset

    def __gt__(self, other):
        return self.data > set(ref(item) for item in other)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)

    def symmetric_difference(self, other):
        newset = self.copy()
        newset.symmetric_difference_update(other)
        return newset
    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        self.__ixor__(other)
    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item, self._remove) for item in other)
        return self

    def union(self, other):
        return self.__class__(e for s in (self, other) for e in s)
    __or__ = union

    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0

View file

@ -1,248 +0,0 @@
# Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) according to PEP 3119."""
from _weakrefset import WeakSet
def abstractmethod(funcobj):
    """Mark *funcobj* as an abstract method and return it.

    Only meaningful on classes whose metaclass is ABCMeta (or derived
    from it): such a class cannot be instantiated until every abstract
    method is overridden.  The abstract methods may still be invoked
    through the normal 'super' call mechanisms.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractmethod
            def my_abstract_method(self, ...):
                ...
    """
    funcobj.__isabstractmethod__ = True
    return funcobj
class abstractclassmethod(classmethod):
    """A classmethod descriptor marked as abstract.

    Deprecated: stack ``classmethod`` with ``abstractmethod`` instead.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractclassmethod
            def my_abstract_classmethod(cls, ...):
                ...
    """

    __isabstractmethod__ = True

    def __init__(self, callable):
        super().__init__(callable)
        # Also flag the wrapped callable itself as abstract.
        callable.__isabstractmethod__ = True
class abstractstaticmethod(staticmethod):
    """A staticmethod descriptor marked as abstract.

    Deprecated: stack ``staticmethod`` with ``abstractmethod`` instead.

    Usage:

        class C(metaclass=ABCMeta):
            @abstractstaticmethod
            def my_abstract_staticmethod(...):
                ...
    """

    __isabstractmethod__ = True

    def __init__(self, callable):
        super().__init__(callable)
        # Also flag the wrapped callable itself as abstract.
        callable.__isabstractmethod__ = True
class abstractproperty(property):
    """A property marked as abstract.

    Deprecated: use ``property`` together with ``abstractmethod``
    instead.  A class with an ABCMeta-derived metaclass cannot be
    instantiated until every abstract property is overridden; the
    abstract properties may still be reached via 'super'.

    This form defines a read-only property; a read-write abstract
    property uses the 'long' form of property declaration:

        class C(metaclass=ABCMeta):
            def getx(self): ...
            def setx(self, value): ...
            x = abstractproperty(getx, setx)
    """

    __isabstractmethod__ = True
class ABCMeta(type):
    """Metaclass for defining Abstract Base Classes (ABCs).

    Use this metaclass to create an ABC. An ABC can be subclassed
    directly, and then acts as a mix-in class. You can also register
    unrelated concrete classes (even built-in classes) and unrelated
    ABCs as 'virtual subclasses' -- these and their descendants will
    be considered subclasses of the registering ABC by the built-in
    issubclass() function, but the registering ABC won't show up in
    their MRO (Method Resolution Order) nor will method
    implementations defined by the registering ABC be callable (not
    even via super()).
    """

    # A global counter that is incremented each time a class is
    # registered as a virtual subclass of anything. It forces the
    # negative cache to be cleared before its next use.
    # Note: this counter is private. Use `abc.get_cache_token()` for
    # external code.
    _abc_invalidation_counter = 0

    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        # Compute set of abstract method names
        abstracts = {name
                     for name, value in namespace.items()
                     if getattr(value, "__isabstractmethod__", False)}
        # Inherit abstractness from bases unless overridden by a
        # concrete definition on this class.
        for base in bases:
            for name in getattr(base, "__abstractmethods__", set()):
                value = getattr(cls, name, None)
                if getattr(value, "__isabstractmethod__", False):
                    abstracts.add(name)
        cls.__abstractmethods__ = frozenset(abstracts)
        # Set up inheritance registry (weak sets so registration does not
        # keep classes alive).
        cls._abc_registry = WeakSet()
        cls._abc_cache = WeakSet()
        cls._abc_negative_cache = WeakSet()
        cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
        return cls

    def register(cls, subclass):
        """Register a virtual subclass of an ABC.

        Returns the subclass, to allow usage as a class decorator.
        """
        if not isinstance(subclass, type):
            raise TypeError("Can only register classes")
        if issubclass(subclass, cls):
            return subclass  # Already a subclass
        # Subtle: test for cycles *after* testing for "already a subclass";
        # this means we allow X.register(X) and interpret it as a no-op.
        if issubclass(cls, subclass):
            # This would create a cycle, which is bad for the algorithm below
            raise RuntimeError("Refusing to create an inheritance cycle")
        cls._abc_registry.add(subclass)
        ABCMeta._abc_invalidation_counter += 1  # Invalidate negative cache
        return subclass

    def _dump_registry(cls, file=None):
        """Debug helper to print the ABC registry."""
        print("Class: %s.%s" % (cls.__module__, cls.__qualname__), file=file)
        print("Inv.counter: %s" % ABCMeta._abc_invalidation_counter, file=file)
        for name in sorted(cls.__dict__.keys()):
            if name.startswith("_abc_"):
                value = getattr(cls, name)
                print("%s: %r" % (name, value), file=file)

    def __instancecheck__(cls, instance):
        """Override for isinstance(instance, cls)."""
        # Inline the cache checking
        subclass = instance.__class__
        if subclass in cls._abc_cache:
            return True
        subtype = type(instance)
        if subtype is subclass:
            # Consult the negative cache only if it is still current.
            if (cls._abc_negative_cache_version ==
                ABCMeta._abc_invalidation_counter and
                subclass in cls._abc_negative_cache):
                return False
            # Fall back to the subclass check.
            return cls.__subclasscheck__(subclass)
        return any(cls.__subclasscheck__(c) for c in {subclass, subtype})

    def __subclasscheck__(cls, subclass):
        """Override for issubclass(subclass, cls)."""
        # Check cache
        if subclass in cls._abc_cache:
            return True
        # Check negative cache; may have to invalidate
        if cls._abc_negative_cache_version < ABCMeta._abc_invalidation_counter:
            # Invalidate the negative cache
            cls._abc_negative_cache = WeakSet()
            cls._abc_negative_cache_version = ABCMeta._abc_invalidation_counter
        elif subclass in cls._abc_negative_cache:
            return False
        # Check the subclass hook
        ok = cls.__subclasshook__(subclass)
        if ok is not NotImplemented:
            assert isinstance(ok, bool)
            if ok:
                cls._abc_cache.add(subclass)
            else:
                cls._abc_negative_cache.add(subclass)
            return ok
        # Check if it's a direct subclass
        if cls in getattr(subclass, '__mro__', ()):
            cls._abc_cache.add(subclass)
            return True
        # Check if it's a subclass of a registered class (recursive)
        for rcls in cls._abc_registry:
            if issubclass(subclass, rcls):
                cls._abc_cache.add(subclass)
                return True
        # Check if it's a subclass of a subclass (recursive)
        for scls in cls.__subclasses__():
            if issubclass(subclass, scls):
                cls._abc_cache.add(subclass)
                return True
        # No dice; update negative cache
        cls._abc_negative_cache.add(subclass)
        return False
class ABC(metaclass=ABCMeta):
    """Convenience base class: inheriting from ABC gives a class the
    ABCMeta metaclass without naming it explicitly.
    """
def get_cache_token():
    """Return an opaque token identifying the current ABC cache version.

    The token supports equality testing and changes with every call to
    ``register()`` on any ABC, signalling that negative caches of
    virtual-subclass checks must be discarded.
    """
    return ABCMeta._abc_invalidation_counter

View file

@ -1,921 +0,0 @@
"""Stuff to parse AIFF-C and AIFF files.
Unless explicitly stated otherwise, the description below is true
both for AIFF-C files and AIFF files.
An AIFF-C file has the following structure.
+-----------------+
| FORM |
+-----------------+
| <size> |
+----+------------+
| | AIFC |
| +------------+
| | <chunks> |
| | . |
| | . |
| | . |
+----+------------+
An AIFF file has the string "AIFF" instead of "AIFC".
A chunk consists of an identifier (4 bytes) followed by a size (4 bytes,
big endian order), followed by the data. The size field does not include
the size of the 8 byte header.
The following chunk types are recognized.
FVER
<version number of AIFF-C defining document> (AIFF-C only).
MARK
<# of markers> (2 bytes)
list of markers:
<marker ID> (2 bytes, must be > 0)
<position> (4 bytes)
<marker name> ("pstring")
COMM
<# of channels> (2 bytes)
<# of sound frames> (4 bytes)
<size of the samples> (2 bytes)
<sampling frequency> (10 bytes, IEEE 80-bit extended
floating point)
in AIFF-C files only:
<compression type> (4 bytes)
<human-readable version of compression type> ("pstring")
SSND
<offset> (4 bytes, not used by this program)
<blocksize> (4 bytes, not used by this program)
<sound data>
A pstring consists of 1 byte length, a string of characters, and 0 or 1
byte pad to make the total length even.
Usage.
Reading AIFF files:
f = aifc.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
In some types of audio files, if the setpos() method is not used,
the seek() method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' for AIFF files)
getcompname() -- returns human-readable version of
compression type ('not compressed' for AIFF files)
getparams() -- returns a namedtuple consisting of all of the
above in the above order
getmarkers() -- get the list of marks in the audio file or None
if there are no marks
getmark(id) -- get mark with the specified id (raises an error
if the mark does not exist)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell(), the position given to setpos() and
the position of marks are all compatible and have nothing to do with
the actual position in the file.
The close() method is called automatically when the class instance
is destroyed.
Writing AIFF files:
f = aifc.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
aiff() -- create an AIFF file (AIFF-C default)
aifc() -- create an AIFF-C file
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)
-- set all parameters at once
setmark(id, pos, name)
-- add specified mark to the list of marks
tell() -- return current position in output file (useful
in combination with setmark())
writeframesraw(data)
          -- write audio frames without patching up the
file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, except possibly the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes(b'') or
close() to patch up the sizes in the header.
Marks can be added anytime. If there are any marks, you must call
close() after all frames have been written.
The close() method is called automatically when the class instance
is destroyed.
When a file is opened with the extension '.aiff', an AIFF file is
written, otherwise an AIFF-C file is written. This default can be
changed by calling aiff() or aifc() before the first writeframes or
writeframesraw.
"""
import struct
import builtins
import warnings
__all__ = ["Error", "open", "openfp"]
class Error(Exception):
    """Exception raised for all aifc-specific error conditions."""
_AIFC_version = 0xA2805140 # Version 1 of AIFF-C
def _read_long(file):
try:
return struct.unpack('>l', file.read(4))[0]
except struct.error:
raise EOFError
def _read_ulong(file):
try:
return struct.unpack('>L', file.read(4))[0]
except struct.error:
raise EOFError
def _read_short(file):
try:
return struct.unpack('>h', file.read(2))[0]
except struct.error:
raise EOFError
def _read_ushort(file):
try:
return struct.unpack('>H', file.read(2))[0]
except struct.error:
raise EOFError
def _read_string(file):
length = ord(file.read(1))
if length == 0:
data = b''
else:
data = file.read(length)
if length & 1 == 0:
dummy = file.read(1)
return data
_HUGE_VAL = 1.79769313486231e+308 # See <limits.h>
def _read_float(f):  # 10 bytes
    """Read an IEEE 754 80-bit extended float (big endian) from *f*.

    Layout: 1 sign bit + 15 exponent bits (2 bytes), then a 64-bit
    mantissa (two unsigned longs).  All-zero fields decode to 0.0 and a
    maximal exponent decodes to _HUGE_VAL.
    """
    expon = _read_short(f)  # 2 bytes: sign + biased exponent
    sign = 1
    if expon < 0:
        sign = -1
        expon += 0x8000
    himant = _read_ulong(f)  # 4 bytes
    lomant = _read_ulong(f)  # 4 bytes
    if expon == himant == lomant == 0:
        value = 0.0
    elif expon == 0x7FFF:
        value = _HUGE_VAL
    else:
        # Unbias (16383) and scale the 64-bit integer mantissa down.
        value = (himant * 0x100000000 + lomant) * pow(2.0, (expon - 16383) - 63)
    return sign * value
def _write_short(f, x):
f.write(struct.pack('>h', x))
def _write_ushort(f, x):
f.write(struct.pack('>H', x))
def _write_long(f, x):
f.write(struct.pack('>l', x))
def _write_ulong(f, x):
f.write(struct.pack('>L', x))
def _write_string(f, s):
if len(s) > 255:
raise ValueError("string exceeds maximum pstring length")
f.write(struct.pack('B', len(s)))
f.write(s)
if len(s) & 1 == 0:
f.write(b'\x00')
def _write_float(f, x):
    """Write *x* to *f* as an IEEE 754 80-bit extended float (10 bytes,
    big endian): sign + 15-bit biased exponent, then 64 mantissa bits."""
    import math
    if x < 0:
        sign = 0x8000
        x = x * -1
    else:
        sign = 0
    if x == 0:
        expon = 0
        himant = 0
        lomant = 0
    else:
        fmant, expon = math.frexp(x)
        if expon > 16384 or fmant >= 1 or fmant != fmant: # Infinity or NaN
            expon = sign|0x7FFF
            himant = 0
            lomant = 0
        else:                          # Finite
            # frexp gives fmant in [0.5, 1); rebias exponent by 16382.
            expon = expon + 16382
            if expon < 0:              # denormalized
                fmant = math.ldexp(fmant, expon)
                expon = 0
            expon = expon | sign
            # Extract the high and low 32 bits of the 64-bit mantissa.
            fmant = math.ldexp(fmant, 32)
            fsmant = math.floor(fmant)
            himant = int(fsmant)
            fmant = math.ldexp(fmant - fsmant, 32)
            fsmant = math.floor(fmant)
            lomant = int(fsmant)
    _write_ushort(f, expon)
    _write_ulong(f, himant)
    _write_ulong(f, lomant)
from chunk import Chunk
from collections import namedtuple
_aifc_params = namedtuple('_aifc_params',
'nchannels sampwidth framerate nframes comptype compname')
class Aifc_read:
    """Read access to AIFF and AIFF-C files (see the module docstring
    for the public API)."""
    # Variables used in this class:
    #
    # These variables are available to the user though appropriate
    # methods of this class:
    # _file -- the open file with methods read(), close(), and seek()
    #       set through the __init__() method
    # _nchannels -- the number of audio channels
    #       available through the getnchannels() method
    # _nframes -- the number of audio frames
    #       available through the getnframes() method
    # _sampwidth -- the number of bytes per audio sample
    #       available through the getsampwidth() method
    # _framerate -- the sampling frequency
    #       available through the getframerate() method
    # _comptype -- the AIFF-C compression type ('NONE' if AIFF)
    #       available through the getcomptype() method
    # _compname -- the human-readable AIFF-C compression type
    #       available through the getcomptype() method
    # _markers -- the marks in the audio file
    #       available through the getmarkers() and getmark()
    #       methods
    # _soundpos -- the position in the audio stream
    #       available through the tell() method, set through the
    #       setpos() method
    #
    # These variables are used internally only:
    # _version -- the AIFF-C version number
    # _decomp -- the decompressor from builtin module cl
    # _comm_chunk_read -- 1 iff the COMM chunk has been read
    # _aifc -- 1 iff reading an AIFF-C file
    # _ssnd_seek_needed -- 1 iff positioned correctly in audio
    #       file for readframes()
    # _ssnd_chunk -- instantiation of a chunk class for the SSND chunk
    # _framesize -- size of one frame in the file

    def initfp(self, file):
        """Parse the FORM container of *file* and record COMM/SSND/FVER/
        MARK chunk data.  Raises Error on malformed input."""
        self._version = 0
        self._convert = None
        self._markers = []
        self._soundpos = 0
        self._file = file
        chunk = Chunk(file)
        if chunk.getname() != b'FORM':
            raise Error('file does not start with FORM id')
        formdata = chunk.read(4)
        if formdata == b'AIFF':
            self._aifc = 0
        elif formdata == b'AIFC':
            self._aifc = 1
        else:
            raise Error('not an AIFF or AIFF-C file')
        self._comm_chunk_read = 0
        while 1:
            self._ssnd_seek_needed = 1
            try:
                chunk = Chunk(self._file)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == b'COMM':
                self._read_comm_chunk(chunk)
                self._comm_chunk_read = 1
            elif chunkname == b'SSND':
                self._ssnd_chunk = chunk
                dummy = chunk.read(8)
                self._ssnd_seek_needed = 0
            elif chunkname == b'FVER':
                self._version = _read_ulong(chunk)
            elif chunkname == b'MARK':
                self._readmark(chunk)
            chunk.skip()
        # NOTE(review): if no SSND chunk was seen, _ssnd_chunk is unset and
        # this raises AttributeError rather than Error -- confirm intended.
        if not self._comm_chunk_read or not self._ssnd_chunk:
            raise Error('COMM chunk and/or SSND chunk missing')

    def __init__(self, f):
        if isinstance(f, str):
            f = builtins.open(f, 'rb')
        # else, assume it is an open file object already
        self.initfp(f)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    #
    # User visible methods.
    #
    def getfp(self):
        return self._file

    def rewind(self):
        # Reset the logical position; the actual seek happens lazily in
        # readframes().
        self._ssnd_seek_needed = 1
        self._soundpos = 0

    def close(self):
        file = self._file
        if file is not None:
            self._file = None
            file.close()

    def tell(self):
        return self._soundpos

    def getnchannels(self):
        return self._nchannels

    def getnframes(self):
        return self._nframes

    def getsampwidth(self):
        return self._sampwidth

    def getframerate(self):
        return self._framerate

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

##  def getversion(self):
##      return self._version

    def getparams(self):
        """Return all parameters as an _aifc_params namedtuple."""
        return _aifc_params(self.getnchannels(), self.getsampwidth(),
                            self.getframerate(), self.getnframes(),
                            self.getcomptype(), self.getcompname())

    def getmarkers(self):
        if len(self._markers) == 0:
            return None
        return self._markers

    def getmark(self, id):
        for marker in self._markers:
            if id == marker[0]:
                return marker
        raise Error('marker {0!r} does not exist'.format(id))

    def setpos(self, pos):
        """Seek to frame *pos*; raises Error if out of range."""
        if pos < 0 or pos > self._nframes:
            raise Error('position not in range')
        self._soundpos = pos
        self._ssnd_seek_needed = 1

    def readframes(self, nframes):
        """Return at most *nframes* frames of (possibly decompressed)
        audio data as bytes."""
        if self._ssnd_seek_needed:
            self._ssnd_chunk.seek(0)
            dummy = self._ssnd_chunk.read(8)
            pos = self._soundpos * self._framesize
            if pos:
                self._ssnd_chunk.seek(pos + 8)
            self._ssnd_seek_needed = 0
        if nframes == 0:
            return b''
        data = self._ssnd_chunk.read(nframes * self._framesize)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) // (self._nchannels
                                                        * self._sampwidth)
        return data

    #
    # Internal methods.
    #
    def _alaw2lin(self, data):
        import audioop
        return audioop.alaw2lin(data, 2)

    def _ulaw2lin(self, data):
        import audioop
        return audioop.ulaw2lin(data, 2)

    def _adpcm2lin(self, data):
        import audioop
        if not hasattr(self, '_adpcmstate'):
            # first time
            self._adpcmstate = None
        data, self._adpcmstate = audioop.adpcm2lin(data, 2, self._adpcmstate)
        return data

    def _read_comm_chunk(self, chunk):
        self._nchannels = _read_short(chunk)
        self._nframes = _read_long(chunk)
        self._sampwidth = (_read_short(chunk) + 7) // 8
        self._framerate = int(_read_float(chunk))
        self._framesize = self._nchannels * self._sampwidth
        if self._aifc:
            #DEBUG: SGI's soundeditor produces a bad size :-(
            kludge = 0
            if chunk.chunksize == 18:
                kludge = 1
                warnings.warn('Warning: bad COMM chunk size')
                chunk.chunksize = 23
            #DEBUG end
            self._comptype = chunk.read(4)
            #DEBUG start
            if kludge:
                length = ord(chunk.file.read(1))
                if length & 1 == 0:
                    length = length + 1
                chunk.chunksize = chunk.chunksize + length
                chunk.file.seek(-1, 1)
            #DEBUG end
            self._compname = _read_string(chunk)
            if self._comptype != b'NONE':
                if self._comptype == b'G722':
                    self._convert = self._adpcm2lin
                elif self._comptype in (b'ulaw', b'ULAW'):
                    self._convert = self._ulaw2lin
                elif self._comptype in (b'alaw', b'ALAW'):
                    self._convert = self._alaw2lin
                else:
                    raise Error('unsupported compression type')
                # All supported codecs decompress to 16-bit samples.
                self._sampwidth = 2
        else:
            self._comptype = b'NONE'
            self._compname = b'not compressed'

    def _readmark(self, chunk):
        nmarkers = _read_short(chunk)
        # Some files appear to contain invalid counts.
        # Cope with this by testing for EOF.
        try:
            for i in range(nmarkers):
                id = _read_short(chunk)
                pos = _read_long(chunk)
                name = _read_string(chunk)
                if pos or name:
                    # some files appear to have
                    # dummy markers consisting of
                    # a position 0 and name ''
                    self._markers.append((id, pos, name))
        except EOFError:
            w = ('Warning: MARK chunk contains only %s marker%s instead of %s' %
                 (len(self._markers), '' if len(self._markers) == 1 else 's',
                  nmarkers))
            warnings.warn(w)
class Aifc_write:
    """Writer for AIFF and AIFF-C audio files.

    Set the stream parameters (channels, sample width, frame rate and
    optionally a compression type), then feed sample data through
    writeframes()/writeframesraw().  The FORM/COMM/SSND headers are
    written lazily on the first write and patched on close() when the
    actual amount of data differs from what the header announced.
    """
    # Variables used in this class:
    #
    # User settable through appropriate methods of this class:
    # _file -- the open file with methods write(), close(), tell(), seek()
    #       set through the __init__() method
    # _comptype -- the AIFF-C compression type ('NONE' in AIFF)
    #       set through the setcomptype() or setparams() method
    # _compname -- the human-readable AIFF-C compression type
    #       set through the setcomptype() or setparams() method
    # _nchannels -- the number of audio channels
    #       set through the setnchannels() or setparams() method
    # _sampwidth -- the number of bytes per audio sample
    #       set through the setsampwidth() or setparams() method
    # _framerate -- the sampling frequency
    #       set through the setframerate() or setparams() method
    # _nframes -- the number of audio frames written to the header
    #       set through the setnframes() or setparams() method
    # _aifc -- whether we're writing an AIFF-C file or an AIFF file
    #       set through the aifc() method, reset through the aiff() method
    #
    # Used internally only:
    # _version -- the AIFF-C version number
    # _comp -- the compressor from builtin module cl
    # _nframeswritten -- the number of audio frames actually written
    # _datalength -- the size of the audio samples written to the header
    # _datawritten -- the size of the audio samples actually written

    # Class-level default so __del__ -> close() is safe even when
    # __init__ raised before initfp() could run.
    _file = None

    def __init__(self, f):
        """*f* is a filename string or an already-open binary file object."""
        if isinstance(f, str):
            file_object = builtins.open(f, 'wb')
            try:
                self.initfp(file_object)
            except:
                # We opened the file ourselves; close it on failure
                # instead of leaking the descriptor.
                file_object.close()
                raise
            # A '.aiff' extension selects plain (non-compressed) AIFF.
            if f.endswith('.aiff'):
                self._aifc = 0
        else:
            # Assume it is an open file object already; on failure the
            # caller remains responsible for closing it.
            self.initfp(f)

    def initfp(self, file):
        """Reset all writer state and bind to *file*."""
        self._file = file
        self._version = _AIFC_version
        self._comptype = b'NONE'
        self._compname = b'not compressed'
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._markers = []
        self._marklength = 0
        self._aifc = 1      # AIFF-C is default

    def __del__(self):
        self.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    #
    # User visible methods.
    #
    def aiff(self):
        """Select plain AIFF output."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        self._aifc = 0

    def aifc(self):
        """Select AIFF-C output (the default)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        self._aifc = 1

    def setnchannels(self, nchannels):
        """Set the number of audio channels (>= 1)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if nchannels < 1:
            raise Error('bad # of channels')
        self._nchannels = nchannels

    def getnchannels(self):
        if not self._nchannels:
            raise Error('number of channels not set')
        return self._nchannels

    def setsampwidth(self, sampwidth):
        """Set the sample width in bytes (1..4)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if sampwidth < 1 or sampwidth > 4:
            raise Error('bad sample width')
        self._sampwidth = sampwidth

    def getsampwidth(self):
        if not self._sampwidth:
            raise Error('sample width not set')
        return self._sampwidth

    def setframerate(self, framerate):
        """Set the sampling frequency in frames per second (> 0)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if framerate <= 0:
            raise Error('bad frame rate')
        self._framerate = framerate

    def getframerate(self):
        if not self._framerate:
            raise Error('frame rate not set')
        return self._framerate

    def setnframes(self, nframes):
        """Declare the frame count the header should announce."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        self._nframes = nframes

    def getnframes(self):
        # Note: returns the frames actually written, not the declared count.
        return self._nframeswritten

    def setcomptype(self, comptype, compname):
        """Set the AIFF-C compression type and its human-readable name."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if comptype not in (b'NONE', b'ulaw', b'ULAW',
                            b'alaw', b'ALAW', b'G722'):
            raise Error('unsupported compression type')
        self._comptype = comptype
        self._compname = compname

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def setparams(self, params):
        """Set all parameters at once from a (nchannels, sampwidth,
        framerate, nframes, comptype, compname) tuple."""
        nchannels, sampwidth, framerate, nframes, comptype, compname = params
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if comptype not in (b'NONE', b'ulaw', b'ULAW',
                            b'alaw', b'ALAW', b'G722'):
            raise Error('unsupported compression type')
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)

    def getparams(self):
        """Return all parameters as a namedtuple; raise Error if the
        mandatory ones (channels, width, rate) are not yet set."""
        if not self._nchannels or not self._sampwidth or not self._framerate:
            raise Error('not all parameters set')
        return _aifc_params(self._nchannels, self._sampwidth, self._framerate,
                            self._nframes, self._comptype, self._compname)

    def setmark(self, id, pos, name):
        """Add a marker, or replace the existing marker with the same id."""
        if id <= 0:
            raise Error('marker ID must be > 0')
        if pos < 0:
            raise Error('marker position must be >= 0')
        if not isinstance(name, bytes):
            raise Error('marker name must be bytes')
        for i in range(len(self._markers)):
            if id == self._markers[i][0]:
                self._markers[i] = id, pos, name
                return
        self._markers.append((id, pos, name))

    def getmark(self, id):
        for marker in self._markers:
            if id == marker[0]:
                return marker
        raise Error('marker {0!r} does not exist'.format(id))

    def getmarkers(self):
        if len(self._markers) == 0:
            return None
        return self._markers

    def tell(self):
        return self._nframeswritten

    def writeframesraw(self, data):
        """Write audio frames without updating the header afterwards."""
        if not isinstance(data, (bytes, bytearray)):
            data = memoryview(data).cast('B')
        self._ensure_header_written(len(data))
        nframes = len(data) // (self._sampwidth * self._nchannels)
        if self._convert:
            data = self._convert(data)
        self._file.write(data)
        self._nframeswritten = self._nframeswritten + nframes
        self._datawritten = self._datawritten + len(data)

    def writeframes(self, data):
        """Write audio frames and patch the header if sizes changed."""
        self.writeframesraw(data)
        if self._nframeswritten != self._nframes or \
           self._datalength != self._datawritten:
            self._patchheader()

    def close(self):
        """Flush header and markers, pad to even length and close the file.

        Safe to call multiple times; later calls are no-ops.
        """
        if self._file is None:
            return
        try:
            self._ensure_header_written(0)
            if self._datawritten & 1:
                # quick pad to even size
                self._file.write(b'\x00')
                self._datawritten = self._datawritten + 1
            self._writemarkers()
            if self._nframeswritten != self._nframes or \
               self._datalength != self._datawritten or \
               self._marklength:
                self._patchheader()
        finally:
            # Prevent ref cycles
            self._convert = None
            f = self._file
            self._file = None
            f.close()

    #
    # Internal methods.
    #
    def _lin2alaw(self, data):
        # Encode 16-bit linear samples as A-law.
        import audioop
        return audioop.lin2alaw(data, 2)

    def _lin2ulaw(self, data):
        # Encode 16-bit linear samples as u-law.
        import audioop
        return audioop.lin2ulaw(data, 2)

    def _lin2adpcm(self, data):
        # Encode 16-bit linear samples as G.722 ADPCM, carrying codec
        # state across calls on self._adpcmstate.
        import audioop
        if not hasattr(self, '_adpcmstate'):
            self._adpcmstate = None
        data, self._adpcmstate = audioop.lin2adpcm(data, 2, self._adpcmstate)
        return data

    def _ensure_header_written(self, datasize):
        # Validate the parameters and emit the header before first write.
        if not self._nframeswritten:
            if self._comptype in (b'ULAW', b'ulaw', b'ALAW', b'alaw', b'G722'):
                if not self._sampwidth:
                    self._sampwidth = 2
                if self._sampwidth != 2:
                    raise Error('sample width must be 2 when compressing '
                                'with ulaw/ULAW, alaw/ALAW or G7.22 (ADPCM)')
            if not self._nchannels:
                raise Error('# channels not specified')
            if not self._sampwidth:
                raise Error('sample width not specified')
            if not self._framerate:
                raise Error('sampling rate not specified')
            self._write_header(datasize)

    def _init_compression(self):
        # Select the encoder matching the configured compression type.
        if self._comptype == b'G722':
            self._convert = self._lin2adpcm
        elif self._comptype in (b'ulaw', b'ULAW'):
            self._convert = self._lin2ulaw
        elif self._comptype in (b'alaw', b'ALAW'):
            self._convert = self._lin2alaw

    def _write_header(self, initlength):
        # Emit the FORM/FVER/COMM/SSND header chunks.
        if self._aifc and self._comptype != b'NONE':
            self._init_compression()
        self._file.write(b'FORM')
        if not self._nframes:
            self._nframes = initlength // (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        if self._datalength & 1:
            self._datalength = self._datalength + 1
        if self._aifc:
            # Compressed data occupies less room than the raw estimate.
            if self._comptype in (b'ulaw', b'ULAW', b'alaw', b'ALAW'):
                self._datalength = self._datalength // 2
                if self._datalength & 1:
                    self._datalength = self._datalength + 1
            elif self._comptype == b'G722':
                self._datalength = (self._datalength + 3) // 4
                if self._datalength & 1:
                    self._datalength = self._datalength + 1
        try:
            self._form_length_pos = self._file.tell()
        except (AttributeError, OSError):
            # Unseekable stream: header cannot be patched later.
            self._form_length_pos = None
        commlength = self._write_form_length(self._datalength)
        if self._aifc:
            self._file.write(b'AIFC')
            self._file.write(b'FVER')
            _write_ulong(self._file, 4)
            _write_ulong(self._file, self._version)
        else:
            self._file.write(b'AIFF')
        self._file.write(b'COMM')
        _write_ulong(self._file, commlength)
        _write_short(self._file, self._nchannels)
        if self._form_length_pos is not None:
            self._nframes_pos = self._file.tell()
        _write_ulong(self._file, self._nframes)
        if self._comptype in (b'ULAW', b'ulaw', b'ALAW', b'alaw', b'G722'):
            _write_short(self._file, 8)
        else:
            _write_short(self._file, self._sampwidth * 8)
        _write_float(self._file, self._framerate)
        if self._aifc:
            self._file.write(self._comptype)
            _write_string(self._file, self._compname)
        self._file.write(b'SSND')
        if self._form_length_pos is not None:
            self._ssnd_length_pos = self._file.tell()
        _write_ulong(self._file, self._datalength + 8)
        _write_ulong(self._file, 0)
        _write_ulong(self._file, 0)

    def _write_form_length(self, datalength):
        # Write the FORM size field; return the COMM chunk length.
        if self._aifc:
            commlength = 18 + 5 + len(self._compname)
            if commlength & 1:
                commlength = commlength + 1
            verslength = 12
        else:
            commlength = 18
            verslength = 0
        _write_ulong(self._file, 4 + verslength + self._marklength + \
                     8 + commlength + 16 + datalength)
        return commlength

    def _patchheader(self):
        # Rewrite the size fields now that the true data size is known.
        curpos = self._file.tell()
        if self._datawritten & 1:
            datalength = self._datawritten + 1
            self._file.write(b'\x00')
        else:
            datalength = self._datawritten
        if datalength == self._datalength and \
           self._nframes == self._nframeswritten and \
           self._marklength == 0:
            self._file.seek(curpos, 0)
            return
        self._file.seek(self._form_length_pos, 0)
        dummy = self._write_form_length(datalength)
        self._file.seek(self._nframes_pos, 0)
        _write_ulong(self._file, self._nframeswritten)
        self._file.seek(self._ssnd_length_pos, 0)
        _write_ulong(self._file, datalength + 8)
        self._file.seek(curpos, 0)
        self._nframes = self._nframeswritten
        self._datalength = datalength

    def _writemarkers(self):
        # Emit the MARK chunk when any markers were set.
        if len(self._markers) == 0:
            return
        self._file.write(b'MARK')
        length = 2
        for marker in self._markers:
            id, pos, name = marker
            # 6 bytes of id+pos, 1 length byte, the name, plus a pad
            # byte when the name has even length (pascal-string rule).
            length = length + len(name) + 1 + 6
            if len(name) & 1 == 0:
                length = length + 1
        _write_ulong(self._file, length)
        self._marklength = length + 8
        _write_short(self._file, len(self._markers))
        for marker in self._markers:
            id, pos, name = marker
            _write_short(self._file, id)
            _write_ulong(self._file, pos)
            _write_string(self._file, name)
def open(f, mode=None):
    """Open an AIFF/AIFF-C file and return a reader or writer object.

    *f* is a filename or an open file object; *mode* is 'r'/'rb' for
    reading, 'w'/'wb' for writing, or None to infer it from f.mode
    (defaulting to read).
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode in ('r', 'rb'):
        return Aifc_read(f)
    if mode in ('w', 'wb'):
        return Aifc_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")

openfp = open  # B/W compatibility
if __name__ == '__main__':
    # Demo: print the parameters of an AIFF file and optionally copy
    # its frames to a second file given on the command line.
    import sys
    if not sys.argv[1:]:
        # No argument: fall back to an SGI sample file path.
        sys.argv.append('/usr/demos/data/audio/bach.aiff')
    fn = sys.argv[1]
    with open(fn, 'r') as f:
        print("Reading", fn)
        print("nchannels =", f.getnchannels())
        print("nframes =", f.getnframes())
        print("sampwidth =", f.getsampwidth())
        print("framerate =", f.getframerate())
        print("comptype =", f.getcomptype())
        print("compname =", f.getcompname())
        if sys.argv[2:]:
            # Copy the audio to the second file, 1024 frames at a time.
            gn = sys.argv[2]
            print("Writing", gn)
            with open(gn, 'w') as g:
                g.setparams(f.getparams())
                while 1:
                    data = f.readframes(1024)
                    if not data:
                        break
                    g.writeframes(data)
            print("Done.")

View file

@@ -1,17 +0,0 @@
import webbrowser
import hashlib

# Side effect on import: open xkcd 353 ("Python") in the default browser.
webbrowser.open("https://xkcd.com/353/")
def geohash(latitude, longitude, datedow):
    '''Compute geohash() using the Munroe algorithm.

    >>> geohash(37.421542, -122.085589, b'2005-05-26-10458.68')
    37.857713 -122.544543
    '''
    # http://xkcd.com/426/
    digest = hashlib.md5(datedow).hexdigest()
    fractions = []
    for chunk in (digest[:16], digest[16:32]):
        # Interpret each hex half as a fraction in [0, 1).
        fractions.append('%f' % float.fromhex('0.' + chunk))
    p, q = fractions
    # Glue the integer part of the coordinate to the hashed fraction.
    print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:]))

File diff suppressed because it is too large Load diff

View file

@@ -1,314 +0,0 @@
"""
ast
~~~
The `ast` module helps Python applications to process trees of the Python
abstract syntax grammar. The abstract syntax itself might change with
each Python release; this module helps to find out programmatically what
the current grammar looks like and allows modifications of it.
An abstract syntax tree can be generated by passing `ast.PyCF_ONLY_AST` as
a flag to the `compile()` builtin function or by using the `parse()`
function from this module. The result will be a tree of objects whose
classes all inherit from `ast.AST`.
A modified abstract syntax tree can be compiled into a Python code object
using the built-in `compile()` function.
Additionally various helper functions are provided that make working with
the trees simpler. The main intention of the helper functions and this
module in general is to provide an easy to use interface for libraries
that work tightly with the python syntax (template engines for example).
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
from _ast import *
def parse(source, filename='<unknown>', mode='exec'):
    """
    Parse *source* into an AST node; equivalent to
    compile(source, filename, mode, PyCF_ONLY_AST).
    """
    flags = PyCF_ONLY_AST
    return compile(source, filename, mode, flags)
def literal_eval(node_or_string):
    """
    Safely evaluate an expression node or a string containing a Python
    expression.  The string or node provided may only consist of the following
    Python literal structures: strings, bytes, numbers, tuples, lists, dicts,
    sets, booleans, and None.
    """
    # Accept source text or an already-parsed node; unwrap Expression.
    if isinstance(node_or_string, str):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, Expression):
        node_or_string = node_or_string.body
    def _convert(node):
        # Plain literals.
        if isinstance(node, (Str, Bytes)):
            return node.s
        elif isinstance(node, Num):
            return node.n
        # Container displays: convert elements recursively.
        elif isinstance(node, Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, List):
            return list(map(_convert, node.elts))
        elif isinstance(node, Set):
            return set(map(_convert, node.elts))
        elif isinstance(node, Dict):
            return dict((_convert(k), _convert(v)) for k, v
                        in zip(node.keys, node.values))
        elif isinstance(node, NameConstant):
            # True / False / None
            return node.value
        # Signed numbers, e.g. -1 or +2.5.
        elif isinstance(node, UnaryOp) and \
             isinstance(node.op, (UAdd, USub)) and \
             isinstance(node.operand, (Num, UnaryOp, BinOp)):
            operand = _convert(node.operand)
            if isinstance(node.op, UAdd):
                return + operand
            else:
                return - operand
        # Complex literals written as real +/- imaginary, e.g. 1+2j.
        elif isinstance(node, BinOp) and \
             isinstance(node.op, (Add, Sub)) and \
             isinstance(node.right, (Num, UnaryOp, BinOp)) and \
             isinstance(node.left, (Num, UnaryOp, BinOp)):
            left = _convert(node.left)
            right = _convert(node.right)
            if isinstance(node.op, Add):
                return left + right
            else:
                return left - right
        # Anything else is not a supported literal.
        raise ValueError('malformed node or string: ' + repr(node))
    return _convert(node_or_string)
def dump(node, annotate_fields=True, include_attributes=False):
    """
    Return a formatted dump of the tree in *node*, mainly for debugging.

    With *annotate_fields* true (the default) fields are shown as
    name=value pairs; set it to False for positional (evaluable) output.
    *include_attributes* additionally shows attributes such as line
    numbers and column offsets.
    """
    def _render(obj):
        if isinstance(obj, AST):
            pairs = [(name, _render(value)) for name, value in iter_fields(obj)]
            if annotate_fields:
                parts = ['%s=%s' % pair for pair in pairs]
            else:
                parts = [text for _, text in pairs]
            out = '%s(%s' % (obj.__class__.__name__, ', '.join(parts))
            if include_attributes and obj._attributes:
                out += ', ' if pairs else ' '
                out += ', '.join('%s=%s' % (name, _render(getattr(obj, name)))
                                 for name in obj._attributes)
            return out + ')'
        if isinstance(obj, list):
            return '[%s]' % ', '.join(_render(item) for item in obj)
        return repr(obj)
    if not isinstance(node, AST):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _render(node)
def copy_location(new_node, old_node):
    """
    Copy source location (`lineno` and `col_offset`) from *old_node*
    to *new_node* where both nodes support the attribute, and return
    *new_node*.
    """
    for attr in ('lineno', 'col_offset'):
        if (attr in old_node._attributes and attr in new_node._attributes
                and hasattr(old_node, attr)):
            setattr(new_node, attr, getattr(old_node, attr))
    return new_node
def fix_missing_locations(node):
    """
    Recursively fill in missing ``lineno``/``col_offset`` attributes,
    propagating the parent's values to children that lack them, so a
    generated tree can be handed to compile().  Starts at (1, 0).
    """
    def _fill(current, lineno, col_offset):
        if 'lineno' in current._attributes:
            if hasattr(current, 'lineno'):
                lineno = current.lineno
            else:
                current.lineno = lineno
        if 'col_offset' in current._attributes:
            if hasattr(current, 'col_offset'):
                col_offset = current.col_offset
            else:
                current.col_offset = col_offset
        for child in iter_child_nodes(current):
            _fill(child, lineno, col_offset)
    _fill(node, 1, 0)
    return node
def increment_lineno(node, n=1):
    """
    Add *n* to the line number of every node in the tree rooted at
    *node*; useful to "move code" within a file.  Returns *node*.
    """
    for descendant in walk(node):
        if 'lineno' in descendant._attributes:
            descendant.lineno = getattr(descendant, 'lineno', 0) + n
    return node
def iter_fields(node):
    """
    Yield a ``(fieldname, value)`` tuple for each field in
    ``node._fields`` that is actually present on *node*.
    """
    for name in node._fields:
        if hasattr(node, name):
            yield name, getattr(node, name)
def iter_child_nodes(node):
    """
    Yield each direct child node of *node*: fields that are AST nodes,
    plus AST items inside list-valued fields.
    """
    for _, value in iter_fields(node):
        if isinstance(value, AST):
            yield value
        elif isinstance(value, list):
            yield from (item for item in value if isinstance(item, AST))
def get_docstring(node, clean=True):
    """
    Return the docstring of *node* or None when it has none.  Raises
    TypeError for node types that cannot carry a docstring.  With
    *clean* true, the text is normalized via inspect.cleandoc().
    """
    if not isinstance(node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)):
        raise TypeError("%r can't have docstrings" % node.__class__.__name__)
    body = node.body
    if not (body and isinstance(body[0], Expr)
            and isinstance(body[0].value, Str)):
        return None
    text = body[0].value.s
    if clean:
        import inspect
        return inspect.cleandoc(text)
    return text
def walk(node):
    """
    Yield *node* and every descendant node (breadth-first, no specified
    order).  Useful for in-place modification when context is irrelevant.
    """
    from collections import deque
    pending = deque([node])
    while pending:
        current = pending.popleft()
        pending.extend(iter_child_nodes(current))
        yield current
class NodeVisitor(object):
    """
    Walk an abstract syntax tree, dispatching each node to a
    ``visit_<ClassName>`` method when the subclass defines one, and to
    :meth:`generic_visit` otherwise.  The handler's return value is
    forwarded by :meth:`visit`.

    Subclass this and add visitor methods; override :meth:`visit` to
    change the dispatch rule.  Do not use this class to modify the tree
    during traversal — use :class:`NodeTransformer` for that.
    """

    def visit(self, node):
        """Visit a node via its handler (or generic_visit)."""
        handler = getattr(self, 'visit_' + type(node).__name__,
                          self.generic_visit)
        return handler(node)

    def generic_visit(self, node):
        """Called if no explicit visitor function exists for a node."""
        for _, value in iter_fields(node):
            if isinstance(value, AST):
                self.visit(value)
            elif isinstance(value, list):
                for item in value:
                    if isinstance(item, AST):
                        self.visit(item)
class NodeTransformer(NodeVisitor):
    """
    A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
    allows modification of nodes.

    The `NodeTransformer` will walk the AST and use the return value of the
    visitor methods to replace or remove the old node.  If the return value of
    the visitor method is ``None``, the node will be removed from its location,
    otherwise it is replaced with the return value.  The return value may be the
    original node in which case no replacement takes place.

    Here is an example transformer that rewrites all occurrences of name lookups
    (``foo``) to ``data['foo']``::

       class RewriteName(NodeTransformer):
           def visit_Name(self, node):
               return copy_location(Subscript(
                   value=Name(id='data', ctx=Load()),
                   slice=Index(value=Str(s=node.id)),
                   ctx=node.ctx
               ), node)

    Keep in mind that if the node you're operating on has child nodes you must
    either transform the child nodes yourself or call the :meth:`generic_visit`
    method for the node first.

    For nodes that were part of a collection of statements (that applies to all
    statement nodes), the visitor may also return a list of nodes rather than
    just a single node.

    Usually you use the transformer like this::

       node = YourTransformer().visit(node)
    """
    def generic_visit(self, node):
        # Rebuild each field in place from the visitor's return values.
        for field, old_value in iter_fields(node):
            if isinstance(old_value, list):
                new_values = []
                for value in old_value:
                    if isinstance(value, AST):
                        value = self.visit(value)
                        if value is None:
                            # None removes the node from the list.
                            continue
                        elif not isinstance(value, AST):
                            # A handler may return a list of nodes;
                            # splice it into the parent list.
                            new_values.extend(value)
                            continue
                    new_values.append(value)
                # Mutate the existing list so the parent keeps its object.
                old_value[:] = new_values
            elif isinstance(old_value, AST):
                new_node = self.visit(old_value)
                if new_node is None:
                    # None deletes the child attribute entirely.
                    delattr(node, field)
                else:
                    setattr(node, field, new_node)
        return node

View file

@@ -1,336 +0,0 @@
# -*- Mode: Python; tab-width: 4 -*-
# Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp
# Author: Sam Rushing <rushing@nightmare.com>
# ======================================================================
# Copyright 1996 by Sam Rushing
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
r"""A class supporting chat-style (command/response) protocols.
This class adds support for 'chat' style protocols - where one side
sends a 'command', and the other sends a response (examples would be
the common internet protocols - smtp, nntp, ftp, etc..).
The handle_read() method looks at the input stream for the current
'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
for multi-line output), calling self.found_terminator() on its
receipt.
for example:
Say you build an async nntp client using this class. At the start
of the connection, you'll have self.terminator set to '\r\n', in
order to process the single-line greeting. Just before issuing a
'LIST' command you'll set it to '\r\n.\r\n'. The output of the LIST
command will be accumulated (using your own 'collect_incoming_data'
method) up to the terminator, and then control will be returned to
you - by calling your self.found_terminator() method.
"""
import asyncore
from collections import deque
class async_chat(asyncore.dispatcher):
    """This is an abstract class.  You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults
    ac_in_buffer_size = 65536
    ac_out_buffer_size = 65536

    # we don't want to enable the use of encoding by default, because that is a
    # sign of an application bug that we don't want to pass silently
    use_encoding = 0
    encoding = 'latin-1'

    def __init__(self, sock=None, map=None):
        # for string terminator matching
        self.ac_in_buffer = b''

        # we use a list here rather than io.BytesIO for a few reasons...
        # del lst[:] is faster than bio.truncate(0)
        # lst = [] is faster than bio.truncate(0)
        self.incoming = []

        # we toss the use of the "simple producer" and replace it with
        # a pure deque, which the original fifo was a wrapping of
        self.producer_fifo = deque()
        asyncore.dispatcher.__init__(self, sock, map)

    def collect_incoming_data(self, data):
        raise NotImplementedError("must be implemented in subclass")

    def _collect_incoming_data(self, data):
        self.incoming.append(data)

    def _get_data(self):
        # Join and reset the accumulated incoming fragments.
        d = b''.join(self.incoming)
        del self.incoming[:]
        return d

    def found_terminator(self):
        raise NotImplementedError("must be implemented in subclass")

    def set_terminator(self, term):
        """Set the input delimiter.

        Can be a fixed string of any length, an integer, or None.
        """
        if isinstance(term, str) and self.use_encoding:
            term = bytes(term, self.encoding)
        elif isinstance(term, int) and term < 0:
            raise ValueError('the number of received bytes must be positive')
        self.terminator = term

    def get_terminator(self):
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read(self):
        try:
            data = self.recv(self.ac_in_buffer_size)
        except BlockingIOError:
            return
        except OSError:
            self.handle_error()
            return

        if isinstance(data, str) and self.use_encoding:
            # BUGFIX: the original called bytes(str, self.encoding), which
            # passes the *type* str and always raises TypeError; encode the
            # received data instead.
            data = bytes(data, self.encoding)
        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data.  The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(4096).

        while self.ac_in_buffer:
            lb = len(self.ac_in_buffer)
            terminator = self.get_terminator()
            if not terminator:
                # no terminator, collect it all
                self.collect_incoming_data(self.ac_in_buffer)
                self.ac_in_buffer = b''
            elif isinstance(terminator, int):
                # numeric terminator
                n = terminator
                if lb < n:
                    self.collect_incoming_data(self.ac_in_buffer)
                    self.ac_in_buffer = b''
                    self.terminator = self.terminator - lb
                else:
                    self.collect_incoming_data(self.ac_in_buffer[:n])
                    self.ac_in_buffer = self.ac_in_buffer[n:]
                    self.terminator = 0
                    self.found_terminator()
            else:
                # 3 cases:
                # 1) end of buffer matches terminator exactly:
                #    collect data, transition
                # 2) end of buffer matches some prefix:
                #    collect data to the prefix
                # 3) end of buffer does not match any prefix:
                #    collect data
                terminator_len = len(terminator)
                index = self.ac_in_buffer.find(terminator)
                if index != -1:
                    # we found the terminator
                    if index > 0:
                        # don't bother reporting the empty string
                        # (source of subtle bugs)
                        self.collect_incoming_data(self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator
                    # is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end(self.ac_in_buffer, terminator)
                    if index:
                        if index != lb:
                            # we found a prefix, collect up to the prefix
                            self.collect_incoming_data(self.ac_in_buffer[:-index])
                            self.ac_in_buffer = self.ac_in_buffer[-index:]
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data(self.ac_in_buffer)
                        self.ac_in_buffer = b''

    def handle_write(self):
        self.initiate_send()

    def handle_close(self):
        self.close()

    def push(self, data):
        """Queue *data* (split into buffer-sized chunks) and try to send."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            # BUGFIX: interpolate the type into the message; the original
            # passed type(data) as a second TypeError argument instead.
            raise TypeError('data argument must be byte-ish (%r)'
                            % type(data))
        sabs = self.ac_out_buffer_size
        if len(data) > sabs:
            for i in range(0, len(data), sabs):
                self.producer_fifo.append(data[i:i+sabs])
        else:
            self.producer_fifo.append(data)
        self.initiate_send()

    def push_with_producer(self, producer):
        self.producer_fifo.append(producer)
        self.initiate_send()

    def readable(self):
        "predicate for inclusion in the readable for select()"
        # cannot use the old predicate, it violates the claim of the
        # set_terminator method.
        # return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)
        return 1

    def writable(self):
        "predicate for inclusion in the writable for select()"
        return self.producer_fifo or (not self.connected)

    def close_when_done(self):
        "automatically close this channel once the outgoing queue is empty"
        self.producer_fifo.append(None)

    def initiate_send(self):
        # Drain the producer fifo onto the socket, one chunk per call.
        while self.producer_fifo and self.connected:
            first = self.producer_fifo[0]
            # handle empty string/buffer or None entry
            if not first:
                del self.producer_fifo[0]
                if first is None:
                    self.handle_close()
                    return

            # handle classic producer behavior
            obs = self.ac_out_buffer_size
            try:
                data = first[:obs]
            except TypeError:
                data = first.more()
                if data:
                    self.producer_fifo.appendleft(data)
                else:
                    del self.producer_fifo[0]
                continue

            if isinstance(data, str) and self.use_encoding:
                data = bytes(data, self.encoding)

            # send the data
            try:
                num_sent = self.send(data)
            except OSError:
                self.handle_error()
                return

            if num_sent:
                if num_sent < len(data) or obs < len(first):
                    self.producer_fifo[0] = first[num_sent:]
                else:
                    del self.producer_fifo[0]
            # we tried to send some actual data
            return

    def discard_buffers(self):
        # Emergencies only!
        self.ac_in_buffer = b''
        del self.incoming[:]
        self.producer_fifo.clear()
class simple_producer:
    """Serve *data* through more() in chunks of at most *buffer_size*."""

    def __init__(self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more(self):
        """Return the next chunk (empty bytes once exhausted)."""
        limit = self.buffer_size
        if len(self.data) <= limit:
            chunk, self.data = self.data, b''
        else:
            chunk, self.data = self.data[:limit], self.data[limit:]
        return chunk
class fifo:
    """Deprecated thin FIFO wrapper around collections.deque."""

    def __init__(self, list=None):
        import warnings
        warnings.warn('fifo class will be removed in Python 3.6',
                      DeprecationWarning, stacklevel=2)
        self.list = deque(list) if list else deque()

    def __len__(self):
        return len(self.list)

    def is_empty(self):
        return not self.list

    def first(self):
        return self.list[0]

    def push(self, data):
        self.list.append(data)

    def pop(self):
        """Return (1, item) when an item is available, else (0, None)."""
        if not self.list:
            return (0, None)
        return (1, self.list.popleft())
# Given 'haystack', see if any prefix of 'needle' is at its end. This
# assumes an exact match has already been checked. Return the number of
# characters matched.
# for example:
# f_p_a_e("qwerty\r", "\r\n") => 1
# f_p_a_e("qwertydkjf", "\r\n") => 0
# f_p_a_e("qwerty\r\n", "\r\n") => <undefined>
# this could maybe be made faster with a computed regex?
# [answer: no; circa Python-2.0, Jan 2001]
# new python: 28961/s
# old python: 18307/s
# re: 12820/s
# regex: 14035/s
def find_prefix_at_end(haystack, needle):
    """Return how many characters of a proper prefix of *needle* sit at
    the end of *haystack* (0 when none does).  Assumes an exact match of
    the full needle has already been ruled out by the caller."""
    size = len(needle) - 1
    while size:
        if haystack.endswith(needle[:size]):
            break
        size -= 1
    return size

View file

@@ -1,50 +0,0 @@
"""The asyncio package, tracking PEP 3156."""
import sys
# The selectors module is in the stdlib in Python 3.4 but not in 3.3.
# Do this first, so the other submodules can use "from . import selectors".
# Prefer asyncio/selectors.py over the stdlib one, as ours may be newer.
try:
from . import selectors
except ImportError:
import selectors # Will also be exported.
if sys.platform == 'win32':
# Similar thing for _overlapped.
try:
from . import _overlapped
except ImportError:
import _overlapped # Will also be exported.
# This relies on each of the submodules having an __all__ variable.
from .base_events import *
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
from .transports import *
__all__ = (base_events.__all__ +
coroutines.__all__ +
events.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
queues.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
from .windows_events import *
__all__ += windows_events.__all__
else:
from .unix_events import * # pragma: no cover
__all__ += unix_events.__all__

View file

@ -1,292 +0,0 @@
import collections
import subprocess
import warnings
from . import compat
from . import protocols
from . import transports
from .coroutines import coroutine
from .log import logger
class BaseSubprocessTransport(transports.SubprocessTransport):
    """Base transport for a child process.
    Subclasses implement _start() to actually spawn the process; this
    class connects the child's stdin/stdout/stderr to pipe protocols
    and tracks the process lifetime and exit status.
    """
    def __init__(self, loop, protocol, args, shell,
                 stdin, stdout, stderr, bufsize,
                 waiter=None, extra=None, **kwargs):
        super().__init__(extra)
        self._closed = False
        self._protocol = protocol
        self._loop = loop
        self._proc = None
        self._pid = None
        self._returncode = None
        self._exit_waiters = []
        self._pending_calls = collections.deque()
        self._pipes = {}
        self._finished = False
        # Reserve slots for the pipe transports that will be connected
        # later by _connect_pipes() (fd 0=stdin, 1=stdout, 2=stderr).
        if stdin == subprocess.PIPE:
            self._pipes[0] = None
        if stdout == subprocess.PIPE:
            self._pipes[1] = None
        if stderr == subprocess.PIPE:
            self._pipes[2] = None
        # Create the child process: set the _proc attribute
        try:
            self._start(args=args, shell=shell, stdin=stdin, stdout=stdout,
                        stderr=stderr, bufsize=bufsize, **kwargs)
        except:
            # Bare except on purpose: never leak a half-started transport;
            # close and re-raise whatever _start() failed with.
            self.close()
            raise
        self._pid = self._proc.pid
        self._extra['subprocess'] = self._proc
        if self._loop.get_debug():
            if isinstance(args, (bytes, str)):
                program = args
            else:
                program = args[0]
            logger.debug('process %r created: pid %s',
                         program, self._pid)
        # Pipe hookup happens asynchronously; waiter is resolved when done.
        self._loop.create_task(self._connect_pipes(waiter))
    def __repr__(self):
        info = [self.__class__.__name__]
        if self._closed:
            info.append('closed')
        if self._pid is not None:
            info.append('pid=%s' % self._pid)
        if self._returncode is not None:
            info.append('returncode=%s' % self._returncode)
        elif self._pid is not None:
            info.append('running')
        else:
            info.append('not started')
        stdin = self._pipes.get(0)
        if stdin is not None:
            info.append('stdin=%s' % stdin.pipe)
        stdout = self._pipes.get(1)
        stderr = self._pipes.get(2)
        if stdout is not None and stderr is stdout:
            info.append('stdout=stderr=%s' % stdout.pipe)
        else:
            if stdout is not None:
                info.append('stdout=%s' % stdout.pipe)
            if stderr is not None:
                info.append('stderr=%s' % stderr.pipe)
        return '<%s>' % ' '.join(info)
    def _start(self, args, shell, stdin, stdout, stderr, bufsize, **kwargs):
        # Subclass hook: must spawn the child and set self._proc.
        raise NotImplementedError
    def set_protocol(self, protocol):
        self._protocol = protocol
    def get_protocol(self):
        return self._protocol
    def is_closing(self):
        return self._closed
    def close(self):
        # Idempotent: close all pipe transports, then kill the child if it
        # is still running and we have not been notified of its exit.
        if self._closed:
            return
        self._closed = True
        for proto in self._pipes.values():
            if proto is None:
                continue
            proto.pipe.close()
        if (self._proc is not None
                # the child process finished?
                and self._returncode is None
                # the child process finished but the transport was not notified yet?
                and self._proc.poll() is None
                ):
            if self._loop.get_debug():
                logger.warning('Close running child process: kill %r', self)
            try:
                self._proc.kill()
            except ProcessLookupError:
                pass
            # Don't clear the _proc reference yet: _post_init() may still run
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            if not self._closed:
                warnings.warn("unclosed transport %r" % self, ResourceWarning)
                self.close()
    def get_pid(self):
        return self._pid
    def get_returncode(self):
        return self._returncode
    def get_pipe_transport(self, fd):
        # Return the pipe transport for fd (0/1/2), or None if absent.
        if fd in self._pipes:
            return self._pipes[fd].pipe
        else:
            return None
    def _check_proc(self):
        if self._proc is None:
            raise ProcessLookupError()
    def send_signal(self, signal):
        self._check_proc()
        self._proc.send_signal(signal)
    def terminate(self):
        self._check_proc()
        self._proc.terminate()
    def kill(self):
        self._check_proc()
        self._proc.kill()
    @coroutine
    def _connect_pipes(self, waiter):
        # Wire the child's pipes to pipe protocols, then flush any protocol
        # calls that were queued while the pipes were still connecting.
        try:
            proc = self._proc
            loop = self._loop
            if proc.stdin is not None:
                _, pipe = yield from loop.connect_write_pipe(
                    lambda: WriteSubprocessPipeProto(self, 0),
                    proc.stdin)
                self._pipes[0] = pipe
            if proc.stdout is not None:
                _, pipe = yield from loop.connect_read_pipe(
                    lambda: ReadSubprocessPipeProto(self, 1),
                    proc.stdout)
                self._pipes[1] = pipe
            if proc.stderr is not None:
                _, pipe = yield from loop.connect_read_pipe(
                    lambda: ReadSubprocessPipeProto(self, 2),
                    proc.stderr)
                self._pipes[2] = pipe
            assert self._pending_calls is not None
            loop.call_soon(self._protocol.connection_made, self)
            for callback, data in self._pending_calls:
                loop.call_soon(callback, *data)
            # None signals _call() that queuing is over.
            self._pending_calls = None
        except Exception as exc:
            if waiter is not None and not waiter.cancelled():
                waiter.set_exception(exc)
        else:
            if waiter is not None and not waiter.cancelled():
                waiter.set_result(None)
    def _call(self, cb, *data):
        # Queue protocol callbacks until the pipes are connected; after
        # that, schedule them directly on the loop.
        if self._pending_calls is not None:
            self._pending_calls.append((cb, data))
        else:
            self._loop.call_soon(cb, *data)
    def _pipe_connection_lost(self, fd, exc):
        self._call(self._protocol.pipe_connection_lost, fd, exc)
        self._try_finish()
    def _pipe_data_received(self, fd, data):
        self._call(self._protocol.pipe_data_received, fd, data)
    def _process_exited(self, returncode):
        assert returncode is not None, returncode
        assert self._returncode is None, self._returncode
        if self._loop.get_debug():
            logger.info('%r exited with return code %r',
                        self, returncode)
        self._returncode = returncode
        if self._proc.returncode is None:
            # asyncio uses a child watcher: copy the status into the Popen
            # object. On Python 3.6, it is required to avoid a ResourceWarning.
            self._proc.returncode = returncode
        self._call(self._protocol.process_exited)
        self._try_finish()
        # wake up futures waiting for wait()
        for waiter in self._exit_waiters:
            if not waiter.cancelled():
                waiter.set_result(returncode)
        self._exit_waiters = None
    @coroutine
    def _wait(self):
        """Wait until the process exit and return the process return code.
        This method is a coroutine."""
        if self._returncode is not None:
            return self._returncode
        waiter = self._loop.create_future()
        self._exit_waiters.append(waiter)
        return (yield from waiter)
    def _try_finish(self):
        # Only finish once the process exited AND every pipe disconnected.
        assert not self._finished
        if self._returncode is None:
            return
        if all(p is not None and p.disconnected
               for p in self._pipes.values()):
            self._finished = True
            self._call(self._call_connection_lost, None)
    def _call_connection_lost(self, exc):
        # Final notification; drop loop/proc/protocol refs to break cycles.
        try:
            self._protocol.connection_lost(exc)
        finally:
            self._loop = None
            self._proc = None
            self._protocol = None
class WriteSubprocessPipeProto(protocols.BaseProtocol):
    """Protocol attached to the write end (stdin) pipe of a subprocess."""

    def __init__(self, proc, fd):
        self.proc = proc            # owning subprocess transport
        self.fd = fd                # pipe fd number (0 for stdin)
        self.pipe = None            # pipe transport, set in connection_made()
        self.disconnected = False

    def connection_made(self, transport):
        self.pipe = transport

    def __repr__(self):
        return '<{} fd={} pipe={!r}>'.format(
            type(self).__name__, self.fd, self.pipe)

    def connection_lost(self, exc):
        self.disconnected = True
        self.proc._pipe_connection_lost(self.fd, exc)
        # Break the reference cycle with the owning transport.
        self.proc = None

    def pause_writing(self):
        # Forward flow control to the subprocess protocol.
        self.proc._protocol.pause_writing()

    def resume_writing(self):
        self.proc._protocol.resume_writing()
# Protocol for the read ends (stdout/stderr) of subprocess pipes: adds
# data delivery on top of the write-side lifecycle handling.
class ReadSubprocessPipeProto(WriteSubprocessPipeProto,
                              protocols.Protocol):
    def data_received(self, data):
        # Forward the chunk to the owning transport, tagged with our fd.
        self.proc._pipe_data_received(self.fd, data)

View file

@ -1,18 +0,0 @@
"""Compatibility helpers for the different Python versions."""
import sys
# Interpreter feature-detection flags used throughout the package.
PY34 = sys.version_info >= (3, 4)
PY35 = sys.version_info >= (3, 5)
PY352 = sys.version_info >= (3, 5, 2)


def flatten_list_bytes(list_of_data):
    """Concatenate a sequence of bytes-like objects into a single bytes."""
    if not PY34:
        # bytes.join() on Python 3.3 and older cannot handle memoryview;
        # coerce those to bytes first.
        list_of_data = [bytes(chunk) if isinstance(chunk, memoryview)
                        else chunk
                        for chunk in list_of_data]
    return b''.join(list_of_data)

View file

@ -1,7 +0,0 @@
"""Constants."""
# After the connection is lost, log warnings after this many write()s.
LOG_THRESHOLD_FOR_CONNLOST_WRITES = 5
# Seconds to wait before retrying accept().
ACCEPT_RETRY_DELAY = 1

View file

@ -1,344 +0,0 @@
__all__ = ['coroutine',
           'iscoroutinefunction', 'iscoroutine']
import functools
import inspect
import opcode
import os
import sys
import traceback
import types
from . import compat
from . import events
from . import futures
from .log import logger
# Opcode of "yield from" instruction
_YIELD_FROM = opcode.opmap['YIELD_FROM']
# If you set _DEBUG to true, @coroutine will wrap the resulting
# generator objects in a CoroWrapper instance (defined below). That
# instance will log a message when the generator is never iterated
# over, which may happen when you forget to use "yield from" with a
# coroutine call. Note that the value of the _DEBUG flag is taken
# when the decorator is used, so to be of any use it must be set
# before you define your coroutines. A downside of using this feature
# is that tracebacks show entries for the CoroWrapper.__next__ method
# when _DEBUG is true.
_DEBUG = (not sys.flags.ignore_environment and
          bool(os.environ.get('PYTHONASYNCIODEBUG')))
# Feature probes: on interpreters that predate native coroutine support
# these fall back to None / always-False sentinels.
try:
    _types_coroutine = types.coroutine
    _types_CoroutineType = types.CoroutineType
except AttributeError:
    # Python 3.4
    _types_coroutine = None
    _types_CoroutineType = None
try:
    _inspect_iscoroutinefunction = inspect.iscoroutinefunction
except AttributeError:
    # Python 3.4
    _inspect_iscoroutinefunction = lambda func: False
try:
    from collections.abc import Coroutine as _CoroutineABC, \
                                Awaitable as _AwaitableABC
except ImportError:
    _CoroutineABC = _AwaitableABC = None
# Check for CPython issue #21209: "yield from" may wrongly unpack the
# value passed to a custom send() of a subgenerator.
def has_yield_from_bug():
    class MyGen:
        def __init__(self):
            self.send_args = None

        def __iter__(self):
            return self

        def __next__(self):
            return 42

        def send(self, *what):
            self.send_args = what
            return None

    def yield_from_gen(gen):
        yield from gen

    probe = (1, 2, 3)
    gen = MyGen()
    coro = yield_from_gen(gen)
    next(coro)
    coro.send(probe)
    # A correct interpreter delivers the tuple unchanged as one argument.
    return gen.send_args != (probe,)


_YIELD_FROM_BUG = has_yield_from_bug()
del has_yield_from_bug
def debug_wrapper(gen):
    # This function is called from 'sys.set_coroutine_wrapper'.
    # We only wrap here coroutines defined via 'async def' syntax.
    # Generator-based coroutines are wrapped in @coroutine
    # decorator.
    # func=None: native coroutines have no decorated function to unwrap.
    return CoroWrapper(gen, None)
class CoroWrapper:
    # Wrapper for coroutine object in _DEBUG mode.
    # Proxies the generator/coroutine protocol to self.gen and logs an
    # error from __del__ when the coroutine was created but never run.
    def __init__(self, gen, func=None):
        assert inspect.isgenerator(gen) or inspect.iscoroutine(gen), gen
        self.gen = gen
        self.func = func  # Used to unwrap @coroutine decorator
        self._source_traceback = traceback.extract_stack(sys._getframe(1))
        self.__name__ = getattr(gen, '__name__', None)
        self.__qualname__ = getattr(gen, '__qualname__', None)
    def __repr__(self):
        coro_repr = _format_coroutine(self)
        if self._source_traceback:
            frame = self._source_traceback[-1]
            coro_repr += ', created at %s:%s' % (frame[0], frame[1])
        return '<%s %s>' % (self.__class__.__name__, coro_repr)
    def __iter__(self):
        return self
    def __next__(self):
        return self.gen.send(None)
    if _YIELD_FROM_BUG:
        # For CPython issue #21209: using "yield from" and a custom
        # generator, generator.send(tuple) unpacks the tuple instead of passing
        # the tuple unchanged. Check if the caller is a generator using "yield
        # from" to decide if the parameter should be unpacked or not.
        def send(self, *value):
            frame = sys._getframe()
            caller = frame.f_back
            assert caller.f_lasti >= 0
            if caller.f_code.co_code[caller.f_lasti] != _YIELD_FROM:
                value = value[0]
            return self.gen.send(value)
    else:
        def send(self, value):
            return self.gen.send(value)
    def throw(self, type, value=None, traceback=None):
        return self.gen.throw(type, value, traceback)
    def close(self):
        return self.gen.close()
    @property
    def gi_frame(self):
        return self.gen.gi_frame
    @property
    def gi_running(self):
        return self.gen.gi_running
    @property
    def gi_code(self):
        return self.gen.gi_code
    if compat.PY35:
        # On 3.5+ also mirror the native-coroutine (cr_*) attributes and
        # make the wrapper awaitable.
        def __await__(self):
            cr_await = getattr(self.gen, 'cr_await', None)
            if cr_await is not None:
                raise RuntimeError(
                    "Cannot await on coroutine {!r} while it's "
                    "awaiting for {!r}".format(self.gen, cr_await))
            return self
        @property
        def gi_yieldfrom(self):
            return self.gen.gi_yieldfrom
        @property
        def cr_await(self):
            return self.gen.cr_await
        @property
        def cr_running(self):
            return self.gen.cr_running
        @property
        def cr_code(self):
            return self.gen.cr_code
        @property
        def cr_frame(self):
            return self.gen.cr_frame
    def __del__(self):
        # Be careful accessing self.gen.frame -- self.gen might not exist.
        gen = getattr(self, 'gen', None)
        frame = getattr(gen, 'gi_frame', None)
        if frame is None:
            frame = getattr(gen, 'cr_frame', None)
        # f_lasti == -1 means the coroutine body never started executing.
        if frame is not None and frame.f_lasti == -1:
            msg = '%r was never yielded from' % self
            tb = getattr(self, '_source_traceback', ())
            if tb:
                tb = ''.join(traceback.format_list(tb))
                msg += ('\nCoroutine object created at '
                        '(most recent call last):\n')
                msg += tb.rstrip()
            logger.error(msg)
def coroutine(func):
    """Decorator to mark coroutines.
    If the coroutine is not yielded from before it is destroyed,
    an error message is logged.
    """
    if _inspect_iscoroutinefunction(func):
        # In Python 3.5 that's all we need to do for coroutines
        # defined with "async def".
        # Wrapping in CoroWrapper will happen via
        # 'sys.set_coroutine_wrapper' function.
        return func
    if inspect.isgeneratorfunction(func):
        coro = func
    else:
        # Plain function: wrap it in a generator that runs it and, when
        # the result is itself awaitable, delegates to it.
        @functools.wraps(func)
        def coro(*args, **kw):
            res = func(*args, **kw)
            if (futures.isfuture(res) or inspect.isgenerator(res) or
                    isinstance(res, CoroWrapper)):
                res = yield from res
            elif _AwaitableABC is not None:
                # If 'func' returns an Awaitable (new in 3.5) we
                # want to run it.
                try:
                    await_meth = res.__await__
                except AttributeError:
                    pass
                else:
                    if isinstance(res, _AwaitableABC):
                        res = yield from await_meth()
            return res
    if not _DEBUG:
        if _types_coroutine is None:
            wrapper = coro
        else:
            wrapper = _types_coroutine(coro)
    else:
        # Debug mode: wrap each produced generator in CoroWrapper so we
        # can report coroutines that are never iterated.
        @functools.wraps(func)
        def wrapper(*args, **kwds):
            w = CoroWrapper(coro(*args, **kwds), func=func)
            if w._source_traceback:
                del w._source_traceback[-1]
            # Python < 3.5 does not implement __qualname__
            # on generator objects, so we set it manually.
            # We use getattr as some callables (such as
            # functools.partial may lack __qualname__).
            w.__name__ = getattr(func, '__name__', None)
            w.__qualname__ = getattr(func, '__qualname__', None)
            return w
    wrapper._is_coroutine = _is_coroutine  # For iscoroutinefunction().
    return wrapper
# A marker for iscoroutinefunction.
# An opaque sentinel: identity ('is') comparison below ensures arbitrary
# truthy '_is_coroutine' attributes do not qualify.
_is_coroutine = object()
def iscoroutinefunction(func):
    """Return True if func is a decorated coroutine function."""
    return (getattr(func, '_is_coroutine', None) is _is_coroutine or
            _inspect_iscoroutinefunction(func))
# Types accepted by iscoroutine(), assembled from what this interpreter
# actually provides (see the feature probes above).
_COROUTINE_TYPES = (types.GeneratorType, CoroWrapper)
if _CoroutineABC is not None:
    _COROUTINE_TYPES += (_CoroutineABC,)
if _types_CoroutineType is not None:
    # Prioritize native coroutine check to speed-up
    # asyncio.iscoroutine.
    _COROUTINE_TYPES = (_types_CoroutineType,) + _COROUTINE_TYPES
def iscoroutine(obj):
    """Return True if obj is a coroutine object."""
    return isinstance(obj, _COROUTINE_TYPES)
def _format_coroutine(coro):
    # Build a human-readable description of *coro*: its name, whether it
    # is running/done, and where it is defined or currently executing.
    assert iscoroutine(coro)
    if not hasattr(coro, 'cr_code') and not hasattr(coro, 'gi_code'):
        # Most likely a built-in type or a Cython coroutine.
        # Built-in types might not have __qualname__ or __name__.
        coro_name = getattr(
            coro, '__qualname__',
            getattr(coro, '__name__', type(coro).__name__))
        coro_name = '{}()'.format(coro_name)
        running = False
        try:
            running = coro.cr_running
        except AttributeError:
            try:
                running = coro.gi_running
            except AttributeError:
                pass
        if running:
            return '{} running'.format(coro_name)
        else:
            return coro_name
    coro_name = None
    if isinstance(coro, CoroWrapper):
        func = coro.func
        coro_name = coro.__qualname__
        if coro_name is not None:
            coro_name = '{}()'.format(coro_name)
    else:
        func = coro
    if coro_name is None:
        coro_name = events._format_callback(func, (), {})
    # Either the gi_* (generator) or cr_* (native coroutine) attribute
    # set is present; code/frame give the location information.
    try:
        coro_code = coro.gi_code
    except AttributeError:
        coro_code = coro.cr_code
    try:
        coro_frame = coro.gi_frame
    except AttributeError:
        coro_frame = coro.cr_frame
    filename = coro_code.co_filename
    lineno = 0
    if (isinstance(coro, CoroWrapper) and
            not inspect.isgeneratorfunction(coro.func) and
            coro.func is not None):
        # Wrapped non-generator function: report the original function's
        # source location rather than the wrapper's.
        source = events._get_function_source(coro.func)
        if source is not None:
            filename, lineno = source
        if coro_frame is None:
            coro_repr = ('%s done, defined at %s:%s'
                         % (coro_name, filename, lineno))
        else:
            coro_repr = ('%s running, defined at %s:%s'
                         % (coro_name, filename, lineno))
    elif coro_frame is not None:
        lineno = coro_frame.f_lineno
        coro_repr = ('%s running at %s:%s'
                     % (coro_name, filename, lineno))
    else:
        lineno = coro_code.co_firstlineno
        coro_repr = ('%s done, defined at %s:%s'
                     % (coro_name, filename, lineno))
    return coro_repr

View file

@ -1,692 +0,0 @@
"""Event loop and event loop policy."""
__all__ = ['AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
'Handle', 'TimerHandle',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
'_set_running_loop', '_get_running_loop',
]
import functools
import inspect
import reprlib
import socket
import subprocess
import sys
import threading
import traceback
from asyncio import compat
def _get_function_source(func):
    # Return (filename, first line number) for *func* when it can be
    # determined, else None.  Unwraps decorator/partial layers first.
    if compat.PY34:
        func = inspect.unwrap(func)
    elif hasattr(func, '__wrapped__'):
        # Python 3.3: inspect.unwrap() is unavailable; peel one layer.
        func = func.__wrapped__
    if inspect.isfunction(func):
        code = func.__code__
        return (code.co_filename, code.co_firstlineno)
    if isinstance(func, functools.partial):
        return _get_function_source(func.func)
    if compat.PY34 and isinstance(func, functools.partialmethod):
        return _get_function_source(func.func)
    return None
def _format_args_and_kwargs(args, kwargs):
    """Format function arguments and keyword arguments.

    Special case for a single parameter: ('hello',) is formatted as ('hello').
    """
    # reprlib keeps each individual repr bounded in length.
    parts = [reprlib.repr(arg) for arg in args]
    parts.extend('{}={}'.format(name, reprlib.repr(value))
                 for name, value in kwargs.items())
    return '({})'.format(', '.join(parts))
def _format_callback(func, args, kwargs, suffix=''):
    """Render *func* plus its arguments as a human-readable call string."""
    if isinstance(func, functools.partial):
        # Unwrap the partial: its outer args become a suffix appended to
        # the rendering of the wrapped callable.
        suffix = _format_args_and_kwargs(args, kwargs) + suffix
        return _format_callback(func.func, func.args, func.keywords, suffix)
    for attr in ('__qualname__', '__name__'):
        if hasattr(func, attr):
            func_repr = getattr(func, attr)
            break
    else:
        func_repr = repr(func)
    func_repr += _format_args_and_kwargs(args, kwargs)
    if suffix:
        func_repr += suffix
    return func_repr
def _format_callback_source(func, args):
    # Like _format_callback(), with ' at <file>:<line>' appended when the
    # source location of *func* can be determined.
    func_repr = _format_callback(func, args, None)
    source = _get_function_source(func)
    if source:
        func_repr += ' at %s:%s' % source
    return func_repr
class Handle:
    """Object returned by callback registration methods."""
    __slots__ = ('_callback', '_args', '_cancelled', '_loop',
                 '_source_traceback', '_repr', '__weakref__')
    def __init__(self, callback, args, loop):
        self._loop = loop
        self._callback = callback
        self._args = args
        self._cancelled = False
        self._repr = None
        if self._loop.get_debug():
            # In debug mode, remember where the handle was created.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
        else:
            self._source_traceback = None
    def _repr_info(self):
        # Pieces of the repr; TimerHandle extends this list.
        info = [self.__class__.__name__]
        if self._cancelled:
            info.append('cancelled')
        if self._callback is not None:
            info.append(_format_callback_source(self._callback, self._args))
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info
    def __repr__(self):
        if self._repr is not None:
            # Frozen representation captured by cancel() in debug mode.
            return self._repr
        info = self._repr_info()
        return '<%s>' % ' '.join(info)
    def cancel(self):
        if not self._cancelled:
            self._cancelled = True
            if self._loop.get_debug():
                # Keep a representation in debug mode to keep callback and
                # parameters. For example, to log the warning
                # "Executing <Handle...> took 2.5 second"
                self._repr = repr(self)
            # Drop references so the callback and args can be collected.
            self._callback = None
            self._args = None
    def _run(self):
        try:
            self._callback(*self._args)
        except Exception as exc:
            # Route callback failures to the loop's exception handler.
            cb = _format_callback_source(self._callback, self._args)
            msg = 'Exception in callback {}'.format(cb)
            context = {
                'message': msg,
                'exception': exc,
                'handle': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
        self = None  # Needed to break cycles when an exception occurs.
class TimerHandle(Handle):
    """Object returned by timed callback registration methods."""
    __slots__ = ['_scheduled', '_when']
    def __init__(self, when, callback, args, loop):
        assert when is not None
        super().__init__(callback, args, loop)
        if self._source_traceback:
            # Drop the frame for Handle.__init__ itself.
            del self._source_traceback[-1]
        self._when = when
        self._scheduled = False
    def _repr_info(self):
        info = super()._repr_info()
        pos = 2 if self._cancelled else 1
        info.insert(pos, 'when=%s' % self._when)
        return info
    def __hash__(self):
        return hash(self._when)
    # Ordering comparisons use only the scheduled time (_when); equality
    # additionally compares callback, args and cancellation state.
    def __lt__(self, other):
        return self._when < other._when
    def __le__(self, other):
        if self._when < other._when:
            return True
        return self.__eq__(other)
    def __gt__(self, other):
        return self._when > other._when
    def __ge__(self, other):
        if self._when > other._when:
            return True
        return self.__eq__(other)
    def __eq__(self, other):
        if isinstance(other, TimerHandle):
            return (self._when == other._when and
                    self._callback == other._callback and
                    self._args == other._args and
                    self._cancelled == other._cancelled)
        return NotImplemented
    def __ne__(self, other):
        equal = self.__eq__(other)
        return NotImplemented if equal is NotImplemented else not equal
    def cancel(self):
        if not self._cancelled:
            # Tell the loop first so it can forget the scheduled timer.
            self._loop._timer_handle_cancelled(self)
        super().cancel()
class AbstractServer:
    """Abstract server returned by create_server()."""

    # NOTE: these stubs *return* NotImplemented rather than raising
    # NotImplementedError; concrete servers override both methods.
    def close(self):
        """Stop serving.  This leaves existing connections open."""
        return NotImplemented

    def wait_closed(self):
        """Coroutine to wait until service is closed."""
        return NotImplemented
class AbstractEventLoop:
    """Abstract event loop."""
    # Every method is a stub raising NotImplementedError, except
    # call_soon() which has a default implementation in terms of
    # call_later(); concrete loops override all of them.
    # Running and stopping the event loop.
    def run_forever(self):
        """Run the event loop until stop() is called."""
        raise NotImplementedError
    def run_until_complete(self, future):
        """Run the event loop until a Future is done.
        Return the Future's result, or raise its exception.
        """
        raise NotImplementedError
    def stop(self):
        """Stop the event loop as soon as reasonable.
        Exactly how soon that is may depend on the implementation, but
        no more I/O callbacks should be scheduled.
        """
        raise NotImplementedError
    def is_running(self):
        """Return whether the event loop is currently running."""
        raise NotImplementedError
    def is_closed(self):
        """Returns True if the event loop was closed."""
        raise NotImplementedError
    def close(self):
        """Close the loop.
        The loop should not be running.
        This is idempotent and irreversible.
        No other methods should be called after this one.
        """
        raise NotImplementedError
    def shutdown_asyncgens(self):
        """Shutdown all active asynchronous generators."""
        raise NotImplementedError
    # Methods scheduling callbacks. All these return Handles.
    def _timer_handle_cancelled(self, handle):
        """Notification that a TimerHandle has been cancelled."""
        raise NotImplementedError
    def call_soon(self, callback, *args):
        # Default implementation: an immediate callback is a timer with
        # zero delay.
        return self.call_later(0, callback, *args)
    def call_later(self, delay, callback, *args):
        raise NotImplementedError
    def call_at(self, when, callback, *args):
        raise NotImplementedError
    def time(self):
        raise NotImplementedError
    def create_future(self):
        raise NotImplementedError
    # Method scheduling a coroutine object: create a task.
    def create_task(self, coro):
        raise NotImplementedError
    # Methods for interacting with threads.
    def call_soon_threadsafe(self, callback, *args):
        raise NotImplementedError
    def run_in_executor(self, executor, func, *args):
        raise NotImplementedError
    def set_default_executor(self, executor):
        raise NotImplementedError
    # Network I/O methods returning Futures.
    def getaddrinfo(self, host, port, *, family=0, type=0, proto=0, flags=0):
        raise NotImplementedError
    def getnameinfo(self, sockaddr, flags=0):
        raise NotImplementedError
    def create_connection(self, protocol_factory, host=None, port=None, *,
                          ssl=None, family=0, proto=0, flags=0, sock=None,
                          local_addr=None, server_hostname=None):
        raise NotImplementedError
    def create_server(self, protocol_factory, host=None, port=None, *,
                      family=socket.AF_UNSPEC, flags=socket.AI_PASSIVE,
                      sock=None, backlog=100, ssl=None, reuse_address=None,
                      reuse_port=None):
        """A coroutine which creates a TCP server bound to host and port.
        The return value is a Server object which can be used to stop
        the service.
        If host is an empty string or None all interfaces are assumed
        and a list of multiple sockets will be returned (most likely
        one for IPv4 and another one for IPv6). The host parameter can also be a
        sequence (e.g. list) of hosts to bind to.
        family can be set to either AF_INET or AF_INET6 to force the
        socket to use IPv4 or IPv6. If not set it will be determined
        from host (defaults to AF_UNSPEC).
        flags is a bitmask for getaddrinfo().
        sock can optionally be specified in order to use a preexisting
        socket object.
        backlog is the maximum number of queued connections passed to
        listen() (defaults to 100).
        ssl can be set to an SSLContext to enable SSL over the
        accepted connections.
        reuse_address tells the kernel to reuse a local socket in
        TIME_WAIT state, without waiting for its natural timeout to
        expire. If not specified will automatically be set to True on
        UNIX.
        reuse_port tells the kernel to allow this endpoint to be bound to
        the same port as other existing endpoints are bound to, so long as
        they all set this flag when being created. This option is not
        supported on Windows.
        """
        raise NotImplementedError
    def create_unix_connection(self, protocol_factory, path, *,
                               ssl=None, sock=None,
                               server_hostname=None):
        raise NotImplementedError
    def create_unix_server(self, protocol_factory, path, *,
                           sock=None, backlog=100, ssl=None):
        """A coroutine which creates a UNIX Domain Socket server.
        The return value is a Server object, which can be used to stop
        the service.
        path is a str, representing a file systsem path to bind the
        server socket to.
        sock can optionally be specified in order to use a preexisting
        socket object.
        backlog is the maximum number of queued connections passed to
        listen() (defaults to 100).
        ssl can be set to an SSLContext to enable SSL over the
        accepted connections.
        """
        raise NotImplementedError
    def create_datagram_endpoint(self, protocol_factory,
                                 local_addr=None, remote_addr=None, *,
                                 family=0, proto=0, flags=0,
                                 reuse_address=None, reuse_port=None,
                                 allow_broadcast=None, sock=None):
        """A coroutine which creates a datagram endpoint.
        This method will try to establish the endpoint in the background.
        When successful, the coroutine returns a (transport, protocol) pair.
        protocol_factory must be a callable returning a protocol instance.
        socket family AF_INET or socket.AF_INET6 depending on host (or
        family if specified), socket type SOCK_DGRAM.
        reuse_address tells the kernel to reuse a local socket in
        TIME_WAIT state, without waiting for its natural timeout to
        expire. If not specified it will automatically be set to True on
        UNIX.
        reuse_port tells the kernel to allow this endpoint to be bound to
        the same port as other existing endpoints are bound to, so long as
        they all set this flag when being created. This option is not
        supported on Windows and some UNIX's. If the
        :py:data:`~socket.SO_REUSEPORT` constant is not defined then this
        capability is unsupported.
        allow_broadcast tells the kernel to allow this endpoint to send
        messages to the broadcast address.
        sock can optionally be specified in order to use a preexisting
        socket object.
        """
        raise NotImplementedError
    # Pipes and subprocesses.
    def connect_read_pipe(self, protocol_factory, pipe):
        """Register read pipe in event loop. Set the pipe to non-blocking mode.
        protocol_factory should instantiate object with Protocol interface.
        pipe is a file-like object.
        Return pair (transport, protocol), where transport supports the
        ReadTransport interface."""
        # The reason to accept file-like object instead of just file descriptor
        # is: we need to own pipe and close it at transport finishing
        # Can got complicated errors if pass f.fileno(),
        # close fd in pipe transport then close f and vise versa.
        raise NotImplementedError
    def connect_write_pipe(self, protocol_factory, pipe):
        """Register write pipe in event loop.
        protocol_factory should instantiate object with BaseProtocol interface.
        Pipe is file-like object already switched to nonblocking.
        Return pair (transport, protocol), where transport support
        WriteTransport interface."""
        # The reason to accept file-like object instead of just file descriptor
        # is: we need to own pipe and close it at transport finishing
        # Can got complicated errors if pass f.fileno(),
        # close fd in pipe transport then close f and vise versa.
        raise NotImplementedError
    def subprocess_shell(self, protocol_factory, cmd, *, stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                         **kwargs):
        raise NotImplementedError
    def subprocess_exec(self, protocol_factory, *args, stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                        **kwargs):
        raise NotImplementedError
    # Ready-based callback registration methods.
    # The add_*() methods return None.
    # The remove_*() methods return True if something was removed,
    # False if there was nothing to delete.
    def add_reader(self, fd, callback, *args):
        raise NotImplementedError
    def remove_reader(self, fd):
        raise NotImplementedError
    def add_writer(self, fd, callback, *args):
        raise NotImplementedError
    def remove_writer(self, fd):
        raise NotImplementedError
    # Completion based I/O methods returning Futures.
    def sock_recv(self, sock, nbytes):
        raise NotImplementedError
    def sock_sendall(self, sock, data):
        raise NotImplementedError
    def sock_connect(self, sock, address):
        raise NotImplementedError
    def sock_accept(self, sock):
        raise NotImplementedError
    # Signal handling.
    def add_signal_handler(self, sig, callback, *args):
        raise NotImplementedError
    def remove_signal_handler(self, sig):
        raise NotImplementedError
    # Task factory.
    def set_task_factory(self, factory):
        raise NotImplementedError
    def get_task_factory(self):
        raise NotImplementedError
    # Error handlers.
    def get_exception_handler(self):
        raise NotImplementedError
    def set_exception_handler(self, handler):
        raise NotImplementedError
    def default_exception_handler(self, context):
        raise NotImplementedError
    def call_exception_handler(self, context):
        raise NotImplementedError
    # Debug flag management.
    def get_debug(self):
        raise NotImplementedError
    def set_debug(self, enabled):
        raise NotImplementedError
class AbstractEventLoopPolicy:
    """Abstract policy for accessing the event loop."""

    def get_event_loop(self):
        """Get the event loop for the current context.

        Returns an event loop object implementing the BaseEventLoop
        interface, or raises an exception in case no event loop has
        been set for the current context and the current policy does
        not specify to create one.  It should never return None.
        """
        raise NotImplementedError

    def set_event_loop(self, loop):
        """Set the event loop for the current context to loop."""
        raise NotImplementedError

    def new_event_loop(self):
        """Create and return a new event loop object according to this
        policy's rules.  If there's need to set this loop as the event
        loop for the current context, set_event_loop must be called
        explicitly.
        """
        raise NotImplementedError

    # Child processes handling (Unix only).

    def get_child_watcher(self):
        """Get the watcher for child processes."""
        raise NotImplementedError

    def set_child_watcher(self, watcher):
        """Set the watcher for child processes."""
        raise NotImplementedError
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
    """Default policy implementation for accessing the event loop.

    In this policy, each thread has its own event loop.  However, we
    only automatically create an event loop by default for the main
    thread; other threads by default have no event loop.

    Other policies may have different rules (e.g. a single global
    event loop, or automatically creating an event loop per thread, or
    using some other notion of context to which an event loop is
    associated).
    """
    _loop_factory = None

    class _Local(threading.local):
        # Per-thread state: the loop itself, and whether set_event_loop()
        # was ever called explicitly (suppresses implicit creation).
        _loop = None
        _set_called = False

    def __init__(self):
        self._local = self._Local()

    def get_event_loop(self):
        """Get the event loop.

        This may be None or an instance of EventLoop.
        """
        local = self._local
        if (local._loop is None
                and not local._set_called
                and isinstance(threading.current_thread(),
                               threading._MainThread)):
            # First access from the main thread: create a loop implicitly.
            self.set_event_loop(self.new_event_loop())
        if local._loop is None:
            raise RuntimeError('There is no current event loop in thread %r.'
                               % threading.current_thread().name)
        return local._loop

    def set_event_loop(self, loop):
        """Set the event loop."""
        self._local._set_called = True
        assert loop is None or isinstance(loop, AbstractEventLoop)
        self._local._loop = loop

    def new_event_loop(self):
        """Create a new event loop.

        You must call set_event_loop() to make this the current event
        loop.
        """
        return self._loop_factory()
# Event loop policy. The policy itself is always global, even if the
# policy's rules say that there is an event loop per thread (or other
# notion of context). The default policy is installed by the first
# call to get_event_loop_policy().
_event_loop_policy = None
# Lock for protecting the on-the-fly creation of the event loop policy.
_lock = threading.Lock()
# A TLS for the running event loop, used by _get_running_loop.
class _RunningLoop(threading.local):
    # The loop currently running in this thread; set by _set_running_loop().
    _loop = None
_running_loop = _RunningLoop()
def _get_running_loop():
    """Return the running event loop or None.

    This is a low-level function intended to be used by event loops.
    This function is thread-specific (it reads thread-local storage).
    """
    return _running_loop._loop
def _set_running_loop(loop):
    """Set the running event loop (None to clear it).

    This is a low-level function intended to be used by event loops.
    This function is thread-specific (it writes thread-local storage).
    """
    _running_loop._loop = loop
def _init_event_loop_policy():
    """Install the default policy, at most once, under _lock."""
    global _event_loop_policy
    with _lock:
        if _event_loop_policy is None:  # pragma: no branch
            # Imported lazily to avoid a circular import at module load time.
            from . import DefaultEventLoopPolicy
            _event_loop_policy = DefaultEventLoopPolicy()
def get_event_loop_policy():
    """Get the current event loop policy, creating the default one lazily
    on first use."""
    if _event_loop_policy is None:
        _init_event_loop_policy()
    return _event_loop_policy
def set_event_loop_policy(policy):
    """Set the current event loop policy.

    If policy is None, the default policy is restored."""
    global _event_loop_policy
    # NOTE(review): assert is stripped under `python -O`, so this input
    # validation silently disappears in optimized mode — consider TypeError.
    assert policy is None or isinstance(policy, AbstractEventLoopPolicy)
    _event_loop_policy = policy
def get_event_loop():
    """Return an asyncio event loop.

    Inside a running loop (e.g. from a coroutine or a callback scheduled
    with call_soon) this always returns that running loop; otherwise it
    defers to ``get_event_loop_policy().get_event_loop()``.
    """
    loop = _get_running_loop()
    if loop is None:
        return get_event_loop_policy().get_event_loop()
    return loop
def set_event_loop(loop):
    """Equivalent to calling get_event_loop_policy().set_event_loop(loop).

    Installs *loop* as the current context's event loop."""
    get_event_loop_policy().set_event_loop(loop)
def new_event_loop():
    """Equivalent to calling get_event_loop_policy().new_event_loop().

    Returns a new, not-yet-installed event loop."""
    return get_event_loop_policy().new_event_loop()
def get_child_watcher():
    """Equivalent to calling get_event_loop_policy().get_child_watcher().

    Child-process watching is Unix only."""
    return get_event_loop_policy().get_child_watcher()
def set_child_watcher(watcher):
    """Equivalent to calling
    get_event_loop_policy().set_child_watcher(watcher).

    Child-process watching is Unix only."""
    return get_event_loop_policy().set_child_watcher(watcher)

View file

@ -1,479 +0,0 @@
"""A Future class similar to the one in PEP 3148."""
__all__ = ['CancelledError', 'TimeoutError',
'InvalidStateError',
'Future', 'wrap_future', 'isfuture',
]
import concurrent.futures._base
import logging
import reprlib
import sys
import traceback
from . import compat
from . import events
# States for Future.
_PENDING = 'PENDING'
_CANCELLED = 'CANCELLED'
_FINISHED = 'FINISHED'
# Re-export the concurrent.futures exception types so callers can catch
# them through this module.
Error = concurrent.futures._base.Error
CancelledError = concurrent.futures.CancelledError
TimeoutError = concurrent.futures.TimeoutError
STACK_DEBUG = logging.DEBUG - 1  # heavy-duty debugging
class InvalidStateError(Error):
    """The operation is not allowed in this state.

    Raised e.g. when result() is called on a future that is not done.
    """
class _TracebackLogger:
    """Log a stored exception traceback on destruction, unless cleared.

    set_exception() attaches one of these helpers to a Future instead of
    giving the Future itself a __del__ (a __del__ on the Future would
    prevent the cycle collector from breaking reference cycles on
    Python 3.3).  If result()/exception() is never called, the helper is
    eventually collected with the Future and its __del__ reports the
    never-retrieved exception through the loop's exception handler;
    retrieving the result clears the helper first so nothing is logged.

    The traceback text is only formatted when activate() runs, which the
    Future delays until after its callbacks have had a chance to consume
    the exception — usually one of them does, so the formatting work is
    skipped.  Keeping only the formatted text (not the exception object)
    ensures the helper never participates in a reference cycle.
    """
    __slots__ = ('loop', 'source_traceback', 'exc', 'tb')

    def __init__(self, future, exc):
        self.loop = future._loop
        self.source_traceback = future._source_traceback
        self.exc = exc
        self.tb = None

    def activate(self):
        # Swap the live exception out and keep only its formatted text.
        exc, self.exc = self.exc, None
        if exc is not None:
            self.tb = traceback.format_exception(exc.__class__, exc,
                                                 exc.__traceback__)

    def clear(self):
        # The exception was retrieved; make __del__ a no-op.
        self.exc = None
        self.tb = None

    def __del__(self):
        if not self.tb:
            return
        msg = 'Future/Task exception was never retrieved\n'
        if self.source_traceback:
            src = ''.join(traceback.format_list(self.source_traceback))
            msg += 'Future/Task created at (most recent call last):\n'
            msg += '%s\n' % src.rstrip()
        msg += ''.join(self.tb).rstrip()
        self.loop.call_exception_handler({'message': msg})
def isfuture(obj):
    """Check for a Future.

    True when *obj* is a Future instance, or when it advertises itself
    as duck-type compatible by exposing a non-None
    ``_asyncio_future_blocking`` attribute on its class (see the comment
    in Future for details).
    """
    if not hasattr(obj.__class__, '_asyncio_future_blocking'):
        return False
    return obj._asyncio_future_blocking is not None
class Future:
    """This class is *almost* compatible with concurrent.futures.Future.

    Differences:
    - result() and exception() do not take a timeout argument and
      raise an exception when the future isn't done yet.
    - Callbacks registered with add_done_callback() are always called
      via the event loop's call_soon_threadsafe().
    - This class is not compatible with the wait() and as_completed()
      methods in the concurrent.futures package.
    (In Python 3.4 or later we may be able to unify the implementations.)
    """
    # Class variables serving as defaults for instance variables.
    _state = _PENDING
    _result = None
    _exception = None
    _loop = None
    _source_traceback = None
    # This field is used for a dual purpose:
    # - Its presence is a marker to declare that a class implements
    #   the Future protocol (i.e. is intended to be duck-type compatible).
    #   The value must also be not-None, to enable a subclass to declare
    #   that it is not compatible by setting this to None.
    # - It is set by __iter__() below so that Task._step() can tell
    #   the difference between `yield from Future()` (correct) vs.
    #   `yield Future()` (incorrect).
    _asyncio_future_blocking = False
    _log_traceback = False  # Used for Python 3.4 and later
    _tb_logger = None  # Used for Python 3.3 only
    def __init__(self, *, loop=None):
        """Initialize the future.

        The optional event_loop argument allows explicitly setting the event
        loop object used by the future. If it's not provided, the future uses
        the default event loop.
        """
        if loop is None:
            self._loop = events.get_event_loop()
        else:
            self._loop = loop
        self._callbacks = []
        if self._loop.get_debug():
            # Remember where the future was created for debug-mode reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
    def __format_callbacks(self):
        """Return a compact 'cb=[...]' summary of the registered callbacks,
        eliding the middle when there are more than two."""
        cb = self._callbacks
        size = len(cb)
        if not size:
            cb = ''
        def format_cb(callback):
            return events._format_callback_source(callback, ())
        if size == 1:
            cb = format_cb(cb[0])
        elif size == 2:
            cb = '{}, {}'.format(format_cb(cb[0]), format_cb(cb[1]))
        elif size > 2:
            cb = '{}, <{} more>, {}'.format(format_cb(cb[0]),
                                            size-2,
                                            format_cb(cb[-1]))
        return 'cb=[%s]' % cb
    def _repr_info(self):
        """Return the list of strings joined into __repr__'s body."""
        info = [self._state.lower()]
        if self._state == _FINISHED:
            if self._exception is not None:
                info.append('exception={!r}'.format(self._exception))
            else:
                # use reprlib to limit the length of the output, especially
                # for very long strings
                result = reprlib.repr(self._result)
                info.append('result={}'.format(result))
        if self._callbacks:
            info.append(self.__format_callbacks())
        if self._source_traceback:
            frame = self._source_traceback[-1]
            info.append('created at %s:%s' % (frame[0], frame[1]))
        return info
    def __repr__(self):
        info = self._repr_info()
        return '<%s %s>' % (self.__class__.__name__, ' '.join(info))
    # On Python 3.3 and older, objects with a destructor part of a reference
    # cycle are never destroyed. It's not more the case on Python 3.4 thanks
    # to the PEP 442.
    if compat.PY34:
        def __del__(self):
            """Report an exception that was set but never retrieved."""
            if not self._log_traceback:
                # set_exception() was not called, or result() or exception()
                # has consumed the exception
                return
            exc = self._exception
            context = {
                'message': ('%s exception was never retrieved'
                            % self.__class__.__name__),
                'exception': exc,
                'future': self,
            }
            if self._source_traceback:
                context['source_traceback'] = self._source_traceback
            self._loop.call_exception_handler(context)
    def cancel(self):
        """Cancel the future and schedule callbacks.

        If the future is already done or cancelled, return False. Otherwise,
        change the future's state to cancelled, schedule the callbacks and
        return True.
        """
        if self._state != _PENDING:
            return False
        self._state = _CANCELLED
        self._schedule_callbacks()
        return True
    def _schedule_callbacks(self):
        """Internal: Ask the event loop to call all callbacks.

        The callbacks are scheduled to be called as soon as possible. Also
        clears the callback list.
        """
        callbacks = self._callbacks[:]
        if not callbacks:
            return
        self._callbacks[:] = []
        for callback in callbacks:
            self._loop.call_soon(callback, self)
    def cancelled(self):
        """Return True if the future was cancelled."""
        return self._state == _CANCELLED
    # Don't implement running(); see http://bugs.python.org/issue18699
    def done(self):
        """Return True if the future is done.

        Done means either that a result / exception are available, or that the
        future was cancelled.
        """
        return self._state != _PENDING
    def result(self):
        """Return the result this future represents.

        If the future has been cancelled, raises CancelledError. If the
        future's result isn't yet available, raises InvalidStateError. If
        the future is done and has an exception set, this exception is raised.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Result is not ready.')
        # The exception (if any) now counts as retrieved: stop the
        # never-retrieved logging machinery (both PY34 and PY33 variants).
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        if self._exception is not None:
            raise self._exception
        return self._result
    def exception(self):
        """Return the exception that was set on this future.

        The exception (or None if no exception was set) is returned only if
        the future is done. If the future has been cancelled, raises
        CancelledError. If the future isn't done yet, raises
        InvalidStateError.
        """
        if self._state == _CANCELLED:
            raise CancelledError
        if self._state != _FINISHED:
            raise InvalidStateError('Exception is not set.')
        # Accessing the exception counts as retrieving it: disable logging.
        self._log_traceback = False
        if self._tb_logger is not None:
            self._tb_logger.clear()
            self._tb_logger = None
        return self._exception
    def add_done_callback(self, fn):
        """Add a callback to be run when the future becomes done.

        The callback is called with a single argument - the future object. If
        the future is already done when this is called, the callback is
        scheduled with call_soon.
        """
        if self._state != _PENDING:
            self._loop.call_soon(fn, self)
        else:
            self._callbacks.append(fn)
    # New method not in PEP 3148.
    def remove_done_callback(self, fn):
        """Remove all instances of a callback from the "call when done" list.

        Returns the number of callbacks removed.
        """
        filtered_callbacks = [f for f in self._callbacks if f != fn]
        removed_count = len(self._callbacks) - len(filtered_callbacks)
        if removed_count:
            self._callbacks[:] = filtered_callbacks
        return removed_count
    # So-called internal methods (note: no set_running_or_notify_cancel()).
    def set_result(self, result):
        """Mark the future done and set its result.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        self._result = result
        self._state = _FINISHED
        self._schedule_callbacks()
    def set_exception(self, exception):
        """Mark the future done and set an exception.

        If the future is already done when this method is called, raises
        InvalidStateError.
        """
        if self._state != _PENDING:
            raise InvalidStateError('{}: {!r}'.format(self._state, self))
        if isinstance(exception, type):
            exception = exception()
        # StopIteration raised out of a Future would be swallowed by the
        # generator machinery driving the Task, so reject it up front.
        if type(exception) is StopIteration:
            raise TypeError("StopIteration interacts badly with generators "
                            "and cannot be raised into a Future")
        self._exception = exception
        self._state = _FINISHED
        self._schedule_callbacks()
        if compat.PY34:
            self._log_traceback = True
        else:
            self._tb_logger = _TracebackLogger(self, exception)
            # Arrange for the logger to be activated after all callbacks
            # have had a chance to call result() or exception().
            self._loop.call_soon(self._tb_logger.activate)
    def __iter__(self):
        """Support 'yield from future' (and 'await future' via __await__)."""
        if not self.done():
            self._asyncio_future_blocking = True
            yield self  # This tells Task to wait for completion.
        assert self.done(), "yield from wasn't used with future"
        return self.result()  # May raise too.
    if compat.PY35:
        __await__ = __iter__  # make compatible with 'await' expression
def _set_result_unless_cancelled(fut, result):
    """Set *result* on *fut*, silently skipping already-cancelled futures."""
    if not fut.cancelled():
        fut.set_result(result)
def _set_concurrent_future_state(concurrent, source):
    """Copy the terminal state of *source* (a done future) onto
    *concurrent* (a concurrent.futures.Future)."""
    assert source.done()
    if source.cancelled():
        concurrent.cancel()
    if not concurrent.set_running_or_notify_cancel():
        # The target was cancelled from its own side; nothing to copy.
        return
    exc = source.exception()
    if exc is None:
        concurrent.set_result(source.result())
    else:
        concurrent.set_exception(exc)
def _copy_future_state(source, dest):
    """Internal helper to copy state from another Future.

    The other Future may be a concurrent.futures.Future.
    """
    assert source.done()
    if dest.cancelled():
        # Destination was cancelled independently; drop the source state.
        return
    assert not dest.done()
    if source.cancelled():
        dest.cancel()
        return
    exc = source.exception()
    if exc is None:
        dest.set_result(source.result())
    else:
        dest.set_exception(exc)
def _chain_future(source, destination):
    """Chain two futures so that when one completes, so does the other.

    The result (or exception) of source will be copied to destination.
    If destination is cancelled, source gets cancelled too.
    Compatible with both asyncio.Future and concurrent.futures.Future.
    """
    if not isfuture(source) and not isinstance(source,
                                               concurrent.futures.Future):
        raise TypeError('A future is required for source argument')
    if not isfuture(destination) and not isinstance(destination,
                                                    concurrent.futures.Future):
        raise TypeError('A future is required for destination argument')
    # concurrent.futures.Future has no loop affinity; None means "no loop".
    source_loop = source._loop if isfuture(source) else None
    dest_loop = destination._loop if isfuture(destination) else None
    def _set_state(future, other):
        if isfuture(future):
            _copy_future_state(other, future)
        else:
            _set_concurrent_future_state(future, other)
    def _call_check_cancel(destination):
        # Propagate cancellation of the destination back to the source,
        # hopping onto the source's loop when the loops differ.
        if destination.cancelled():
            if source_loop is None or source_loop is dest_loop:
                source.cancel()
            else:
                source_loop.call_soon_threadsafe(source.cancel)
    def _call_set_state(source):
        # Copy the completed source state into the destination, hopping
        # onto the destination's loop when the loops differ.
        if dest_loop is None or dest_loop is source_loop:
            _set_state(destination, source)
        else:
            dest_loop.call_soon_threadsafe(_set_state, destination, source)
    destination.add_done_callback(_call_check_cancel)
    source.add_done_callback(_call_set_state)
def wrap_future(future, *, loop=None):
    """Wrap a concurrent.futures.Future into an asyncio future.

    asyncio futures (and duck-type compatible objects) are returned
    unchanged.  *loop* defaults to the current event loop.
    """
    if isfuture(future):
        return future
    assert isinstance(future, concurrent.futures.Future), \
        'concurrent.futures.Future is expected, got {!r}'.format(future)
    if loop is None:
        loop = events.get_event_loop()
    wrapped = loop.create_future()
    _chain_future(future, wrapped)
    return wrapped

View file

@ -1,478 +0,0 @@
"""Synchronization primitives."""
__all__ = ['Lock', 'Event', 'Condition', 'Semaphore', 'BoundedSemaphore']
import collections
from . import compat
from . import events
from . import futures
from .coroutines import coroutine
class _ContextManager:
    """Context manager.

    This enables the following idiom for acquiring and releasing a
    lock around a block:
        with (yield from lock):
            <block>
    while failing loudly when accidentally using:
        with lock:
            <block>
    """
    def __init__(self, lock):
        self._lock = lock
    def __enter__(self):
        # We have no use for the "as ..." clause in the with
        # statement for locks.
        return None
    def __exit__(self, *args):
        try:
            self._lock.release()
        finally:
            self._lock = None  # Crudely prevent reuse.
class _ContextManagerMixin:
    """Mixin giving locks the (a)sync context-manager protocols.

    Plain ``with lock:`` is rejected; use ``with (yield from lock):``,
    ``with await lock:`` or ``async with lock:`` instead.
    """
    def __enter__(self):
        raise RuntimeError(
            '"yield from" should be used as context manager expression')
    def __exit__(self, *args):
        # This must exist because __enter__ exists, even though that
        # always raises; that's how the with-statement works.
        pass
    @coroutine
    def __iter__(self):
        # This is not a coroutine. It is meant to enable the idiom:
        #
        #     with (yield from lock):
        #         <block>
        #
        # as an alternative to:
        #
        #     yield from lock.acquire()
        #     try:
        #         <block>
        #     finally:
        #         lock.release()
        yield from self.acquire()
        return _ContextManager(self)
    if compat.PY35:
        def __await__(self):
            # To make "with await lock" work.
            yield from self.acquire()
            return _ContextManager(self)
        @coroutine
        def __aenter__(self):
            yield from self.acquire()
            # We have no use for the "as ..." clause in the with
            # statement for locks.
            return None
        @coroutine
        def __aexit__(self, exc_type, exc, tb):
            self.release()
class Lock(_ContextManagerMixin):
    """Primitive lock objects.

    A primitive lock is a synchronization primitive that is not owned
    by a particular coroutine when locked. A primitive lock is in one
    of two states, 'locked' or 'unlocked'.
    It is created in the unlocked state. It has two basic methods,
    acquire() and release(). When the state is unlocked, acquire()
    changes the state to locked and returns immediately. When the
    state is locked, acquire() blocks until a call to release() in
    another coroutine changes it to unlocked, then the acquire() call
    resets it to locked and returns. The release() method should only
    be called in the locked state; it changes the state to unlocked
    and returns immediately. If an attempt is made to release an
    unlocked lock, a RuntimeError will be raised.

    When more than one coroutine is blocked in acquire() waiting for
    the state to turn to unlocked, only one coroutine proceeds when a
    release() call resets the state to unlocked; first coroutine which
    is blocked in acquire() is being processed.

    acquire() is a coroutine and should be called with 'yield from'.
    Locks also support the context management protocol. '(yield from lock)'
    should be used as the context manager expression.

    Usage:
        lock = Lock()
        ...
        yield from lock
        try:
            ...
        finally:
            lock.release()

    Context manager usage:
        lock = Lock()
        ...
        with (yield from lock):
            ...

    Lock objects can be tested for locking state:
        if not lock.locked():
            yield from lock
        else:
            # lock is acquired
            ...
    """
    def __init__(self, *, loop=None):
        self._waiters = collections.deque()
        self._locked = False
        if loop is not None:
            self._loop = loop
        else:
            self._loop = events.get_event_loop()

    def __repr__(self):
        res = super().__repr__()
        extra = 'locked' if self._locked else 'unlocked'
        if self._waiters:
            extra = '{},waiters:{}'.format(extra, len(self._waiters))
        return '<{} [{}]>'.format(res[1:-1], extra)

    def locked(self):
        """Return True if lock is acquired."""
        return self._locked

    @coroutine
    def acquire(self):
        """Acquire a lock.

        This method blocks until the lock is unlocked, then sets it to
        locked and returns True.
        """
        # Fast path: lock is free and there are no live waiters.
        if not self._locked and all(w.cancelled() for w in self._waiters):
            self._locked = True
            return True

        fut = self._loop.create_future()
        self._waiters.append(fut)

        # The inner finally runs before the CancelledError handler so a
        # cancelled waiter is already removed and never wakes itself up
        # (upstream fix for bpo-32841).
        try:
            try:
                yield from fut
            finally:
                self._waiters.remove(fut)
        except futures.CancelledError:
            # release() may have already handed us the lock (set_result
            # ran) before this task was cancelled.  The lock is free but
            # no other waiter was woken -- without this, the remaining
            # waiters would sleep forever.
            if not self._locked:
                self._wake_up_first()
            raise

        self._locked = True
        return True

    def _wake_up_first(self):
        """Wake up the first waiter that isn't done yet, if any."""
        for fut in self._waiters:
            if not fut.done():
                fut.set_result(True)
                break

    def release(self):
        """Release a lock.

        When the lock is locked, reset it to unlocked, and return.
        If any other coroutines are blocked waiting for the lock to become
        unlocked, allow exactly one of them to proceed.
        When invoked on an unlocked lock, a RuntimeError is raised.
        There is no return value.
        """
        if self._locked:
            self._locked = False
            self._wake_up_first()
        else:
            raise RuntimeError('Lock is not acquired.')
class Event:
    """Asynchronous equivalent to threading.Event.

    Class implementing event objects. An event manages a flag that can be set
    to true with the set() method and reset to false with the clear() method.
    The wait() method blocks until the flag is true. The flag is initially
    false.
    """
    def __init__(self, *, loop=None):
        self._waiters = collections.deque()
        self._value = False
        if loop is not None:
            self._loop = loop
        else:
            self._loop = events.get_event_loop()
    def __repr__(self):
        res = super().__repr__()
        extra = 'set' if self._value else 'unset'
        if self._waiters:
            extra = '{},waiters:{}'.format(extra, len(self._waiters))
        return '<{} [{}]>'.format(res[1:-1], extra)
    def is_set(self):
        """Return True if and only if the internal flag is true."""
        return self._value
    def set(self):
        """Set the internal flag to true. All coroutines waiting for it to
        become true are awakened. Coroutine that call wait() once the flag is
        true will not block at all.
        """
        if not self._value:
            self._value = True
            # Wake every waiter; futures that are already done (e.g. a
            # cancelled waiter) are skipped.
            for fut in self._waiters:
                if not fut.done():
                    fut.set_result(True)
    def clear(self):
        """Reset the internal flag to false. Subsequently, coroutines calling
        wait() will block until set() is called to set the internal flag
        to true again."""
        self._value = False
    @coroutine
    def wait(self):
        """Block until the internal flag is true.

        If the internal flag is true on entry, return True
        immediately. Otherwise, block until another coroutine calls
        set() to set the flag to true, then return True.
        """
        if self._value:
            return True
        fut = self._loop.create_future()
        self._waiters.append(fut)
        try:
            yield from fut
            return True
        finally:
            # Always deregister the waiter, even when cancelled.
            self._waiters.remove(fut)
class Condition(_ContextManagerMixin):
    """Asynchronous equivalent to threading.Condition.

    This class implements condition variable objects. A condition variable
    allows one or more coroutines to wait until they are notified by another
    coroutine.
    A new Lock object is created and used as the underlying lock.
    """
    def __init__(self, lock=None, *, loop=None):
        if loop is not None:
            self._loop = loop
        else:
            self._loop = events.get_event_loop()
        if lock is None:
            lock = Lock(loop=self._loop)
        elif lock._loop is not self._loop:
            raise ValueError("loop argument must agree with lock")
        self._lock = lock
        # Export the lock's locked(), acquire() and release() methods.
        self.locked = lock.locked
        self.acquire = lock.acquire
        self.release = lock.release
        self._waiters = collections.deque()
    def __repr__(self):
        res = super().__repr__()
        extra = 'locked' if self.locked() else 'unlocked'
        if self._waiters:
            extra = '{},waiters:{}'.format(extra, len(self._waiters))
        return '<{} [{}]>'.format(res[1:-1], extra)
    @coroutine
    def wait(self):
        """Wait until notified.

        If the calling coroutine has not acquired the lock when this
        method is called, a RuntimeError is raised.
        This method releases the underlying lock, and then blocks
        until it is awakened by a notify() or notify_all() call for
        the same condition variable in another coroutine. Once
        awakened, it re-acquires the lock and returns True.
        """
        if not self.locked():
            raise RuntimeError('cannot wait on un-acquired lock')
        self.release()
        try:
            fut = self._loop.create_future()
            self._waiters.append(fut)
            try:
                yield from fut
                return True
            finally:
                self._waiters.remove(fut)
        finally:
            # Must reacquire lock even if wait is cancelled
            while True:
                try:
                    yield from self.acquire()
                    break
                except futures.CancelledError:
                    # Swallow cancellation until the lock is re-held;
                    # the outer CancelledError (if any) still propagates.
                    pass
    @coroutine
    def wait_for(self, predicate):
        """Wait until a predicate becomes true.

        The predicate should be a callable which result will be
        interpreted as a boolean value. The final predicate value is
        the return value.
        """
        result = predicate()
        while not result:
            yield from self.wait()
            result = predicate()
        return result
    def notify(self, n=1):
        """By default, wake up one coroutine waiting on this condition, if any.

        If the calling coroutine has not acquired the lock when this method
        is called, a RuntimeError is raised.
        This method wakes up at most n of the coroutines waiting for the
        condition variable; it is a no-op if no coroutines are waiting.
        Note: an awakened coroutine does not actually return from its
        wait() call until it can reacquire the lock. Since notify() does
        not release the lock, its caller should.
        """
        if not self.locked():
            raise RuntimeError('cannot notify on un-acquired lock')
        idx = 0
        for fut in self._waiters:
            if idx >= n:
                break
            if not fut.done():
                idx += 1
                fut.set_result(False)
    def notify_all(self):
        """Wake up all threads waiting on this condition. This method acts
        like notify(), but wakes up all waiting threads instead of one. If the
        calling thread has not acquired the lock when this method is called,
        a RuntimeError is raised.
        """
        self.notify(len(self._waiters))
class Semaphore(_ContextManagerMixin):
    """A Semaphore implementation.

    A semaphore manages an internal counter which is decremented by each
    acquire() call and incremented by each release() call. The counter
    can never go below zero; when acquire() finds that it is zero, it blocks,
    waiting until some other thread calls release().
    Semaphores also support the context management protocol.
    The optional argument gives the initial value for the internal
    counter; it defaults to 1. If the value given is less than 0,
    ValueError is raised.
    """
    def __init__(self, value=1, *, loop=None):
        if value < 0:
            raise ValueError("Semaphore initial value must be >= 0")
        self._value = value
        self._waiters = collections.deque()
        if loop is not None:
            self._loop = loop
        else:
            self._loop = events.get_event_loop()
    def __repr__(self):
        res = super().__repr__()
        extra = 'locked' if self.locked() else 'unlocked,value:{}'.format(
            self._value)
        if self._waiters:
            extra = '{},waiters:{}'.format(extra, len(self._waiters))
        return '<{} [{}]>'.format(res[1:-1], extra)
    def _wake_up_next(self):
        # Pop waiters until one that is still pending is found; done
        # futures belong to cancelled acquirers and are discarded.
        while self._waiters:
            waiter = self._waiters.popleft()
            if not waiter.done():
                waiter.set_result(None)
                return
    def locked(self):
        """Returns True if semaphore can not be acquired immediately."""
        return self._value == 0
    @coroutine
    def acquire(self):
        """Acquire a semaphore.

        If the internal counter is larger than zero on entry,
        decrement it by one and return True immediately. If it is
        zero on entry, block, waiting until some other coroutine has
        called release() to make it larger than 0, and then return
        True.
        """
        while self._value <= 0:
            fut = self._loop.create_future()
            self._waiters.append(fut)
            try:
                yield from fut
            except:
                # See the similar code in Queue.get.
                fut.cancel()
                if self._value > 0 and not fut.cancelled():
                    # We were woken but cancelled simultaneously: pass
                    # the wake-up on so the release isn't lost.
                    self._wake_up_next()
                raise
        self._value -= 1
        return True
    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When it was zero on entry and another coroutine is waiting for it to
        become larger than zero again, wake up that coroutine.
        """
        self._value += 1
        self._wake_up_next()
class BoundedSemaphore(Semaphore):
    """A bounded semaphore implementation.

    This raises ValueError in release() if it would increase the value
    above the initial value.
    """
    def __init__(self, value=1, *, loop=None):
        # Remember the initial value so release() can enforce the bound.
        self._bound_value = value
        super().__init__(value, loop=loop)
    def release(self):
        if self._value >= self._bound_value:
            raise ValueError('BoundedSemaphore released too many times')
        super().release()

Some files were not shown because too many files have changed in this diff Show more