2020-04-22 03:09:46 +00:00
|
|
|
from __future__ import annotations
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
import shutil
|
2020-06-21 13:32:31 +00:00
|
|
|
import typing
|
2021-11-28 03:06:30 +00:00
|
|
|
import builtins
|
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import pickle
|
|
|
|
import functools
|
|
|
|
import io
|
|
|
|
import collections
|
|
|
|
import importlib
|
|
|
|
import logging
|
2022-05-18 20:30:19 +00:00
|
|
|
|
|
|
|
if typing.TYPE_CHECKING:
|
|
|
|
from tkinter import Tk
|
|
|
|
else:
|
|
|
|
Tk = typing.Any
|
2020-06-21 13:32:31 +00:00
|
|
|
|
|
|
|
|
2021-08-06 17:33:17 +00:00
|
|
|
def tuplize_version(version: str) -> Version:
    """Parse a dotted version string such as "0.3.2" into a Version tuple."""
    numbers = [int(part, 10) for part in version.split(".")]
    return Version(*numbers)
|
|
|
|
|
2020-06-21 13:32:31 +00:00
|
|
|
|
2020-12-29 18:23:14 +00:00
|
|
|
class Version(typing.NamedTuple):
    """Semantic version triple; compares and orders like a plain tuple."""
    # (major, minor, build), e.g. Version(0, 3, 2) for "0.3.2"
    major: int
    minor: int
    build: int
|
2020-04-22 03:09:46 +00:00
|
|
|
|
2021-07-01 23:29:49 +00:00
|
|
|
|
2022-04-05 01:54:49 +00:00
|
|
|
__version__ = "0.3.2"
|
2021-06-18 20:15:54 +00:00
|
|
|
version_tuple = tuplize_version(__version__)
|
2020-04-20 12:50:49 +00:00
|
|
|
|
2022-05-09 05:18:50 +00:00
|
|
|
import jellyfish
|
2022-04-12 08:57:29 +00:00
|
|
|
from yaml import load, load_all, dump, SafeLoader
|
2020-02-16 14:32:40 +00:00
|
|
|
|
|
|
|
try:
|
|
|
|
from yaml import CLoader as Loader
|
|
|
|
except ImportError:
|
|
|
|
from yaml import Loader
|
|
|
|
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def int16_as_bytes(value: int) -> typing.List[int]:
    """Split *value* into its two little-endian bytes (low byte first)."""
    low = value & 0xFF
    high = (value >> 8) & 0xFF
    return [low, high]
|
|
|
|
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def int32_as_bytes(value: int) -> typing.List[int]:
    """Split *value* into its four little-endian bytes (low byte first)."""
    masked = value & 0xFFFFFFFF
    return [(masked >> shift) & 0xFF for shift in (0, 8, 16, 24)]
|
|
|
|
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def pc_to_snes(value: int) -> int:
    """Convert a PC ROM file offset to its LoROM SNES address."""
    bank_bits = (value << 1) & 0x7F0000
    offset_bits = value & 0x7FFF
    # LoROM addresses always have bit 15 set within the bank
    return bank_bits | offset_bits | 0x8000
|
2018-09-23 02:51:54 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def snes_to_pc(value: int) -> int:
    """Convert a LoROM SNES address back to a PC ROM file offset."""
    bank_part = (value & 0x7F0000) >> 1
    offset_part = value & 0x7FFF
    return bank_part | offset_part
|
2018-09-23 02:51:54 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
RetType = typing.TypeVar("RetType")
|
|
|
|
|
|
|
|
|
|
|
|
def cache_argsless(function: typing.Callable[[], RetType]) -> typing.Callable[[], RetType]:
    """Memoize a zero-argument callable: the wrapped function runs at most once
    and its result is returned on every later call."""
    assert not function.__code__.co_argcount, "Can only cache 0 argument functions with this cache."

    # one-element cell holding either the unset marker or the cached result
    _unset = object()
    cell: typing.List[typing.Union[object, RetType]] = [_unset]

    def _wrap() -> RetType:
        if cell[0] is _unset:
            cell[0] = function()
        return cell[0]  # type: ignore[return-value]

    return _wrap
|
|
|
|
|
|
|
|
|
2021-07-19 19:52:08 +00:00
|
|
|
def is_frozen() -> bool:
    """True when running from a frozen bundle (PyInstaller/cx_Freeze set sys.frozen)."""
    frozen_attr = getattr(sys, 'frozen', False)
    return typing.cast(bool, frozen_attr)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def local_path(*path: str) -> str:
    """Returns path to a file in the local Archipelago installation or source."""
    # The resolved base directory is cached as an attribute on the function
    # itself, so the detection logic below runs only once per process.
    if hasattr(local_path, 'cached_path'):
        pass
    elif is_frozen():
        if hasattr(sys, "_MEIPASS"):
            # we are running in a PyInstaller bundle
            local_path.cached_path = sys._MEIPASS  # pylint: disable=protected-access,no-member
        else:
            # cx_Freeze
            local_path.cached_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    else:
        import __main__
        if hasattr(__main__, "__file__"):
            # we are running in a normal Python environment
            local_path.cached_path = os.path.dirname(os.path.abspath(__main__.__file__))
        else:
            # pray: no entry-point file available, fall back to the cwd
            local_path.cached_path = os.path.abspath(".")

    return os.path.join(local_path.cached_path, *path)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def home_path(*path: str) -> str:
    """Returns path to a file in the user home's Archipelago directory."""
    # Base directory is computed once and cached on the function object.
    if hasattr(home_path, 'cached_path'):
        pass
    elif sys.platform.startswith('linux'):
        home_path.cached_path = os.path.expanduser('~/Archipelago')
        # 0o700: keep the directory private to the current user
        os.makedirs(home_path.cached_path, 0o700, exist_ok=True)
    else:
        # not implemented for other platforms
        home_path.cached_path = local_path()  # this will generate the same exceptions we got previously

    return os.path.join(home_path.cached_path, *path)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2020-08-25 11:22:47 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def user_path(*path: str) -> str:
    """Returns either local_path or home_path based on write permissions."""
    # Base directory resolved once and cached on the function object.
    if hasattr(user_path, 'cached_path'):
        pass
    elif os.access(local_path(), os.W_OK):
        # installation directory is writable: use it directly
        user_path.cached_path = local_path()
    else:
        user_path.cached_path = home_path()
        # populate home from local - TODO: upgrade feature
        # host.yaml's absence is used as the "first run" marker
        if user_path.cached_path != local_path() and not os.path.exists(user_path('host.yaml')):
            for dn in ('Players', 'data/sprites'):
                shutil.copytree(local_path(dn), user_path(dn), dirs_exist_ok=True)
            for fn in ('manifest.json', 'host.yaml'):
                shutil.copy2(local_path(fn), user_path(fn))

    return os.path.join(user_path.cached_path, *path)
|
|
|
|
|
|
|
|
|
|
|
|
def output_path(*path: str):
    """Return a path inside the configured output directory, creating the
    target file's parent directories on first use."""
    if hasattr(output_path, 'cached_path'):
        return os.path.join(output_path.cached_path, *path)
    # first call: resolve the output directory from host options and cache it
    output_path.cached_path = user_path(get_options()["general_options"]["output_path"])
    path = os.path.join(output_path.cached_path, *path)
    # ensure the directory for the requested file exists
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2017-11-28 14:36:32 +00:00
|
|
|
def open_file(filename):
    """Open *filename* with the operating system's default application."""
    if sys.platform == 'win32':
        os.startfile(filename)
        return
    # macOS ships `open`; assume freedesktop's `xdg-open` everywhere else
    opener = 'open' if sys.platform == 'darwin' else 'xdg-open'
    subprocess.call([opener, filename])
|
2017-12-02 14:21:04 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-01-19 03:26:25 +00:00
|
|
|
# from https://gist.github.com/pypt/94d747fe5180851196eb#gistcomment-4015118 with some changes
|
|
|
|
class UniqueKeyLoader(SafeLoader):
    """yaml SafeLoader that rejects duplicate mapping keys instead of silently
    keeping the last occurrence (default PyYAML behavior)."""
    def construct_mapping(self, node, deep=False):
        mapping = set()  # keys already seen in this mapping node
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key in mapping:
                # start_mark includes file/line info for the offending key
                logging.error(f"YAML duplicates sanity check failed{key_node.start_mark}")
                raise KeyError(f"Duplicate key {key} found in YAML. Already found keys: {mapping}.")
            mapping.add(key)
        # defer actual construction to the base loader once uniqueness is verified
        return super().construct_mapping(node, deep)
|
|
|
|
|
|
|
|
|
|
|
|
parse_yaml = functools.partial(load, Loader=UniqueKeyLoader)
|
2022-04-12 08:57:29 +00:00
|
|
|
parse_yamls = functools.partial(load_all, Loader=UniqueKeyLoader)
|
2020-07-05 00:06:00 +00:00
|
|
|
unsafe_parse_yaml = functools.partial(load, Loader=Loader)
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-11-13 22:14:26 +00:00
|
|
|
def get_cert_none_ssl_context():
    """Build an SSL context that skips certificate and hostname verification."""
    import ssl
    context = ssl.create_default_context()
    # hostname checking must be disabled before CERT_NONE may be set
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    return context
|
|
|
|
|
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_public_ipv4() -> str:
    """Return this machine's public IPv4 address as reported by a web service.

    Falls back to the hostname's resolved address when both services are
    unreachable (offline / LAN-only play)."""
    import socket
    import urllib.request
    # default fallback if both lookups below fail
    ip = socket.gethostbyname(socket.gethostname())
    ctx = get_cert_none_ssl_context()
    try:
        ip = urllib.request.urlopen('https://checkip.amazonaws.com/', context=ctx).read().decode('utf8').strip()
    except Exception as e:
        try:
            ip = urllib.request.urlopen('https://v4.ident.me', context=ctx).read().decode('utf8').strip()
        except Exception:
            # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit still propagate
            logging.exception(e)
            pass  # we could be offline, in a local game, so no point in erroring out
    return ip
|
2020-03-15 18:32:00 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_public_ipv6() -> str:
    """Return this machine's public IPv6 address as reported by a web service.

    Falls back to the hostname's resolved (IPv4) address when the lookup
    fails, e.g. offline or without IPv6 connectivity."""
    import socket
    import urllib.request
    # default fallback if the lookup below fails
    ip = socket.gethostbyname(socket.gethostname())
    ctx = get_cert_none_ssl_context()
    try:
        ip = urllib.request.urlopen('https://v6.ident.me', context=ctx).read().decode('utf8').strip()
    except Exception as e:
        logging.exception(e)
        pass  # we could be offline, in a local game, or ipv6 may not be available
    return ip
|
2020-03-15 18:32:00 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_default_options() -> dict:
    """Return the built-in default host options.

    These are the baseline values that options.yaml/host.yaml entries are
    merged over by update_options()/get_options()."""
    # Refer to host.yaml for comments as to what all these options mean.
    options = {
        "general_options": {
            "output_path": "output",
        },
        "factorio_options": {
            "executable": os.path.join("factorio", "bin", "x64", "factorio"),
        },
        "sm_options": {
            "rom_file": "Super Metroid (JU).sfc",
            "sni": "SNI",
            "rom_start": True,
        },
        "soe_options": {
            "rom_file": "Secret of Evermore (USA).sfc",
        },
        "lttp_options": {
            "rom_file": "Zelda no Densetsu - Kamigami no Triforce (Japan).sfc",
            "sni": "SNI",
            "rom_start": True,
        },
        "server_options": {
            "host": None,
            "port": 38281,
            "password": None,
            "multidata": None,
            "savefile": None,
            "disable_save": False,
            "loglevel": "info",
            "server_password": None,
            "disable_item_cheat": False,
            "location_check_points": 1,
            "hint_cost": 10,
            "forfeit_mode": "goal",
            "collect_mode": "disabled",
            "remaining_mode": "goal",
            "auto_shutdown": 0,
            "compatibility": 2,
            "log_network": 0
        },
        "generator": {
            "teams": 1,
            "enemizer_path": os.path.join("EnemizerCLI", "EnemizerCLI.Core"),
            "player_files_path": "Players",
            "players": 0,
            "weights_file_path": "weights.yaml",
            "meta_file_path": "meta.yaml",
            "spoiler": 2,
            "glitch_triforce_room": 1,
            "race": 0,
            "plando_options": "bosses",
        },
        "minecraft_options": {
            "forge_directory": "Minecraft Forge server",
            "max_heap_size": "2G",
            "release_channel": "release"
        },
        "oot_options": {
            "rom_file": "The Legend of Zelda - Ocarina of Time.z64",
        }
    }
    return options
|
2020-11-28 19:34:29 +00:00
|
|
|
|
2020-11-30 15:43:13 +00:00
|
|
|
|
2020-11-28 22:51:13 +00:00
|
|
|
def update_options(src: dict, dest: dict, filename: str, keys: list) -> dict:
    """Recursively merge default options from *src* into *dest* in place.

    Existing scalar values in *dest* win; missing keys are filled from *src*.
    *keys* tracks the dotted path for log messages; warnings are only emitted
    for files whose name ends with "options.yaml". Returns *dest*."""
    warn = filename.endswith("options.yaml")
    for key, value in src.items():
        new_keys = keys + [key]
        option_name = '.'.join(new_keys)
        if key not in dest:
            dest[key] = value
            if warn:
                logging.info(f"Warning: (unknown) is missing {option_name}")
        elif isinstance(value, dict):
            if isinstance(dest.get(key, None), dict):
                dest[key] = update_options(value, dest[key], filename, new_keys)
            else:
                if warn:
                    logging.info(f"Warning: (unknown) has {option_name}, but it is not a dictionary. overwriting.")
                dest[key] = value
    return dest
|
2020-11-28 19:34:29 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_options() -> dict:
    """Load host options from options.yaml/host.yaml, merged over the built-in
    defaults. The parsed result is memoized on the function object."""
    if not hasattr(get_options, "options"):
        filenames = ("options.yaml", "host.yaml")
        locations = []
        if os.path.join(os.getcwd()) != local_path():
            locations += filenames  # use files from cwd only if it's not the local_path
        locations += [user_path(filename) for filename in filenames]

        # first existing candidate wins
        for location in locations:
            if os.path.exists(location):
                with open(location) as f:
                    options = parse_yaml(f.read())

                get_options.options = update_options(get_default_options(), options, location, list())
                break
        else:
            # for/else: no candidate file existed
            raise FileNotFoundError(f"Could not find {filenames[1]} to load options.")
    return get_options.options
|
2020-04-14 18:22:42 +00:00
|
|
|
|
|
|
|
|
2021-04-28 13:48:11 +00:00
|
|
|
def get_item_name_from_id(code: int) -> str:
    """Map a network item id to its display name, with a fallback for unknown ids."""
    from worlds import lookup_any_item_id_to_name
    fallback = f'Unknown item (ID:{code})'
    return lookup_any_item_id_to_name.get(code, fallback)
|
2020-04-14 18:22:42 +00:00
|
|
|
|
|
|
|
|
2021-04-28 13:48:11 +00:00
|
|
|
def get_location_name_from_id(code: int) -> str:
    """Map a network location id to its display name, with a fallback for unknown ids."""
    from worlds import lookup_any_location_id_to_name
    fallback = f'Unknown location (ID:{code})'
    return lookup_any_location_id_to_name.get(code, fallback)
|
2020-04-14 18:22:42 +00:00
|
|
|
|
|
|
|
|
2021-02-25 01:07:28 +00:00
|
|
|
def persistent_store(category: str, key: typing.Any, value: typing.Any):
    """Persist *value* under *category*/*key* in the user's persistent storage yaml."""
    path = user_path("_persistent_storage.yaml")
    storage: dict = persistent_load()
    # NOTE: `category` is rebound here from the str name to its dict section
    category = storage.setdefault(category, {})
    category[key] = value
    # rewrite the entire storage file with the updated contents
    with open(path, "wt") as f:
        f.write(dump(storage))
|
|
|
|
|
|
|
|
|
2020-04-26 13:14:30 +00:00
|
|
|
def persistent_load() -> typing.Dict[str, dict]:
    """Load (and memoize) the user's persistent storage yaml as a dict.

    Returns an empty dict when the file is missing, unreadable, or empty.
    (Annotation fixed: `typing.Dict[dict]` is invalid — Dict takes a key and
    a value type.)"""
    storage = getattr(persistent_load, "storage", None)
    if storage:
        return storage
    path = user_path("_persistent_storage.yaml")
    storage: dict = {}
    if os.path.exists(path):
        try:
            with open(path, "r") as f:
                storage = unsafe_parse_yaml(f.read())
        except Exception as e:
            # best-effort load: a corrupt store is treated as empty
            logging.debug(f"Could not read store: {e}")
    if storage is None:
        # yaml returns None for an empty document
        storage = {}
    persistent_load.storage = storage
    return storage
|
|
|
|
|
|
|
|
|
2022-01-20 03:19:58 +00:00
|
|
|
def get_adjuster_settings(gameName: str):
    """Fetch the stored adjuster settings for *gameName* ({} when none saved)."""
    adjuster_section = persistent_load().get("adjuster", {})
    return adjuster_section.get(gameName, {})
|
2020-06-07 19:04:33 +00:00
|
|
|
|
2021-07-31 13:13:55 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_unique_identifier():
    """Return a stable per-installation identifier, creating and persisting
    one (from uuid.getnode()) on first use."""
    existing = persistent_load().get("client", {}).get("uuid", None)
    if existing:
        return existing

    # no stored id yet: derive one from the hardware address and persist it
    import uuid
    node_id = uuid.getnode()
    persistent_store("client", "uuid", node_id)
    return node_id
|
2020-09-08 23:41:37 +00:00
|
|
|
|
|
|
|
|
|
|
|
safe_builtins = {
|
|
|
|
'set',
|
|
|
|
'frozenset',
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
class RestrictedUnpickler(pickle.Unpickler):
    """pickle.Unpickler that only resolves an explicit allow-list of globals,
    guarding against arbitrary code execution from untrusted pickle data."""

    def __init__(self, *args, **kwargs):
        super(RestrictedUnpickler, self).__init__(*args, **kwargs)
        # modules whose members may be unpickled, resolved once up front
        self.options_module = importlib.import_module("Options")
        self.net_utils_module = importlib.import_module("NetUtils")
        self.generic_properties_module = importlib.import_module("worlds.generic")

    def find_class(self, module, name):
        # allow a small set of harmless builtins (see module-level safe_builtins)
        if module == "builtins" and name in safe_builtins:
            return getattr(builtins, name)
        # used by MultiServer -> savegame/multidata
        if module == "NetUtils" and name in {"NetworkItem", "ClientStatus", "Hint", "SlotType", "NetworkSlot"}:
            return getattr(self.net_utils_module, name)
        # Options and Plando are unpickled by WebHost -> Generate
        if module == "worlds.generic" and name in {"PlandoItem", "PlandoConnection"}:
            return getattr(self.generic_properties_module, name)
        # any *Options module may only yield Option subclasses
        if module.endswith("Options"):
            if module == "Options":
                mod = self.options_module
            else:
                mod = importlib.import_module(module)
            obj = getattr(mod, name)
            if issubclass(obj, self.options_module.Option):
                return obj
        # Forbid everything else.
        raise pickle.UnpicklingError("global '%s.%s' is forbidden" %
                                     (module, name))
|
|
|
|
|
|
|
|
|
|
|
|
def restricted_loads(s):
    """Helper function analogous to pickle.loads()."""
    buffer = io.BytesIO(s)
    return RestrictedUnpickler(buffer).load()
|
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
|
2021-07-07 08:14:58 +00:00
|
|
|
class KeyedDefaultDict(collections.defaultdict):
    """defaultdict variant whose default_factory receives the missing key."""
    def __missing__(self, key):
        value = self.default_factory(key)
        self[key] = value
        return value
|
|
|
|
|
|
|
|
|
|
|
|
def get_text_between(text: str, start: str, end: str) -> str:
    """Return the substring after the first *start* and before the last *end*.

    Raises ValueError when either marker is absent."""
    begin = text.index(start) + len(start)
    finish = text.rindex(end)
    return text[begin:finish]
|
2021-11-10 14:35:43 +00:00
|
|
|
|
|
|
|
|
|
|
|
loglevel_mapping = {'error': logging.ERROR, 'info': logging.INFO, 'warning': logging.WARNING, 'debug': logging.DEBUG}
|
|
|
|
|
|
|
|
|
|
|
|
def init_logging(name: str, loglevel: typing.Union[str, int] = logging.INFO, write_mode: str = "w",
                 log_format: str = "[%(name)s at %(asctime)s]: %(message)s", exception_logger: str = ""):
    """Configure root logging: a file handler writing to the user logs folder
    (``{name}.txt``), an optional stdout echo, and an excepthook wrapper that
    logs uncaught exceptions to *exception_logger*."""
    # accept either a level name ("info") or a numeric level
    loglevel: int = loglevel_mapping.get(loglevel, loglevel)
    log_folder = user_path("logs")
    os.makedirs(log_folder, exist_ok=True)
    root_logger = logging.getLogger()
    # drop (and close) any handlers left over from a previous init
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)
        handler.close()
    root_logger.setLevel(loglevel)
    # utf-8-sig keeps the log readable in Windows editors expecting a BOM
    file_handler = logging.FileHandler(
        os.path.join(log_folder, f"{name}.txt"),
        write_mode,
        encoding="utf-8-sig")
    file_handler.setFormatter(logging.Formatter(log_format))
    root_logger.addHandler(file_handler)
    # sys.stdout can be None in windowed/frozen builds
    if sys.stdout:
        root_logger.addHandler(
            logging.StreamHandler(sys.stdout)
        )

    # Relay unhandled exceptions to logger.
    if not getattr(sys.excepthook, "_wrapped", False):  # skip if already modified
        orig_hook = sys.excepthook

        def handle_exception(exc_type, exc_value, exc_traceback):
            # let Ctrl+C terminate normally without a logged traceback
            if issubclass(exc_type, KeyboardInterrupt):
                sys.__excepthook__(exc_type, exc_value, exc_traceback)
                return
            logging.getLogger(exception_logger).exception("Uncaught exception",
                                                          exc_info=(exc_type, exc_value, exc_traceback))
            return orig_hook(exc_type, exc_value, exc_traceback)

        # marker so a second init_logging call does not wrap the hook again
        handle_exception._wrapped = True

        sys.excepthook = handle_exception
|
2021-11-28 03:06:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def stream_input(stream, queue):
    """Forward non-empty stripped lines from *stream* onto *queue* via a
    daemon thread; returns the started thread."""
    def _pump():
        while 1:
            line = stream.readline().strip()
            if line:
                queue.put_nowait(line)

    from threading import Thread
    pump_thread = Thread(target=_pump, name=f"Stream handler for {stream.name}", daemon=True)
    pump_thread.start()
    return pump_thread
|
2022-01-18 07:23:38 +00:00
|
|
|
|
|
|
|
|
2022-01-20 03:19:58 +00:00
|
|
|
def tkinter_center_window(window: Tk):
    """Move *window* to the center of the screen."""
    window.update()  # make sure requested width/height are up to date
    x_position = int(window.winfo_screenwidth() / 2 - window.winfo_reqwidth() / 2)
    y_position = int(window.winfo_screenheight() / 2 - window.winfo_reqheight() / 2)
    window.geometry("+{}+{}".format(x_position, y_position))
|
|
|
|
|
2022-02-24 03:47:01 +00:00
|
|
|
|
2022-01-18 07:23:38 +00:00
|
|
|
class VersionException(Exception):
    """Exception type distinguishing version-related failures.

    (No uses visible in this file; presumably raised by callers elsewhere —
    TODO confirm.)"""
    pass
|
2022-01-20 03:19:58 +00:00
|
|
|
|
2022-02-24 03:47:01 +00:00
|
|
|
|
2022-04-30 02:39:08 +00:00
|
|
|
# noinspection PyPep8Naming
def format_SI_prefix(value, power=1000, power_labels=('', 'k', 'M', 'G', 'T', "P", "E", "Z", "Y")) -> str:
    """Format *value* scaled by *power* with the matching SI prefix label.

    e.g. 1500 -> "1.500 k". Values that need no scaling keep their exact
    integer representation; scaled values are shown with three decimals."""
    n = 0
    # scale down until the value fits under one unit of the next prefix
    while value > power:
        value /= power
        n += 1
    # was `type(value) == int`; isinstance is the idiomatic type check
    if isinstance(value, int):
        return f"{value} {power_labels[n]}"
    else:
        return f"{value:0.3f} {power_labels[n]}"
|
2022-05-09 05:18:50 +00:00
|
|
|
|
|
|
|
|
2022-05-09 15:03:16 +00:00
|
|
|
def get_fuzzy_ratio(word1: str, word2: str) -> float:
    """Similarity score in [0, 1]: 1.0 for identical words (case-insensitive),
    based on Damerau-Levenshtein distance relative to the longer word."""
    distance = jellyfish.damerau_levenshtein_distance(word1.lower(), word2.lower())
    return 1 - distance / max(len(word1), len(word2))
|
|
|
|
|
|
|
|
|
|
|
|
def get_fuzzy_results(input_word: str, wordlist: typing.Sequence[str], limit: typing.Optional[int] = None) \
        -> typing.List[typing.Tuple[str, int]]:
    """Rank *wordlist* by fuzzy similarity to *input_word*.

    Returns up to *limit* (word, percent) pairs, best match first; a falsy
    *limit* means the full list."""
    count: int = limit if limit else len(wordlist)
    scored = [(word, get_fuzzy_ratio(input_word, word)) for word in wordlist]
    scored.sort(key=lambda pair: pair[1], reverse=True)
    # convert the top results' ratios to integer percentages
    return [(word, int(ratio * 100)) for word, ratio in scored[:count]]
|