2020-04-22 03:09:46 +00:00
|
|
|
from __future__ import annotations
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-11-02 14:51:35 +00:00
|
|
|
import asyncio
|
2020-06-21 13:32:31 +00:00
|
|
|
import typing
|
2021-11-28 03:06:30 +00:00
|
|
|
import builtins
|
|
|
|
import os
|
|
|
|
import subprocess
|
|
|
|
import sys
|
|
|
|
import pickle
|
|
|
|
import functools
|
|
|
|
import io
|
|
|
|
import collections
|
|
|
|
import importlib
|
|
|
|
import logging
|
2022-11-02 14:51:35 +00:00
|
|
|
from typing import BinaryIO, ClassVar, Coroutine, Optional, Set
|
2022-09-29 22:36:30 +00:00
|
|
|
|
2022-08-11 22:32:37 +00:00
|
|
|
from yaml import load, load_all, dump, SafeLoader
|
|
|
|
|
|
|
|
try:
|
|
|
|
from yaml import CLoader as UnsafeLoader
|
|
|
|
from yaml import CDumper as Dumper
|
|
|
|
except ImportError:
|
|
|
|
from yaml import Loader as UnsafeLoader
|
|
|
|
from yaml import Dumper
|
2022-05-18 20:30:19 +00:00
|
|
|
|
|
|
|
if typing.TYPE_CHECKING:
|
2022-08-11 22:32:37 +00:00
|
|
|
import tkinter
|
|
|
|
import pathlib
|
2020-06-21 13:32:31 +00:00
|
|
|
|
|
|
|
|
2021-08-06 17:33:17 +00:00
|
|
|
def tuplize_version(version: str) -> Version:
    """Parse a dotted version string such as "0.3.7" into a Version tuple."""
    pieces = version.split(".")
    return Version(*(int(piece, 10) for piece in pieces))
|
|
|
|
|
2020-06-21 13:32:31 +00:00
|
|
|
|
2020-12-29 18:23:14 +00:00
|
|
|
class Version(typing.NamedTuple):
    """Comparable (major, minor, build) version triple; ordered like a tuple."""
    # most significant component
    major: int
    # feature-level component
    minor: int
    # least significant component (patch/build number)
    build: int
|
2020-04-22 03:09:46 +00:00
|
|
|
|
2021-07-01 23:29:49 +00:00
|
|
|
|
2022-12-07 23:38:34 +00:00
|
|
|
# Release version as a display string; parsed into a comparable tuple just below.
__version__ = "0.3.7"
version_tuple = tuplize_version(__version__)

# Platform flags, evaluated once at import time.
is_linux = sys.platform.startswith("linux")
is_macos = sys.platform == "darwin"
is_windows = sys.platform in ("win32", "cygwin", "msys")
|
|
|
|
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def int16_as_bytes(value: int) -> typing.List[int]:
    """Return *value* truncated to 16 bits as a little-endian list of byte values."""
    return list((value & 0xFFFF).to_bytes(2, "little"))
|
|
|
|
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def int32_as_bytes(value: int) -> typing.List[int]:
    """Return *value* truncated to 32 bits as a little-endian list of byte values."""
    return list((value & 0xFFFFFFFF).to_bytes(4, "little"))
|
|
|
|
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def pc_to_snes(value: int) -> int:
    """Map a PC (file) offset to the equivalent LoROM SNES address."""
    bank_bits = (value << 1) & 0x7F0000
    offset_bits = value & 0x7FFF
    return bank_bits | offset_bits | 0x8000
|
2018-09-23 02:51:54 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
def snes_to_pc(value: int) -> int:
    """Map a LoROM SNES address back to the equivalent PC (file) offset."""
    bank_bits = (value & 0x7F0000) >> 1
    offset_bits = value & 0x7FFF
    return bank_bits | offset_bits
|
2018-09-23 02:51:54 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-04-28 16:03:44 +00:00
|
|
|
RetType = typing.TypeVar("RetType")


def cache_argsless(function: typing.Callable[[], RetType]) -> typing.Callable[[], RetType]:
    """Memoize a zero-argument callable: the first call runs *function*,
    every later call replays the stored result."""
    assert not function.__code__.co_argcount, "Can only cache 0 argument functions with this cache."

    _unset = object()
    # single-element cell holding either the sentinel or the cached result
    cell: typing.List[typing.Union[object, RetType]] = [_unset]

    def _wrap() -> RetType:
        if cell[0] is _unset:
            cell[0] = function()
        return typing.cast(RetType, cell[0])

    return _wrap
|
|
|
|
|
|
|
|
|
2021-07-19 19:52:08 +00:00
|
|
|
def is_frozen() -> bool:
    """True when running from a frozen build (PyInstaller/cx_Freeze set sys.frozen)."""
    frozen_flag = getattr(sys, 'frozen', False)
    return typing.cast(bool, frozen_flag)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2020-07-21 21:15:19 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def local_path(*path: str) -> str:
    """Returns path to a file in the local Archipelago installation or source.

    The base directory is resolved once and memoized on the function object
    as ``local_path.cached_path``; later calls only join onto it.
    """
    if hasattr(local_path, 'cached_path'):
        # base directory already resolved on a previous call
        pass
    elif is_frozen():
        if hasattr(sys, "_MEIPASS"):
            # we are running in a PyInstaller bundle
            local_path.cached_path = sys._MEIPASS  # pylint: disable=protected-access,no-member
        else:
            # cx_Freeze
            local_path.cached_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    else:
        import __main__
        if hasattr(__main__, "__file__"):
            # we are running in a normal Python environment
            local_path.cached_path = os.path.dirname(os.path.abspath(__main__.__file__))
        else:
            # pray (e.g. interactive interpreter: fall back to the current directory)
            local_path.cached_path = os.path.abspath(".")

    return os.path.join(local_path.cached_path, *path)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def home_path(*path: str) -> str:
    """Returns path to a file in the user home's Archipelago directory.

    On Linux this is ``~/Archipelago`` (created on first use, mode 0o700);
    other platforms currently fall back to :func:`local_path`.  The resolved
    base directory is memoized as ``home_path.cached_path``.
    """
    if hasattr(home_path, 'cached_path'):
        pass
    elif is_linux:  # reuse the module-level platform flag instead of re-checking sys.platform
        home_path.cached_path = os.path.expanduser('~/Archipelago')
        # 0o700: keep the directory private to the current user
        os.makedirs(home_path.cached_path, 0o700, exist_ok=True)
    else:
        # not implemented for this platform
        home_path.cached_path = local_path()  # this will generate the same exceptions we got previously

    return os.path.join(home_path.cached_path, *path)
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2020-08-25 11:22:47 +00:00
|
|
|
|
2022-03-31 03:08:15 +00:00
|
|
|
def user_path(*path: str) -> str:
    """Returns either local_path or home_path based on write permissions."""
    if hasattr(user_path, "cached_path"):
        pass
    elif os.access(local_path(), os.W_OK):
        # install/source directory is writable: use it directly
        user_path.cached_path = local_path()
    else:
        user_path.cached_path = home_path()
        # populate home from local - TODO: upgrade feature
        # host.yaml acts as the "already initialized" marker for the home copy
        if user_path.cached_path != local_path() and not os.path.exists(user_path("host.yaml")):
            import shutil
            for dn in ("Players", "data/sprites"):
                shutil.copytree(local_path(dn), user_path(dn), dirs_exist_ok=True)
            for fn in ("manifest.json", "host.yaml"):
                shutil.copy2(local_path(fn), user_path(fn))

    return os.path.join(user_path.cached_path, *path)
|
|
|
|
|
|
|
|
|
2022-10-25 17:54:43 +00:00
|
|
|
def output_path(*path: str) -> str:
    """Return a path under the configured output directory, creating parent folders as needed."""
    if hasattr(output_path, 'cached_path'):
        return os.path.join(output_path.cached_path, *path)
    # first call: resolve the configured output directory relative to user_path
    output_path.cached_path = user_path(get_options()["general_options"]["output_path"])
    path = os.path.join(output_path.cached_path, *path)
    # ensure the containing directory exists before the caller writes to it
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
|
2017-11-28 14:36:32 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-08-11 22:32:37 +00:00
|
|
|
def open_file(filename: typing.Union[str, "pathlib.Path"]) -> None:
    """Open *filename* with the operating system's default application.

    Raises:
        FileNotFoundError: on non-Windows platforms when no known opener
            utility (open/xdg-open/gnome-open/kde-open) is installed.
    """
    if is_windows:
        os.startfile(filename)
    else:
        from shutil import which
        open_command = which("open") if is_macos else (which("xdg-open") or which("gnome-open") or which("kde-open"))
        if not open_command:
            # previously this fell through to subprocess.call([None, ...]) and
            # crashed with an unhelpful TypeError
            raise FileNotFoundError(f"Could not find a system utility to open {filename}")
        subprocess.call([open_command, filename])
|
2017-12-02 14:21:04 +00:00
|
|
|
|
2021-01-02 11:49:43 +00:00
|
|
|
|
2022-01-19 03:26:25 +00:00
|
|
|
# from https://gist.github.com/pypt/94d747fe5180851196eb#gistcomment-4015118 with some changes
|
|
|
|
class UniqueKeyLoader(SafeLoader):
    """SafeLoader variant that rejects YAML documents containing duplicate mapping keys."""

    def construct_mapping(self, node, deep=False):
        mapping = set()
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if key not in mapping:
                mapping.add(key)
                continue
            # duplicate found: log where it happened, then refuse the document
            logging.error(f"YAML duplicates sanity check failed{key_node.start_mark}")
            raise KeyError(f"Duplicate key {key} found in YAML. Already found keys: {mapping}.")
        return super().construct_mapping(node, deep)
|
|
|
|
|
|
|
|
|
|
|
|
# Shared YAML entry points: the safe parsers reject duplicate keys via UniqueKeyLoader.
parse_yaml = functools.partial(load, Loader=UniqueKeyLoader)
parse_yamls = functools.partial(load_all, Loader=UniqueKeyLoader)
# Unsafe variant for trusted, locally generated files only (can construct arbitrary objects).
unsafe_parse_yaml = functools.partial(load, Loader=UnsafeLoader)

del load, load_all  # should not be used. don't leak their names
|
2020-02-16 14:32:40 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-11-13 22:14:26 +00:00
|
|
|
def get_cert_none_ssl_context():
    """Build an SSL context that skips hostname and certificate verification."""
    import ssl

    context = ssl.create_default_context()
    # check_hostname must be disabled before verify_mode can be set to CERT_NONE
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    return context
|
|
|
|
|
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_public_ipv4() -> str:
    """Best-effort lookup of this machine's public IPv4 address (cached).

    Tries two web services in turn; if both fail, returns the locally-resolved
    hostname address instead of raising.
    """
    import socket
    import urllib.request
    # fallback value: whatever the local resolver says for our own hostname
    ip = socket.gethostbyname(socket.gethostname())
    ctx = get_cert_none_ssl_context()
    try:
        ip = urllib.request.urlopen("https://checkip.amazonaws.com/", context=ctx).read().decode("utf8").strip()
    except Exception as e:
        # noinspection PyBroadException
        try:
            # secondary service in case the first is down
            ip = urllib.request.urlopen("https://v4.ident.me", context=ctx).read().decode("utf8").strip()
        except Exception:
            logging.exception(e)
            pass  # we could be offline, in a local game, so no point in erroring out
    return ip
|
2020-03-15 18:32:00 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_public_ipv6() -> str:
    """Best-effort lookup of this machine's public IPv6 address (cached).

    Falls back to the locally-resolved hostname address if the web service is
    unreachable (offline, local game, or no IPv6 connectivity).
    """
    import socket
    import urllib.request
    # fallback value: whatever the local resolver says for our own hostname
    ip = socket.gethostbyname(socket.gethostname())
    ctx = get_cert_none_ssl_context()
    try:
        ip = urllib.request.urlopen("https://v6.ident.me", context=ctx).read().decode("utf8").strip()
    except Exception as e:
        logging.exception(e)
        pass  # we could be offline, in a local game, or ipv6 may not be available
    return ip
|
2020-03-15 18:32:00 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2022-09-28 21:54:10 +00:00
|
|
|
# Shape of parsed host options: category name -> {option name -> value}.
OptionsType = typing.Dict[str, typing.Dict[str, typing.Any]]
|
|
|
|
|
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_default_options() -> OptionsType:
    """Return the built-in default host options, mirroring host.yaml's structure (cached)."""
    # Refer to host.yaml for comments as to what all these options mean.
    options = {
        "general_options": {
            "output_path": "output",
        },
        "factorio_options": {
            "executable": os.path.join("factorio", "bin", "x64", "factorio"),
            "filter_item_sends": False,
            "bridge_chat_out": True,
        },
        "sni_options": {
            "sni_path": "SNI",
            "snes_rom_start": True,
        },
        "sm_options": {
            "rom_file": "Super Metroid (JU).sfc",
        },
        "soe_options": {
            "rom_file": "Secret of Evermore (USA).sfc",
        },
        "lttp_options": {
            "rom_file": "Zelda no Densetsu - Kamigami no Triforce (Japan).sfc",
        },
        "server_options": {
            "host": None,
            "port": 38281,
            "password": None,
            "multidata": None,
            "savefile": None,
            "disable_save": False,
            "loglevel": "info",
            "server_password": None,
            "disable_item_cheat": False,
            "location_check_points": 1,
            "hint_cost": 10,
            "forfeit_mode": "goal",
            "collect_mode": "disabled",
            "remaining_mode": "goal",
            "auto_shutdown": 0,
            "compatibility": 2,
            "log_network": 0
        },
        "generator": {
            "enemizer_path": os.path.join("EnemizerCLI", "EnemizerCLI.Core"),
            "player_files_path": "Players",
            "players": 0,
            "weights_file_path": "weights.yaml",
            "meta_file_path": "meta.yaml",
            "spoiler": 2,
            "glitch_triforce_room": 1,
            "race": 0,
            "plando_options": "bosses",
        },
        "minecraft_options": {
            "forge_directory": "Minecraft Forge server",
            "max_heap_size": "2G",
            "release_channel": "release"
        },
        "oot_options": {
            "rom_file": "The Legend of Zelda - Ocarina of Time.z64",
            "rom_start": True
        },
        "dkc3_options": {
            "rom_file": "Donkey Kong Country 3 - Dixie Kong's Double Trouble! (USA) (En,Fr).sfc",
        },
        "smw_options": {
            "rom_file": "Super Mario World (USA).sfc",
        },
        "zillion_options": {
            "rom_file": "Zillion (UE) [!].sms",
            # RetroArch doesn't make it easy to launch a game from the command line.
            # You have to know the path to the emulator core library on the user's computer.
            "rom_start": "retroarch",
        },
        "pokemon_rb_options": {
            "red_rom_file": "Pokemon Red (UE) [S][!].gb",
            "blue_rom_file": "Pokemon Blue (UE) [S][!].gb",
            "rom_start": True
        },
        "ffr_options": {
            "display_msgs": True,
        },
    }
    return options
|
2020-11-28 19:34:29 +00:00
|
|
|
|
2020-11-30 15:43:13 +00:00
|
|
|
|
2022-09-28 21:54:10 +00:00
|
|
|
def update_options(src: dict, dest: dict, filename: str, keys: list) -> OptionsType:
    """Recursively merge default options from *src* into user-supplied *dest*.

    Missing keys are copied over and nested dicts are merged in place; *dest*
    is mutated and returned.  *filename* is the options file being merged (for
    log messages) and *keys* is the path of parent keys to the current level.
    """
    for key, value in src.items():
        new_keys = keys.copy()
        new_keys.append(key)
        option_name = '.'.join(new_keys)
        if key not in dest:
            dest[key] = value
            # only warn for the primary options file, not secondary overrides
            if filename.endswith("options.yaml"):
                # include the actual filename instead of a "(unknown)" placeholder
                logging.info(f"Warning: {filename} is missing {option_name}")
        elif isinstance(value, dict):
            if not isinstance(dest.get(key, None), dict):
                if filename.endswith("options.yaml"):
                    logging.info(f"Warning: {filename} has {option_name}, but it is not a dictionary. overwriting.")
                dest[key] = value
            else:
                dest[key] = update_options(value, dest[key], filename, new_keys)
    return dest
|
2020-11-28 19:34:29 +00:00
|
|
|
|
2021-07-30 23:40:27 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_options() -> OptionsType:
    """Load host options from the first options.yaml/host.yaml found, merged over defaults (cached).

    Raises:
        FileNotFoundError: if no options file exists in any searched location.
    """
    filenames = ("options.yaml", "host.yaml")
    locations: typing.List[str] = []
    if os.path.join(os.getcwd()) != local_path():
        locations += filenames  # use files from cwd only if it's not the local_path
    locations += [user_path(filename) for filename in filenames]

    for location in locations:
        if os.path.exists(location):
            with open(location) as f:
                options = parse_yaml(f.read())
            # fill in any options missing from the user's file with defaults
            return update_options(get_default_options(), options, location, list())

    raise FileNotFoundError(f"Could not find {filenames[1]} to load options.")
|
2020-04-14 18:22:42 +00:00
|
|
|
|
|
|
|
|
2021-02-25 01:07:28 +00:00
|
|
|
def persistent_store(category: str, key: typing.Any, value: typing.Any):
    """Store *value* under *category*/*key* in the persistent user storage file."""
    path = user_path("_persistent_storage.yaml")
    storage: dict = persistent_load()
    # named distinctly so the `category` parameter is not shadowed by its dict
    category_data = storage.setdefault(category, {})
    category_data[key] = value
    with open(path, "wt") as f:
        f.write(dump(storage, Dumper=Dumper))
|
2020-04-24 03:29:02 +00:00
|
|
|
|
|
|
|
|
2022-08-11 22:32:37 +00:00
|
|
|
def persistent_load() -> typing.Dict[str, dict]:
    """Load (and memoize on the function object) the persistent user storage file."""
    storage = getattr(persistent_load, "storage", None)
    if storage:
        return storage
    path = user_path("_persistent_storage.yaml")
    storage: dict = {}
    if os.path.exists(path):
        try:
            with open(path, "r") as f:
                storage = unsafe_parse_yaml(f.read())
        except Exception as e:
            # best-effort: a corrupt store is treated as empty
            logging.debug(f"Could not read store: {e}")
    if storage is None:
        # file existed but was empty -> yaml load returns None
        storage = {}
    persistent_load.storage = storage
    return storage
|
|
|
|
|
|
|
|
|
2022-09-28 21:54:10 +00:00
|
|
|
def get_adjuster_settings(game_name: str) -> typing.Dict[str, typing.Any]:
    """Return the stored adjuster settings for *game_name*, or an empty dict."""
    return persistent_load().get("adjuster", {}).get(game_name, {})
|
2020-06-07 19:04:33 +00:00
|
|
|
|
2021-07-31 13:13:55 +00:00
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
@cache_argsless
def get_unique_identifier():
    """Return a stable per-machine identifier, creating and persisting one if needed."""
    uuid = persistent_load().get("client", {}).get("uuid", None)
    if uuid:
        return uuid

    # NOTE: the local name deliberately reuses (and then shadows) the module name
    import uuid
    uuid = uuid.getnode()  # node id derived from the hardware/MAC address
    persistent_store("client", "uuid", uuid)
    return uuid
|
2020-09-08 23:41:37 +00:00
|
|
|
|
|
|
|
|
2022-08-11 22:32:37 +00:00
|
|
|
# builtins RestrictedUnpickler is allowed to resolve; everything else is rejected
safe_builtins = frozenset((
    'set',
    'frozenset',
))
|
2020-09-08 23:41:37 +00:00
|
|
|
|
|
|
|
|
|
|
|
class RestrictedUnpickler(pickle.Unpickler):
    """Unpickler that only resolves a whitelist of known-safe globals.

    Arbitrary pickle globals are a code-execution risk; this restricts
    resolution to a few builtins, selected NetUtils/worlds.generic names,
    and Option subclasses from *options modules.
    """

    def __init__(self, *args, **kwargs):
        super(RestrictedUnpickler, self).__init__(*args, **kwargs)
        # resolved once up front so find_class can reuse them
        self.options_module = importlib.import_module("Options")
        self.net_utils_module = importlib.import_module("NetUtils")
        self.generic_properties_module = importlib.import_module("worlds.generic")

    def find_class(self, module, name):
        # See the pickle docs on "Restricting Globals" for this pattern.
        if module == "builtins" and name in safe_builtins:
            return getattr(builtins, name)
        # used by MultiServer -> savegame/multidata
        if module == "NetUtils" and name in {"NetworkItem", "ClientStatus", "Hint", "SlotType", "NetworkSlot"}:
            return getattr(self.net_utils_module, name)
        # Options and Plando are unpickled by WebHost -> Generate
        if module == "worlds.generic" and name in {"PlandoItem", "PlandoConnection"}:
            return getattr(self.generic_properties_module, name)
        # pep 8 specifies that modules should have "all-lowercase names" (options, not Options)
        if module.lower().endswith("options"):
            if module == "Options":
                mod = self.options_module
            else:
                mod = importlib.import_module(module)
            obj = getattr(mod, name)
            # only Option subclasses are allowed out of options modules
            if issubclass(obj, self.options_module.Option):
                return obj
        # Forbid everything else.
        raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")
|
2020-09-08 23:41:37 +00:00
|
|
|
|
|
|
|
|
|
|
|
def restricted_loads(s):
    """Helper function analogous to pickle.loads()."""
    buffer = io.BytesIO(s)
    return RestrictedUnpickler(buffer).load()
|
|
|
|
|
2021-07-09 15:44:24 +00:00
|
|
|
|
2021-07-07 08:14:58 +00:00
|
|
|
class KeyedDefaultDict(collections.defaultdict):
    """defaultdict variant that uses the missing key as argument to default_factory"""
    default_factory: typing.Callable[[typing.Any], typing.Any]

    def __missing__(self, key):
        value = self.default_factory(key)
        self[key] = value
        return value
|
|
|
|
|
|
|
|
|
|
|
|
def get_text_between(text: str, start: str, end: str) -> str:
    """Return the part of *text* after the first *start* and before the last *end*."""
    begin = text.index(start) + len(start)
    stop = text.rindex(end)
    return text[begin:stop]
|
2021-11-10 14:35:43 +00:00
|
|
|
|
|
|
|
|
2022-08-17 22:27:37 +00:00
|
|
|
def get_text_after(text: str, start: str) -> str:
    """Return everything in *text* after the first occurrence of *start*."""
    cut = text.index(start) + len(start)
    return text[cut:]
|
|
|
|
|
|
|
|
|
2021-11-10 14:35:43 +00:00
|
|
|
# maps config-file loglevel strings to the logging module's numeric constants
loglevel_mapping = {'error': logging.ERROR, 'info': logging.INFO, 'warning': logging.WARNING, 'debug': logging.DEBUG}
|
|
|
|
|
|
|
|
|
|
|
|
def init_logging(name: str, loglevel: typing.Union[str, int] = logging.INFO, write_mode: str = "w",
                 log_format: str = "[%(name)s at %(asctime)s]: %(message)s",
                 exception_logger: typing.Optional[str] = None):
    """Configure the root logger to write to a file in the user's "logs" folder and, if present, stdout.

    Also wraps sys.excepthook so uncaught exceptions get logged, starts a background
    thread that deletes logfiles older than 7 days, and logs version/platform info.

    :param name: base name of the logfile; a timestamp is appended unless write_mode appends
    :param loglevel: logging level int, or one of the loglevel_mapping names ("error", "info", ...)
    :param write_mode: open mode for the FileHandler, e.g. "w" or "a"
    :param log_format: format string for the file handler's Formatter
    :param exception_logger: name of the logger that receives uncaught exceptions (None = root)
    """
    import datetime
    # translate a textual level to its int; ints pass through unchanged
    loglevel: int = loglevel_mapping.get(loglevel, loglevel)
    # user_path is a helper defined elsewhere in this file — presumably resolves
    # a per-user data directory; TODO(review) confirm against its definition
    log_folder = user_path("logs")
    os.makedirs(log_folder, exist_ok=True)
    root_logger = logging.getLogger()
    # drop (and close) handlers from any previous init_logging call;
    # iterate over a copy since removeHandler mutates the list
    for handler in root_logger.handlers[:]:
        root_logger.removeHandler(handler)
        handler.close()
    root_logger.setLevel(loglevel)
    # unless appending, make the logfile name unique with a timestamp
    if "a" not in write_mode:
        name += f"_{datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')}"
    file_handler = logging.FileHandler(
        os.path.join(log_folder, f"{name}.txt"),
        write_mode,
        encoding="utf-8-sig")
    file_handler.setFormatter(logging.Formatter(log_format))
    root_logger.addHandler(file_handler)
    # sys.stdout can be None (e.g. windowed builds without a console)
    if sys.stdout:
        root_logger.addHandler(
            logging.StreamHandler(sys.stdout)
        )

    # Relay unhandled exceptions to logger.
    if not getattr(sys.excepthook, "_wrapped", False):  # skip if already modified
        orig_hook = sys.excepthook

        def handle_exception(exc_type, exc_value, exc_traceback):
            # let Ctrl+C terminate normally instead of being logged
            if issubclass(exc_type, KeyboardInterrupt):
                sys.__excepthook__(exc_type, exc_value, exc_traceback)
                return
            logging.getLogger(exception_logger).exception("Uncaught exception",
                                                          exc_info=(exc_type, exc_value, exc_traceback))
            # keep whatever hook was installed before us in the chain
            return orig_hook(exc_type, exc_value, exc_traceback)

        handle_exception._wrapped = True

        sys.excepthook = handle_exception

    def _cleanup():
        # delete .txt logfiles in the log folder not modified within the last 7 days
        for file in os.scandir(log_folder):
            if file.name.endswith(".txt"):
                last_change = datetime.datetime.fromtimestamp(file.stat().st_mtime)
                if datetime.datetime.now() - last_change > datetime.timedelta(days=7):
                    try:
                        os.unlink(file.path)
                    except Exception as e:
                        logging.exception(e)
                    else:
                        logging.info(f"Deleted old logfile {file.path}")
    import threading
    threading.Thread(target=_cleanup, name="LogCleaner").start()
    import platform
    logging.info(
        f"Archipelago ({__version__}) logging initialized"
        f" on {platform.platform()}"
        f" running Python {sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
    )
|
2022-06-07 22:34:45 +00:00
|
|
|
|
2021-11-28 03:06:30 +00:00
|
|
|
|
|
|
|
def stream_input(stream, queue):
    """Forward non-empty, stripped lines from stream into queue on a daemon thread.

    :param stream: readable text stream with a .name attribute (e.g. sys.stdin)
    :param queue: object with put_nowait(), e.g. queue.Queue or asyncio.Queue
    :return: the started daemon Thread
    """
    def queuer():
        while 1:
            try:
                line = stream.readline()
            except UnicodeDecodeError as e:
                logging.exception(e)
            else:
                if not line:
                    # readline() returns "" only at EOF; stop the thread instead of
                    # busy-spinning on the closed stream forever
                    break
                text = line.strip()
                if text:
                    queue.put_nowait(text)

    from threading import Thread
    thread = Thread(target=queuer, name=f"Stream handler for {stream.name}", daemon=True)
    thread.start()
    return thread
|
2022-01-18 07:23:38 +00:00
|
|
|
|
|
|
|
|
2022-08-11 22:32:37 +00:00
|
|
|
def tkinter_center_window(window: "tkinter.Tk") -> None:
    """Position the given tk window so that it is centered on the screen."""
    window.update()
    # center = half the screen size minus half the window's requested size
    pos_x = int(window.winfo_screenwidth() / 2 - window.winfo_reqwidth() / 2)
    pos_y = int(window.winfo_screenheight() / 2 - window.winfo_reqheight() / 2)
    window.geometry(f"+{pos_x}+{pos_y}")
|
2022-01-20 03:19:58 +00:00
|
|
|
|
2022-02-24 03:47:01 +00:00
|
|
|
|
2022-01-18 07:23:38 +00:00
|
|
|
class VersionException(Exception):
    """Exception type for version-related failures; raised elsewhere in the project."""
|
2022-01-20 03:19:58 +00:00
|
|
|
|
2022-02-24 03:47:01 +00:00
|
|
|
|
2022-06-21 18:50:40 +00:00
|
|
|
def chaining_prefix(index: int, labels: typing.Tuple[str, ...]) -> str:
    """Return the label at index, chaining copies of the last label for indices past the end.

    e.g. with labels ("", "k", "M"): 0 -> "", 2 -> "M", 3 -> "kM", 4 -> "MM".
    (Annotation fixed: Tuple[str] means a 1-tuple; labels is variable-length.)
    """
    text = ""
    max_label = len(labels) - 1
    # repeatedly consume the largest label until index fits inside labels
    while index > max_label:
        text += labels[-1]
        index -= max_label
    return labels[index] + text
|
|
|
|
|
|
|
|
|
2022-04-30 02:39:08 +00:00
|
|
|
# noinspection PyPep8Naming
def format_SI_prefix(value, power=1000, power_labels=("", "k", "M", "G", "T", "P", "E", "Z", "Y")) -> str:
    """Formats a value into a value + metric/si prefix. More info at https://en.wikipedia.org/wiki/Metric_prefix"""
    import decimal

    steps = 0
    amount = decimal.Decimal(value)
    # stop just below the next power so that rounding to two decimals
    # never displays e.g. "1000.00" without stepping up a prefix
    threshold = power - decimal.Decimal("0.005")
    while amount >= threshold:
        amount /= power
        steps += 1

    return f"{amount.quantize(decimal.Decimal('1.00'))} {chaining_prefix(steps, power_labels)}"
|
2022-05-09 05:18:50 +00:00
|
|
|
|
|
|
|
|
2022-05-09 15:03:16 +00:00
|
|
|
def get_fuzzy_results(input_word: str, wordlist: typing.Sequence[str], limit: typing.Optional[int] = None) \
        -> typing.List[typing.Tuple[str, int]]:
    """Rank wordlist entries by fuzzy similarity to input_word, best first, as (word, percent) pairs."""
    import jellyfish

    def get_fuzzy_ratio(word1: str, word2: str) -> float:
        # 1.0 for identical words (case-insensitive), reduced proportionally by edit distance
        distance = jellyfish.damerau_levenshtein_distance(word1.lower(), word2.lower())
        return 1 - distance / max(len(word1), len(word2))

    cutoff = limit if limit else len(wordlist)
    scored = [(candidate, get_fuzzy_ratio(input_word, candidate)) for candidate in wordlist]
    scored.sort(key=lambda pair: pair[1], reverse=True)
    # convert the ratio to an integer percentage for the top `cutoff` entries
    return [(word, int(ratio * 100)) for word, ratio in scored[:cutoff]]
|
2022-06-04 15:02:02 +00:00
|
|
|
|
|
|
|
|
2022-06-04 16:36:50 +00:00
|
|
|
def open_filename(title: str, filetypes: typing.Sequence[typing.Tuple[str, typing.Sequence[str]]]) \
        -> typing.Optional[str]:
    """Show a "select file" dialog and return the chosen path, or None if cancelled.

    On Linux this prefers the native kdialog/zenity helpers; everywhere else
    (or when neither helper is installed) it falls back to a tkinter dialog.

    :param title: window title for the dialog
    :param filetypes: sequence of (description, extensions) pairs, e.g. [("Rom", (".sfc", ".smc"))]
    :raises Exception: re-raises the import error if the tkinter fallback is unavailable
    """
    def run(*args: str):
        # run a dialog helper binary; the selected path is its first stdout line,
        # which is empty when the user cancels -> None
        return subprocess.run(args, capture_output=True, text=True).stdout.split("\n", 1)[0] or None

    if is_linux:
        # prefer native dialog
        from shutil import which
        kdialog = which("kdialog")
        if kdialog:
            # kdialog filter syntax: "Description (*.a *.b)|..."
            k_filters = '|'.join((f'{text} (*{" *".join(ext)})' for (text, ext) in filetypes))
            return run(kdialog, f"--title={title}", "--getopenfilename", ".", k_filters)
        zenity = which("zenity")
        if zenity:
            # zenity takes one --file-filter argument per filetype
            z_filters = (f'--file-filter={text} ({", ".join(ext)}) | *{" *".join(ext)}' for (text, ext) in filetypes)
            return run(zenity, f"--title={title}", "--file-selection", *z_filters)

    # fall back to tk
    try:
        import tkinter
        import tkinter.filedialog
    except Exception as e:
        logging.error('Could not load tkinter, which is likely not installed. '
                      f'This attempt was made because open_filename was used for "{title}".')
        raise e
    else:
        root = tkinter.Tk()
        # hide the empty root window; only the file dialog should be visible
        root.withdraw()
        return tkinter.filedialog.askopenfilename(title=title, filetypes=((t[0], ' '.join(t[1])) for t in filetypes))
|
|
|
|
|
|
|
|
|
2022-06-04 15:02:02 +00:00
|
|
|
def messagebox(title: str, text: str, error: bool = False) -> None:
    """Show a simple message box using whichever UI toolkit is available.

    Preference order: a running kivy app, then native kdialog/zenity on Linux
    (only when tkinter was not already imported), then a tkinter message box.

    :param title: window title for the dialog
    :param text: message body
    :param error: if True, present the dialog as an error message
    :raises Exception: re-raises the import error if the tkinter fallback is unavailable
    """
    def run(*args: str):
        # run a dialog helper binary; returns its first stdout line, or None if empty
        return subprocess.run(args, capture_output=True, text=True).stdout.split("\n", 1)[0] or None

    def is_kivy_running():
        # only probe for a running app when kivy has already been imported,
        # to avoid importing the whole toolkit just for this check
        if "kivy" in sys.modules:
            from kivy.app import App
            return App.get_running_app() is not None
        return False

    if is_kivy_running():
        # kvui is a project-local module wrapping kivy widgets
        from kvui import MessageBox
        MessageBox(title, text, error).open()
        return

    if is_linux and "tkinter" not in sys.modules:
        # prefer native dialog
        from shutil import which
        kdialog = which("kdialog")
        if kdialog:
            return run(kdialog, f"--title={title}", "--error" if error else "--msgbox", text)
        zenity = which("zenity")
        if zenity:
            return run(zenity, f"--title={title}", f"--text={text}", "--error" if error else "--info")

    # fall back to tk
    try:
        import tkinter
        from tkinter.messagebox import showerror, showinfo
    except Exception as e:
        logging.error('Could not load tkinter, which is likely not installed. '
                      f'This attempt was made because messagebox was used for "{title}".')
        raise e
    else:
        root = tkinter.Tk()
        # hide the empty root window; only the message box should be visible
        root.withdraw()
        showerror(title, text) if error else showinfo(title, text)
        root.update()
|
2022-08-09 20:21:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
def title_sorted(data: typing.Sequence, key=None, ignore: typing.Set = frozenset(("a", "the"))):
    """Sorts a sequence of text ignoring typical articles like "a" or "the" in the beginning.

    :param data: sequence to sort
    :param key: optional callable mapping each element to the string to sort by
    :param ignore: lowercase leading words to strip before comparing
    """
    def sorter(element: str) -> str:
        parts = element.split(maxsplit=1)
        # guard len(parts) > 1: an empty string has no parts at all, and a
        # lone article ("The") has no second part — both crashed before
        if len(parts) > 1 and parts[0].lower() in ignore:
            return parts[1].lower()
        return element.lower()
    return sorted(data, key=lambda i: sorter(key(i)) if key else sorter(i))
|
2022-09-29 22:36:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
def read_snes_rom(stream: BinaryIO, strip_header: bool = True) -> bytearray:
    """Reads rom into bytearray and optionally strips off any smc header"""
    data = bytearray(stream.read())
    # an smc header is an extra 0x200 bytes in front of an otherwise 0x400-aligned rom
    has_smc_header = len(data) % 0x400 == 0x200
    if strip_header and has_smc_header:
        del data[:0x200]
    return data
|
2022-11-02 14:51:35 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Strong references to fire-and-forget tasks, held until each task completes.
_faf_tasks: "Set[asyncio.Task[None]]" = set()


def async_start(co: Coroutine[typing.Any, typing.Any, bool], name: Optional[str] = None) -> None:
    """
    Use this to start a task when you don't keep a reference to it or immediately await it,
    to prevent early garbage collection. "fire-and-forget"
    """
    # Per the asyncio.create_task documentation, the event loop keeps only a
    # weak reference to tasks, so a task with no other reference can disappear
    # mid-execution. _faf_tasks holds a strong reference; the done-callback
    # drops it once the task finishes.
    # https://docs.python.org/3.10/library/asyncio-task.html#asyncio.create_task
    new_task = asyncio.create_task(co, name=name)
    _faf_tasks.add(new_task)
    new_task.add_done_callback(_faf_tasks.discard)
|