import base64
import datetime
import os
import platform
import shutil
import sys
import sysconfig
import typing
import zipfile
import urllib.request
import io
import json
import threading
import subprocess

from collections.abc import Iterable
from hashlib import sha3_512
from pathlib import Path

# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
try:
    requirement = 'cx-Freeze>=6.14.7'
    import pkg_resources
    try:
        pkg_resources.require(requirement)
        install_cx_freeze = False
    except pkg_resources.ResolutionError:
        install_cx_freeze = True
except ImportError:
    install_cx_freeze = True
    pkg_resources = None  # type: ignore [assignment]

if install_cx_freeze:
    # check if pip is available
    try:
        import pip  # noqa: F401
    except ImportError:
        raise RuntimeError("pip not available. Please install pip.")
    # install and import cx_freeze
    if '--yes' not in sys.argv and '-y' not in sys.argv:
        input(f'Requirement {requirement} is not satisfied, press enter to install it')
    subprocess.call([sys.executable, '-m', 'pip', 'install', requirement, '--upgrade'])
    import pkg_resources

import cx_Freeze

# .build only exists if cx-Freeze is the right version, so we have to update/install that first before this line
import setuptools.command.build


if __name__ == "__main__":
    # need to run this early to import from Utils and Launcher
    # TODO: move stuff to not require this
    import ModuleUpdate
    ModuleUpdate.update(yes="--yes" in sys.argv or "-y" in sys.argv)
    ModuleUpdate.update_ran = False  # restore for later

from worlds.LauncherComponents import components, icon_paths
from Utils import version_tuple, is_windows, is_linux


# On Python < 3.10 LogicMixin is not currently supported.
apworlds: set = {
    "Subnautica",
    "Factorio",
    "Rogue Legacy",
    "Sonic Adventure 2 Battle",
    "Donkey Kong Country 3",
    "Super Mario World",
    "Stardew Valley",
    "Timespinner",
    "Minecraft",
    "The Messenger",
    "Links Awakening DX",
    "Super Metroid",
    "SMZ3",
}


def download_SNI():
    print("Updating SNI")
    machine_to_go = {
        "x86_64": "amd64",
        "aarch64": "arm64",
        "armv7l": "arm"
    }
    platform_name = platform.system().lower()
    machine_name = platform.machine().lower()
    # force amd64 on macos until we have universal2 sni, otherwise resolve to GOARCH
    machine_name = "amd64" if platform_name == "darwin" else machine_to_go.get(machine_name, machine_name)
    with urllib.request.urlopen("https://api.github.com/repos/alttpo/sni/releases/latest") as request:
        data = json.load(request)
    files = data["assets"]

    source_url = None

    for file in files:
        download_url: str = file["browser_download_url"]
        machine_match = download_url.rsplit("-", 1)[1].split(".", 1)[0] == machine_name
        if platform_name in download_url and machine_match:
            # prefer "many" builds
            if "many" in download_url:
                source_url = download_url
                break
            source_url = download_url

    if source_url and source_url.endswith(".zip"):
        with urllib.request.urlopen(source_url) as download:
            with zipfile.ZipFile(io.BytesIO(download.read()), "r") as zf:
                for member in zf.infolist():
                    zf.extract(member, path="SNI")
        print(f"Downloaded SNI from {source_url}")

    elif source_url and (source_url.endswith(".tar.xz") or source_url.endswith(".tar.gz")):
        import tarfile
        mode = "r:xz" if source_url.endswith(".tar.xz") else "r:gz"
        with urllib.request.urlopen(source_url) as download:
            sni_dir = None
            with tarfile.open(fileobj=io.BytesIO(download.read()), mode=mode) as tf:
                for member in tf.getmembers():
                    if member.name.startswith("/") or "../" in member.name:
                        raise ValueError(f"Unexpected file '{member.name}' in {source_url}")
                    elif member.isdir() and not sni_dir:
                        sni_dir = member.name
                    elif member.isfile() and not sni_dir or not member.name.startswith(sni_dir):
                        raise ValueError(f"Expected folder before '{member.name}' in {source_url}")
                    elif member.isfile() and sni_dir:
                        tf.extract(member)
        # sadly SNI is in its own folder on non-windows, so we need to rename
        shutil.rmtree("SNI", True)
        os.rename(sni_dir, "SNI")
        print(f"Downloaded SNI from {source_url}")

    elif source_url:
        print(f"Don't know how to extract SNI from {source_url}")

    else:
        print(f"No SNI found for system spec {platform_name} {machine_name}")


signtool: typing.Optional[str]
if os.path.exists("X:/pw.txt"):
    print("Using signtool")
    with open("X:/pw.txt", encoding="utf-8-sig") as f:
        pw = f.read()
    signtool = r'signtool sign /f X:/_SITS_Zertifikat_.pfx /p "' + pw + \
               r'" /fd sha256 /tr http://timestamp.digicert.com/ '
else:
    signtool = None


build_platform = sysconfig.get_platform()
arch_folder = "exe.{platform}-{version}".format(platform=build_platform,
                                                version=sysconfig.get_python_version())
buildfolder = Path("build", arch_folder)
build_arch = build_platform.split('-')[-1] if '-' in build_platform else platform.machine()
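# e.g. on 64-bit Windows with CPython 3.10 this resolves to build/exe.win-amd64-3.10 with build_arch "amd64"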


# see Launcher.py on how to add scripts to setup.py
exes = [
    cx_Freeze.Executable(
        script=f'{c.script_name}.py',
        target_name=c.frozen_name + (".exe" if is_windows else ""),
        icon=icon_paths[c.icon],
        base="Win32GUI" if is_windows and not c.cli else None
    ) for c in components if c.script_name and c.frozen_name
]

extra_data = ["LICENSE", "data", "EnemizerCLI", "host.yaml", "SNI"]
extra_libs = ["libssl.so", "libcrypto.so"] if is_linux else []


def remove_sprites_from_folder(folder):
    for file in os.listdir(folder):
        if file != ".gitignore":
            os.remove(folder / file)


def _threaded_hash(filepath):
    hasher = sha3_512()
    hasher.update(open(filepath, "rb").read())
    return base64.b85encode(hasher.digest()).decode()


# cx_Freeze's build command runs other commands. Override to accept --yes and store that.
class BuildCommand(setuptools.command.build.build):
    user_options = [
        ('yes', 'y', 'Answer "yes" to all questions.'),
    ]
    yes: bool
    last_yes: bool = False  # used by sub commands of build

    def initialize_options(self):
        super().initialize_options()
        type(self).last_yes = self.yes = False

    def finalize_options(self):
        super().finalize_options()
        type(self).last_yes = self.yes


# Override cx_Freeze's build_exe command for pre and post build steps
class BuildExeCommand(cx_Freeze.command.build_exe.BuildEXE):
    user_options = cx_Freeze.command.build_exe.BuildEXE.user_options + [
        ('yes', 'y', 'Answer "yes" to all questions.'),
        ('extra-data=', None, 'Additional files to add.'),
    ]
    yes: bool
    extra_data: Iterable  # [any] not available in 3.8
    extra_libs: Iterable  # work around broken include_files

    buildfolder: Path
    libfolder: Path
    library: Path
    buildtime: datetime.datetime

    def initialize_options(self):
        super().initialize_options()
        self.yes = BuildCommand.last_yes
        self.extra_data = []
        self.extra_libs = []

    def finalize_options(self):
        super().finalize_options()
        self.buildfolder = self.build_exe
        self.libfolder = Path(self.buildfolder, "lib")
        self.library = Path(self.libfolder, "library.zip")

    def installfile(self, path, subpath=None, keep_content: bool = False):
        folder = self.buildfolder
        if subpath:
            folder /= subpath
        print('copying', path, '->', folder)
        if path.is_dir():
            folder /= path.name
            if folder.is_dir() and not keep_content:
                shutil.rmtree(folder)
            shutil.copytree(path, folder, dirs_exist_ok=True)
        elif path.is_file():
            shutil.copy(path, folder)
        else:
            print('Warning,', path, 'not found')

    def create_manifest(self, create_hashes=False):
        # Since the setup is now split into components and the manifest is not,
        # it makes most sense to just remove the hashes for now. Not aware of anyone using them.
        hashes = {}
        manifestpath = os.path.join(self.buildfolder, "manifest.json")
        if create_hashes:
            from concurrent.futures import ThreadPoolExecutor
            pool = ThreadPoolExecutor()
            for dirpath, dirnames, filenames in os.walk(self.buildfolder):
                for filename in filenames:
                    path = os.path.join(dirpath, filename)
                    hashes[os.path.relpath(path, start=self.buildfolder)] = pool.submit(_threaded_hash, path)

        import json
        manifest = {
            "buildtime": self.buildtime.isoformat(sep=" ", timespec="seconds"),
            "hashes": {path: hash.result() for path, hash in hashes.items()},
            "version": version_tuple}

        json.dump(manifest, open(manifestpath, "wt"), indent=4)
        print("Created Manifest")

    def run(self):
        # start downloading sni asap
        sni_thread = threading.Thread(target=download_SNI, name="SNI Downloader")
        sni_thread.start()

        # pre build steps
        print(f"Outputting to: {self.buildfolder}")
        os.makedirs(self.buildfolder, exist_ok=True)
        import ModuleUpdate
        ModuleUpdate.requirements_files.add(os.path.join("WebHostLib", "requirements.txt"))
        ModuleUpdate.update(yes=self.yes)

        # regular cx build
        self.buildtime = datetime.datetime.utcnow()
        super().run()

        # need to finish download before copying
        sni_thread.join()

        # include_files seems to not be done automatically. implement here
        for src, dst in self.include_files:
            print(f"copying {src} -> {self.buildfolder / dst}")
            shutil.copyfile(src, self.buildfolder / dst, follow_symlinks=False)

        # now that include_files is completely broken, run find_libs here
        for src, dst in find_libs(*self.extra_libs):
            print(f"copying {src} -> {self.buildfolder / dst}")
            shutil.copyfile(src, self.buildfolder / dst, follow_symlinks=False)

        # post build steps
        if is_windows:  # kivy_deps is win32 only, linux picks them up automatically
            from kivy_deps import sdl2, glew
            for folder in sdl2.dep_bins + glew.dep_bins:
                shutil.copytree(folder, self.libfolder, dirs_exist_ok=True)
                print(f"copying {folder} -> {self.libfolder}")

        for data in self.extra_data:
            self.installfile(Path(data))

        # kivy data files
        import kivy
        shutil.copytree(os.path.join(os.path.dirname(kivy.__file__), "data"),
                        self.buildfolder / "data",
                        dirs_exist_ok=True)

        os.makedirs(self.buildfolder / "Players" / "Templates", exist_ok=True)
        from Options import generate_yaml_templates
        from worlds.AutoWorld import AutoWorldRegister
        assert not apworlds - set(AutoWorldRegister.world_types), "Unknown world designated for .apworld"
        folders_to_remove: typing.List[str] = []
        generate_yaml_templates(self.buildfolder / "Players" / "Templates", False)
        for worldname, worldtype in AutoWorldRegister.world_types.items():
            if worldname in apworlds:
                file_name = os.path.split(os.path.dirname(worldtype.__file__))[1]
                world_directory = self.libfolder / "worlds" / file_name
                # this method creates an apworld that cannot be moved to a different OS or minor python version,
                # which should be ok
                with zipfile.ZipFile(self.libfolder / "worlds" / (file_name + ".apworld"), "x", zipfile.ZIP_DEFLATED,
                                     compresslevel=9) as zf:
                    for path in world_directory.rglob("*.*"):
                        relative_path = os.path.join(*path.parts[path.parts.index("worlds")+1:])
                        zf.write(path, relative_path)
                folders_to_remove.append(file_name)
                shutil.rmtree(world_directory)
        shutil.copyfile("meta.yaml", self.buildfolder / "Players" / "Templates" / "meta.yaml")
        # TODO: fix LttP options one day
        shutil.copyfile("playerSettings.yaml", self.buildfolder / "Players" / "Templates" / "A Link to the Past.yaml")
        try:
            from maseya import z3pr
        except ImportError:
            print("Maseya Palette Shuffle not found, skipping data files.")
        else:
            # maseya Palette Shuffle exists and needs its data files
            print("Maseya Palette Shuffle found, including data files...")
            file = z3pr.__file__
            self.installfile(Path(os.path.dirname(file)) / "data", keep_content=True)

        if signtool:
            for exe in self.distribution.executables:
                print(f"Signing {exe.target_name}")
                os.system(signtool + os.path.join(self.buildfolder, exe.target_name))
            print("Signing SNI")
            os.system(signtool + os.path.join(self.buildfolder, "SNI", "SNI.exe"))
            print("Signing OoT Utils")
            for exe_path in (("Compress", "Compress.exe"), ("Decompress", "Decompress.exe")):
                os.system(signtool + os.path.join(self.buildfolder, "lib", "worlds", "oot", "data", *exe_path))

        remove_sprites_from_folder(self.buildfolder / "data" / "sprites" / "alttpr")

        self.create_manifest()

        if is_windows:
            # Inno setup stuff
            with open("setup.ini", "w") as f:
                min_supported_windows = "6.2.9200" if sys.version_info > (3, 9) else "6.0.6000"
                f.write(f"[Data]\nsource_path={self.buildfolder}\nmin_windows={min_supported_windows}\n")
            with open("installdelete.iss", "w") as f:
                f.writelines("Type: filesandordirs; Name: \"{app}\\lib\\worlds\\"+world_directory+"\"\n"
                             for world_directory in folders_to_remove)
        else:
            # make sure extra programs are executable
            enemizer_exe = self.buildfolder / 'EnemizerCLI/EnemizerCLI.Core'
            sni_exe = self.buildfolder / 'SNI/sni'
            extra_exes = (enemizer_exe, sni_exe)
            for extra_exe in extra_exes:
                if extra_exe.is_file():
                    extra_exe.chmod(0o755)
            # rewrite windows-specific things in host.yaml
            host_yaml = self.buildfolder / 'host.yaml'
            with host_yaml.open('r+b') as f:
                data = f.read()
                data = data.replace(b'factorio\\\\bin\\\\x64\\\\factorio', b'factorio/bin/x64/factorio')
                f.seek(0, os.SEEK_SET)
                f.write(data)
                f.truncate()


class AppImageCommand(setuptools.Command):
    description = "build an app image from build output"
    user_options = [
        ("build-folder=", None, "Folder to convert to AppImage."),
        ("dist-file=", None, "AppImage output file."),
        ("app-dir=", None, "Folder to use for packaging."),
        ("app-icon=", None, "The icon to use for the AppImage."),
        ("app-exec=", None, "The application to run inside the image."),
        ("yes", "y", 'Answer "yes" to all questions.'),
    ]
    build_folder: typing.Optional[Path]
    dist_file: typing.Optional[Path]
    app_dir: typing.Optional[Path]
    app_name: str
    app_exec: typing.Optional[Path]
    app_icon: typing.Optional[Path]  # source file
    app_id: str  # lower case name, used for icon and .desktop
    yes: bool

    def write_desktop(self):
        assert self.app_dir, "Invalid app_dir"
        desktop_filename = self.app_dir / f"{self.app_id}.desktop"
        with open(desktop_filename, 'w', encoding="utf-8") as f:
            f.write("\n".join((
                "[Desktop Entry]",
                f'Name={self.app_name}',
                f'Exec={self.app_exec}',
                "Type=Application",
                "Categories=Game",
                f'Icon={self.app_id}',
                ''
            )))
        desktop_filename.chmod(0o755)

    def write_launcher(self, default_exe: Path):
        assert self.app_dir, "Invalid app_dir"
        launcher_filename = self.app_dir / "AppRun"
        with open(launcher_filename, 'w', encoding="utf-8") as f:
            f.write(f"""#!/bin/sh
exe="{default_exe}"
match="${{1#--executable=}}"
if [ "${{#match}}" -lt "${{#1}}" ]; then
    exe="$match"
    shift
elif [ "$1" = "-executable" ] || [ "$1" = "--executable" ]; then
    exe="$2"
    shift; shift
fi
tmp="${{exe#*/}}"
if [ ! "${{#tmp}}" -lt "${{#exe}}" ]; then
    exe="{default_exe.parent}/$exe"
fi
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$APPDIR/{default_exe.parent}/lib"
$APPDIR/$exe "$@"
""")
        launcher_filename.chmod(0o755)
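        # the generated AppRun lets a user pick a different frozen component at run time, e.g. (hypothetical name):
        #   ./Archipelago_<version>_<platform>.AppImage --executable <frozen component name>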

    def install_icon(self, src: Path, name: typing.Optional[str] = None, symlink: typing.Optional[Path] = None):
        assert self.app_dir, "Invalid app_dir"
        try:
            from PIL import Image
        except ModuleNotFoundError:
            if not self.yes:
                input("Requirement PIL is not satisfied, press enter to install it")
            subprocess.call([sys.executable, '-m', 'pip', 'install', 'Pillow', '--upgrade'])
            from PIL import Image
        im = Image.open(src)
        res, _ = im.size

        if not name:
            name = src.stem
        ext = src.suffix
        dest_dir = Path(self.app_dir / f'usr/share/icons/hicolor/{res}x{res}/apps')
        dest_dir.mkdir(parents=True, exist_ok=True)
        dest_file = dest_dir / f'{name}{ext}'
        shutil.copy(src, dest_file)
        if symlink:
            symlink.symlink_to(dest_file.relative_to(symlink.parent))

    def initialize_options(self):
        self.build_folder = None
        self.app_dir = None
        self.app_name = self.distribution.metadata.name
        self.app_icon = self.distribution.executables[0].icon
        self.app_exec = Path('opt/{app_name}/{exe}'.format(
            app_name=self.distribution.metadata.name, exe=self.distribution.executables[0].target_name
        ))
        self.dist_file = Path("dist", "{app_name}_{app_version}_{platform}.AppImage".format(
            app_name=self.distribution.metadata.name, app_version=self.distribution.metadata.version,
            platform=sysconfig.get_platform()
        ))
        self.yes = False

    def finalize_options(self):
        if not self.app_dir:
            self.app_dir = self.build_folder.parent / "AppDir"
        self.app_id = self.app_name.lower()

    def run(self):
        self.dist_file.parent.mkdir(parents=True, exist_ok=True)
        if self.app_dir.is_dir():
            shutil.rmtree(self.app_dir)
        self.app_dir.mkdir(parents=True)
        opt_dir = self.app_dir / "opt" / self.distribution.metadata.name
        shutil.copytree(self.build_folder, opt_dir)
        root_icon = self.app_dir / f'{self.app_id}{self.app_icon.suffix}'
        self.install_icon(self.app_icon, self.app_id, symlink=root_icon)
        shutil.copy(root_icon, self.app_dir / '.DirIcon')
        self.write_desktop()
        self.write_launcher(self.app_exec)
        print(f'{self.app_dir} -> {self.dist_file}')
        subprocess.call(f'ARCH={build_arch} ./appimagetool -n "{self.app_dir}" "{self.dist_file}"', shell=True)


def find_libs(*args: str) -> typing.Sequence[typing.Tuple[str, str]]:
    """Try to find system libraries to be included."""
    if not args:
        return []

    arch = build_arch.replace('_', '-')
    libc = 'libc6'  # we currently don't support musl

    def parse(line):
        lib, path = line.strip().split(' => ')
        lib, typ = lib.split(' ', 1)
        for test_arch in ('x86-64', 'i386', 'aarch64'):
            if test_arch in typ:
                lib_arch = test_arch
                break
        else:
            lib_arch = ''
        for test_libc in ('libc6',):
            if test_libc in typ:
                lib_libc = test_libc
                break
        else:
            lib_libc = ''
        return (lib, lib_arch, lib_libc), path

    if not hasattr(find_libs, "cache"):
        ldconfig = shutil.which("ldconfig")
        assert ldconfig, "Make sure ldconfig is in PATH"
        data = subprocess.run([ldconfig, "-p"], capture_output=True, text=True).stdout.split("\n")[1:]
        find_libs.cache = {  # type: ignore [attr-defined]
            k: v for k, v in (parse(line) for line in data if "=>" in line)
        }
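        # each "ldconfig -p" entry looks roughly like
        #   "libssl.so.3 (libc6,x86-64) => /usr/lib/x86_64-linux-gnu/libssl.so.3"
        # which parse() maps to ("libssl.so.3", "x86-64", "libc6") -> "/usr/lib/x86_64-linux-gnu/libssl.so.3"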

    def find_lib(lib, arch, libc):
        for k, v in find_libs.cache.items():
            if k == (lib, arch, libc):
                return v
        for k, v in find_libs.cache.items():
            if k[0].startswith(lib) and k[1] == arch and k[2] == libc:
                return v
        return None

    res = []
    for arg in args:
        # try exact match, empty libc, empty arch, empty arch and libc
        file = find_lib(arg, arch, libc)
        file = file or find_lib(arg, arch, '')
        file = file or find_lib(arg, '', libc)
        file = file or find_lib(arg, '', '')
        # resolve symlinks
        for n in range(0, 5):
            res.append((file, os.path.join('lib', os.path.basename(file))))
            if not os.path.islink(file):
                break
            dirname = os.path.dirname(file)
            file = os.readlink(file)
            if not os.path.isabs(file):
                file = os.path.join(dirname, file)
    return res


cx_Freeze.setup(
    name="Archipelago",
    version=f"{version_tuple.major}.{version_tuple.minor}.{version_tuple.build}",
    description="Archipelago",
    executables=exes,
    ext_modules=[],  # required to disable auto-discovery with setuptools>=61
    options={
        "build_exe": {
            "packages": ["websockets", "worlds", "kivy"],
            "includes": [],
            "excludes": ["numpy", "Cython", "PySide2", "PIL",
                         "pandas"],
            "zip_include_packages": ["*"],
            "zip_exclude_packages": ["worlds", "sc2"],
            "include_files": [],  # broken in cx 6.14.0, we use more special sauce now
            "include_msvcr": False,
            "replace_paths": ["*."],
            "optimize": 1,
            "build_exe": buildfolder,
            "extra_data": extra_data,
            "extra_libs": extra_libs,
            "bin_includes": ["libffi.so", "libcrypt.so"] if is_linux else []
        },
        "bdist_appimage": {
            "build_folder": buildfolder,
        },
    },
    # override commands to get custom stuff in
    cmdclass={
        "build": BuildCommand,
        "build_exe": BuildExeCommand,
        "bdist_appimage": AppImageCommand,
    },
)
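
# typical invocations (a sketch, assuming only the command registrations above):
#   python setup.py build_exe --yes
#   python setup.py bdist_appimage   # Linux only; packs an existing build into an AppImage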