allow basic WebHost functionality to work
parent d451145d53
commit 20b72369d8
@@ -119,7 +119,8 @@ class Context(Node):
         self._load(self._decompress(data), use_embedded_server_options)
         self.data_filename = multidatapath
 
-    def _decompress(self, data: bytes) -> dict:
+    @staticmethod
+    def _decompress(data: bytes) -> dict:
         format_version = data[0]
         if format_version != 1:
             raise Exception("Incompatible multidata.")
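
Note on the new staticmethod: this hunk only shows the format-version check, so the rest of the payload handling is not visible here. As a rough sketch of the container format it implies, assuming (based on the pickle and restricted_loads changes elsewhere in this commit) that the version byte is followed by a zlib-compressed pickle payload:

    import pickle
    import zlib

    def decompress_sketch(data: bytes) -> dict:
        # byte 0 is the multidata format version; only version 1 is accepted
        if data[0] != 1:
            raise Exception("Incompatible multidata.")
        # assumption: the remainder is a zlib-compressed pickle payload; the
        # project would route this through Utils.restricted_loads rather than
        # plain pickle.loads
        return pickle.loads(zlib.decompress(data[1:]))

Making _decompress a staticmethod is what lets other modules call it as MultiServer.Context._decompress(raw_bytes) without a Context instance, which the WebHost hunks below rely on.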
Utils.py (8 lines changed)
@@ -84,9 +84,13 @@ def local_path(*path):
         # cx_Freeze
         local_path.cached_path = os.path.dirname(os.path.abspath(sys.argv[0]))
     else:
-        # we are running in a normal Python environment
         import __main__
-        local_path.cached_path = os.path.dirname(os.path.abspath(__main__.__file__))
+        if hasattr(__main__, "__file__"):
+            # we are running in a normal Python environment
+            local_path.cached_path = os.path.dirname(os.path.abspath(__main__.__file__))
+        else:
+            # pray
+            local_path.cached_path = os.path.abspath(".")
 
     return os.path.join(local_path.cached_path, *path)
 
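
Why the hasattr guard: __main__ has no __file__ attribute when the interpreter is embedded or interactive (for example under python -c or a REPL), which previously raised AttributeError. A small standalone illustration, using an arbitrary placeholder file name:

    import os
    import __main__

    if hasattr(__main__, "__file__"):
        # normal script execution: anchor paths next to the entry-point file
        base = os.path.dirname(os.path.abspath(__main__.__file__))
    else:
        # interactive or embedded interpreter: fall back to the working directory
        base = os.path.abspath(".")
    print(os.path.join(base, "example.yaml"))  # "example.yaml" is just a placeholder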
@@ -9,13 +9,13 @@ import socket
 import threading
 import time
 import random
-import zlib
+import pickle
 
 
 from .models import *
 
 from MultiServer import Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor
-from Utils import get_public_ipv4, get_public_ipv6, parse_yaml
+from Utils import get_public_ipv4, get_public_ipv6, restricted_loads
 
 
 class CustomClientMessageProcessor(ClientMessageProcessor):
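
restricted_loads itself is not part of this diff; it lives in Utils.py. It is presumably a safer stand-in for pickle.loads. A generic sketch of that standard pattern (the allow-list below is a placeholder, not the project's actual list):

    import io
    import pickle

    class RestrictedUnpickler(pickle.Unpickler):
        # placeholder allow-list; Utils.restricted_loads defines its own set
        allowed = {("collections", "Counter"), ("builtins", "set")}

        def find_class(self, module, name):
            if (module, name) in self.allowed:
                return super().find_class(module, name)
            raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")

    def restricted_loads_sketch(data: bytes):
        return RestrictedUnpickler(io.BytesIO(data)).load()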
@@ -81,7 +81,8 @@ class WebHostContext(Context):
     def init_save(self, enabled: bool = True):
         self.saving = enabled
         if self.saving:
-            existing_savegame = Room.get(id=self.room_id).multisave
+            existing_savegame = restricted_loads(Room.get(id=self.room_id).multisave)
             if existing_savegame:
                 self.set_save(existing_savegame)
             self._start_async_saving()
@@ -90,7 +90,7 @@ class WebHostContext(Context):
     @db_session
     def _save(self, exit_save:bool = False) -> bool:
         room = Room.get(id=self.room_id)
-        room.multisave = self.get_save()
+        room.multisave = pickle.dumps(self.get_save())
         # saving only occurs on activity, so we can "abuse" this information to mark this as last_activity
         if not exit_save: # we don't want to count a shutdown as activity, which would restart the server again
             room.last_activity = datetime.utcnow()
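
Taken together, these two hunks make the multisave column a pickled blob: _save() writes pickle.dumps(self.get_save()) and init_save() reads it back through restricted_loads. A hedged round-trip illustration with made-up save data:

    import pickle

    save_data = {"location_checks": {(0, 1): {12345}}}   # arbitrary example contents
    blob = pickle.dumps(save_data)                       # what gets stored in Room.multisave
    assert pickle.loads(blob) == save_data               # restricted_loads plays this role in the codebase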
@@ -8,6 +8,7 @@ from uuid import UUID
 from worlds.alttp import Items, Regions
 from WebHostLib import app, cache, Room
 from NetUtils import Hint
+from Utils import restricted_loads
 
 
 def get_id(item_name):
@@ -253,7 +254,7 @@ for item_name, data in Items.item_table.items():
         big_key_ids[area] = data[2]
         ids_big_key[data[2]] = area
 
-from MultiServer import get_item_name_from_id
+from MultiServer import get_item_name_from_id, Context
 
 
 def attribute_item(inventory, team, recipient, item):
@@ -295,9 +296,9 @@ def get_static_room_data(room: Room):
     result = _multidata_cache.get(room.seed.id, None)
     if result:
         return result
-    multidata = room.seed.multidata
+    multidata = Context._decompress(room.seed.multidata)
     # in > 100 players this can take a bit of time and is the main reason for the cache
-    locations = {tuple(k): tuple(v) for k, v in multidata['locations']}
+    locations = multidata['locations']
     names = multidata["names"]
     seed_checks_in_area = checks_in_area.copy()
 
@@ -308,30 +309,24 @@ def get_static_room_data(room: Room):
     for area, checks in key_only_locations.items():
         seed_checks_in_area[area] += len(checks)
     seed_checks_in_area["Total"] = 249
-    if "checks_in_area" not in multidata:
-        player_checks_in_area = {playernumber: (seed_checks_in_area if use_door_tracker and
-                                                (0x140031, playernumber) in locations else checks_in_area)
-                                 for playernumber in range(1, len(names[0]) + 1)}
-        player_location_to_area = {playernumber: location_to_area
-                                   for playernumber in range(1, len(names[0]) + 1)}
 
-    else:
-        player_checks_in_area = {playernumber: {areaname: len(multidata["checks_in_area"][playernumber][areaname])
-                                                if areaname != "Total" else multidata["checks_in_area"][playernumber]["Total"]
-                                                for areaname in ordered_areas}
-                                 for playernumber in range(1, len(names[0]) + 1)}
-        player_location_to_area = {playernumber: get_location_table(multidata["checks_in_area"][playernumber])
-                                   for playernumber in range(1, len(names[0]) + 1)}
+    player_checks_in_area = {playernumber: {areaname: len(multidata["checks_in_area"][f'{playernumber}'][areaname])
+                                            if areaname != "Total" else multidata["checks_in_area"][f'{playernumber}']["Total"]
+                                            for areaname in ordered_areas}
+                             for playernumber in range(1, len(names[0]) + 1)}
+    player_location_to_area = {playernumber: get_location_table(multidata["checks_in_area"][f'{playernumber}'])
+                               for playernumber in range(1, len(names[0]) + 1)}
 
     player_big_key_locations = {playernumber: set() for playernumber in range(1, len(names[0]) + 1)}
     player_small_key_locations = {playernumber: set() for playernumber in range(1, len(names[0]) + 1)}
-    for _, (item_id, item_player) in multidata["locations"]:
+    for _, (item_id, item_player) in locations.items():
         if item_id in ids_big_key:
             player_big_key_locations[item_player].add(ids_big_key[item_id])
         if item_id in ids_small_key:
             player_small_key_locations[item_player].add(ids_small_key[item_id])
 
-    result = locations, names, use_door_tracker, player_checks_in_area, player_location_to_area, player_big_key_locations, player_small_key_locations
+    result = locations, names, use_door_tracker, player_checks_in_area, player_location_to_area, \
+             player_big_key_locations, player_small_key_locations, multidata["precollected_items"]
     _multidata_cache[room.seed.id] = result
     return result
 
@@ -348,7 +343,7 @@ def getPlayerTracker(tracker: UUID, tracked_team: int, tracked_player: int):
         abort(404)
 
     # Collect seed information and pare it down to a single player
-    locations, names, use_door_tracker, seed_checks_in_area, player_location_to_area, player_big_key_locations, player_small_key_locations = get_static_room_data(room)
+    locations, names, use_door_tracker, seed_checks_in_area, player_location_to_area, player_big_key_locations, player_small_key_locations, precollected_items = get_static_room_data(room)
     player_name = names[tracked_team][tracked_player - 1]
     seed_checks_in_area = seed_checks_in_area[tracked_player]
     location_to_area = player_location_to_area[tracked_player]
@@ -356,13 +351,18 @@ def getPlayerTracker(tracker: UUID, tracked_team: int, tracked_player: int):
     checks_done = {loc_name: 0 for loc_name in default_locations}
 
     # Add starting items to inventory
-    starting_items = room.seed.multidata.get("precollected_items", None)[tracked_player - 1]
+    starting_items = precollected_items[tracked_player - 1]
     if starting_items:
         for item_id in starting_items:
             attribute_item_solo(inventory, item_id)
 
+    if room.multisave:
+        multisave = restricted_loads(room.multisave)
+    else:
+        multisave = {}
+
     # Add items to player inventory
-    for (ms_team, ms_player), locations_checked in room.multisave.get("location_checks", {}):
+    for (ms_team, ms_player), locations_checked in multisave.get("location_checks", {}):
         # logging.info(f"{ms_team}, {ms_player}, {locations_checked}")
         # Skip teams and players not matching the request
 
@@ -380,7 +380,7 @@ def getPlayerTracker(tracker: UUID, tracked_team: int, tracked_player: int):
             checks_done["Total"] += 1
 
     # Note the presence of the triforce item
-    for (ms_team, ms_player), game_state in room.multisave.get("client_game_state", []):
+    for (ms_team, ms_player), game_state in multisave.get("client_game_state", []):
         # Skip teams and players not matching the request
         if ms_team != tracked_team or ms_player != tracked_player:
             continue
@@ -484,7 +484,8 @@ def getTracker(tracker: UUID):
     room = Room.get(tracker=tracker)
     if not room:
         abort(404)
-    locations, names, use_door_tracker, seed_checks_in_area, player_location_to_area, player_big_key_locations, player_small_key_locations = get_static_room_data(room)
+    locations, names, use_door_tracker, seed_checks_in_area, player_location_to_area, player_big_key_locations, \
+        player_small_key_locations, precollected_items = get_static_room_data(room)
 
     inventory = {teamnumber: {playernumber: collections.Counter() for playernumber in range(1, len(team) + 1)}
                  for teamnumber, team in enumerate(names)}
@@ -492,14 +493,18 @@ def getTracker(tracker: UUID):
     checks_done = {teamnumber: {playernumber: {loc_name: 0 for loc_name in default_locations}
                                 for playernumber in range(1, len(team) + 1)}
                    for teamnumber, team in enumerate(names)}
-    precollected_items = room.seed.multidata.get("precollected_items", None)
     hints = {team: set() for team in range(len(names))}
-    if "hints" in room.multisave:
-        for key, hintdata in room.multisave["hints"]:
+    if room.multisave:
+        multisave = restricted_loads(room.multisave)
+    else:
+        multisave = {}
+    if "hints" in multisave:
+        for key, hintdata in multisave["hints"]:
             for hint in hintdata:
                 hints[key[0]].add(Hint(*hint))
 
-    for (team, player), locations_checked in room.multisave.get("location_checks", {}):
+    for (team, player), locations_checked in multisave.get("location_checks", {}):
         if precollected_items:
             precollected = precollected_items[player - 1]
             for item_id in precollected:
@@ -513,7 +518,7 @@ def getTracker(tracker: UUID):
                 checks_done[team][player][player_location_to_area[player][location]] += 1
                 checks_done[team][player]["Total"] += 1
 
-    for (team, player), game_state in room.multisave.get("client_game_state", []):
+    for (team, player), game_state in multisave.get("client_game_state", []):
         if game_state:
             inventory[team][player][106] = 1 # Triforce
 
@@ -525,7 +530,7 @@ def getTracker(tracker: UUID):
 
     activity_timers = {}
     now = datetime.datetime.utcnow()
-    for (team, player), timestamp in room.multisave.get("client_activity_timers", []):
+    for (team, player), timestamp in multisave.get("client_activity_timers", []):
         activity_timers[team, player] = now - datetime.datetime.utcfromtimestamp(timestamp)
 
     player_names = {}
@@ -533,12 +538,12 @@ def getTracker(tracker: UUID):
         for player, name in enumerate(names, 1):
             player_names[(team, player)] = name
     long_player_names = player_names.copy()
-    for (team, player), alias in room.multisave.get("name_aliases", []):
+    for (team, player), alias in multisave.get("name_aliases", []):
         player_names[(team, player)] = alias
         long_player_names[(team, player)] = f"{alias} ({long_player_names[(team, player)]})"
 
     video = {}
-    for (team, player), data in room.multisave.get("video", []):
+    for (team, player), data in multisave.get("video", []):
         video[(team, player)] = data
 
     return render_template("tracker.html", inventory=inventory, get_item_name_from_id=get_item_name_from_id,
@@ -2,6 +2,7 @@ import json
 import zlib
 import zipfile
 import logging
+import MultiServer
 
 from flask import request, flash, redirect, url_for, session, render_template
 from pony.orm import commit, select
@@ -46,9 +47,12 @@ def uploads():
                     spoiler = zfile.open(file, "r").read().decode("utf-8-sig")
                 elif file.filename.endswith(".archipelago"):
                     try:
-                        multidata = json.loads(zlib.decompress(zfile.open(file).read()).decode("utf-8-sig"))
+                        multidata = zfile.open(file).read()
+                        MultiServer.Context._decompress(multidata)
                     except:
                         flash("Could not load multidata. File may be corrupted or incompatible.")
+                    else:
+                        multidata = zfile.open(file).read()
             if multidata:
                 commit() # commit patches
                 seed = Seed(multidata=multidata, spoiler=spoiler, patches=patches, owner=session["_id"])
@@ -61,10 +65,13 @@ def uploads():
                 flash("No multidata was found in the zip file, which is required.")
         else:
             try:
-                multidata = json.loads(zlib.decompress(file.read()).decode("utf-8-sig"))
+                multidata = file.read()
+                MultiServer.Context._decompress(multidata)
             except:
                 flash("Could not load multidata. File may be corrupted or incompatible.")
+                raise
             else:
+                logging.info(multidata)
                 seed = Seed(multidata=multidata, owner=session["_id"])
                 commit() # place into DB and generate ids
                 return redirect(url_for("viewSeed", seed=seed.id))
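
Both upload paths now follow the same pattern: keep the raw multidata bytes for storage and call MultiServer.Context._decompress purely as a validity check, discarding its return value. A minimal sketch of that idea, factored into a helper for illustration (validate_multidata is hypothetical, not a function in the codebase):

    import MultiServer

    def validate_multidata(raw: bytes) -> bytes:
        # raises if the payload is corrupt or uses an unsupported format version;
        # on success the untouched bytes are what gets written to Seed.multidata
        MultiServer.Context._decompress(raw)
        return raw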