Logging Revamp

parent d42d77d3d3
commit 43323e59ce
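
This change centralizes logging configuration in a single Utils.init_logging(name, loglevel, write_mode, log_format) helper: it clears any existing root handlers, writes to logs/<name>.txt (UTF-8 with BOM), and mirrors output to stdout when a console is attached. The ad-hoc init_logging previously defined in CommonClient is removed, as are the scattered logging.basicConfig calls in Generate, MultiServer, and the WebHost room server; each entry point now calls Utils.init_logging under its own if __name__ == "__main__": guard (TextClient, FactorioClient, LttPClient, MinecraftClient, Server, and the per-room WebHost process), and the Minecraft client switches its print() output to logging.info / logging.error.
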
@@ -3,13 +3,16 @@ import logging
 import asyncio
 import urllib.parse
 import sys
-import os
 import typing
 import time
 
 import websockets
 
 import Utils
 
+if __name__ == "__main__":
+    Utils.init_logging("TextClient")
+
 from MultiServer import CommandProcessor
 from NetUtils import Endpoint, decode, NetworkItem, encode, JSONtoTextParser, ClientStatus, Permission
 from Utils import Version
@@ -20,8 +23,6 @@ logger = logging.getLogger("Client")
 # without terminal we have to use gui mode
 gui_enabled = not sys.stdout or "--nogui" not in sys.argv
 
-log_folder = Utils.local_path("logs")
-os.makedirs(log_folder, exist_ok=True)
 
 class ClientCommandProcessor(CommandProcessor):
     def __init__(self, ctx: CommonContext):
@@ -517,21 +518,6 @@ async def console_loop(ctx: CommonContext):
             logger.exception(e)
 
 
-def init_logging(name: str):
-    root_logger = logging.getLogger()
-    root_logger.setLevel(logging.INFO)
-    file_handler = logging.FileHandler(
-        os.path.join(log_folder, f"{name}.txt"),
-        "w",
-        encoding="utf-8-sig")
-    file_handler.setFormatter(logging.Formatter("[%(name)s]: %(message)s"))
-    root_logger.addHandler(file_handler)
-    if sys.stdout:
-        root_logger.addHandler(
-            logging.StreamHandler(sys.stdout)
-        )
-
-
 def get_base_parser(description=None):
     import argparse
     parser = argparse.ArgumentParser(description=description)
@@ -544,7 +530,6 @@ def get_base_parser(description=None):
 
 if __name__ == '__main__':
     # Text Mode to use !hint and such with games that have no text entry
-    init_logging("TextClient")
 
     class TextContext(CommonContext):
         tags = {"AP", "IgnoreGame"}
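Note: the Utils.init_logging call sits under the if __name__ == "__main__": guard, ahead of the remaining imports, so logging is configured only when the module runs as the entry point and not when it is imported by another client; the game clients below repeat the same pattern.
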
@@ -6,24 +6,24 @@ import string
 import copy
 import subprocess
 import time
+import random
 
 import factorio_rcon
 
 import colorama
 import asyncio
 from queue import Queue
-from CommonClient import CommonContext, server_loop, console_loop, ClientCommandProcessor, logger, gui_enabled, \
-    init_logging, get_base_parser
-from MultiServer import mark_raw
 
 import Utils
-import random
 
+if __name__ == "__main__":
+    Utils.init_logging("FactorioClient")
+
+from CommonClient import CommonContext, server_loop, console_loop, ClientCommandProcessor, logger, gui_enabled, \
+    get_base_parser
+from MultiServer import mark_raw
 from NetUtils import NetworkItem, ClientStatus, JSONtoTextParser, JSONMessagePart
 
 from worlds.factorio import Factorio
 
-init_logging("FactorioClient")
-
 
 class FactorioCommandProcessor(ClientCommandProcessor):
     ctx: FactorioContext
Generate.py
@@ -9,6 +9,7 @@ from collections import Counter
 import string
 
 import ModuleUpdate
+import Utils
 
 ModuleUpdate.update()
 
@@ -44,7 +45,6 @@ def mystery_argparse():
     parser.add_argument('--outputpath', default=options["general_options"]["output_path"])
     parser.add_argument('--race', action='store_true', default=defaults["race"])
     parser.add_argument('--meta_file_path', default=defaults["meta_file_path"])
-    parser.add_argument('--log_output_path', help='Path to store output log')
     parser.add_argument('--log_level', default='info', help='Sets log level')
     parser.add_argument('--yaml_output', default=0, type=lambda value: max(int(value), 0),
                         help='Output rolled mystery results to yaml up to specified number (made for async multiworld)')
@@ -125,18 +125,7 @@ def main(args=None, callback=ERmain):
     erargs.outputname = seed_name
     erargs.outputpath = args.outputpath
 
-    # set up logger
-    if args.log_level:
-        erargs.loglevel = args.log_level
-    loglevel = {'error': logging.ERROR, 'info': logging.INFO, 'warning': logging.WARNING, 'debug': logging.DEBUG}[
-        erargs.loglevel]
-
-    if args.log_output_path:
-        os.makedirs(args.log_output_path, exist_ok=True)
-        logging.basicConfig(format='%(message)s', level=loglevel, force=True,
-                            filename=os.path.join(args.log_output_path, f"{seed}.log"))
-    else:
-        logging.basicConfig(format='%(message)s', level=loglevel, force=True)
+    Utils.init_logging(f"Generate_{seed}.txt", loglevel=args.log_level)
 
     erargs.rom = args.rom
     erargs.enemizercli = args.enemizercli
@@ -1,7 +1,6 @@
 from __future__ import annotations
-import atexit
 
-exit_func = atexit.register(input, "Press enter to close.")
+import atexit
 import threading
 import time
 import multiprocessing
@@ -13,23 +12,20 @@ import logging
 import asyncio
 from json import loads, dumps
 
-import ModuleUpdate
+from Utils import get_item_name_from_id, init_logging
 
-ModuleUpdate.update()
+if __name__ == "__main__":
+    init_logging("LttPClient")
 
-from Utils import get_item_name_from_id
 import colorama
 
 from NetUtils import *
 
 from worlds.alttp import Regions, Shops
 from worlds.alttp import Items
 from worlds.alttp.Rom import ROM_PLAYER_LIMIT
 import Utils
-from CommonClient import CommonContext, server_loop, console_loop, ClientCommandProcessor, gui_enabled, init_logging, \
-    get_base_parser
-
-init_logging("LttPClient")
+from CommonClient import CommonContext, server_loop, console_loop, ClientCommandProcessor, gui_enabled, get_base_parser
 
 snes_logger = logging.getLogger("SNES")
@@ -1022,4 +1018,3 @@ if __name__ == '__main__':
     loop.run_until_complete(main())
     loop.close()
     colorama.deinit()
-    atexit.unregister(exit_func)
@@ -4,8 +4,8 @@ import re
 import atexit
 from subprocess import Popen
 from shutil import copyfile
-from base64 import b64decode
 from time import strftime
+import logging
 
 import requests
 
@@ -34,7 +34,7 @@ def prompt_yes_no(prompt):
 def find_forge_jar(forge_dir):
     for entry in os.scandir(forge_dir):
         if ".jar" in entry.name and "forge" in entry.name:
-            print(f"Found forge .jar: {entry.name}")
+            logging.info(f"Found forge .jar: {entry.name}")
             return entry.name
     raise FileNotFoundError(f"Could not find forge .jar in {forge_dir}.")
 
@@ -47,12 +47,12 @@ def find_ap_randomizer_jar(forge_dir):
         for entry in os.scandir(mods_dir):
             match = ap_mod_re.match(entry.name)
             if match:
-                print(f"Found AP randomizer mod: {match.group()}")
+                logging.info(f"Found AP randomizer mod: {match.group()}")
                 return match.group()
         return None
     else:
         os.mkdir(mods_dir)
-        print(f"Created mods folder in {forge_dir}")
+        logging.info(f"Created mods folder in {forge_dir}")
         return None
 
 
@@ -64,17 +64,17 @@ def replace_apmc_files(forge_dir, apmc_file):
     copy_apmc = True
     if not os.path.isdir(apdata_dir):
         os.mkdir(apdata_dir)
-        print(f"Created APData folder in {forge_dir}")
+        logging.info(f"Created APData folder in {forge_dir}")
     for entry in os.scandir(apdata_dir):
         if entry.name.endswith(".apmc") and entry.is_file():
             if not os.path.samefile(apmc_file, entry.path):
                 os.remove(entry.path)
-                print(f"Removed {entry.name} in {apdata_dir}")
+                logging.info(f"Removed {entry.name} in {apdata_dir}")
             else:  # apmc already in apdata
                 copy_apmc = False
     if copy_apmc:
         copyfile(apmc_file, os.path.join(apdata_dir, os.path.basename(apmc_file)))
-        print(f"Copied {os.path.basename(apmc_file)} to {apdata_dir}")
+        logging.info(f"Copied {os.path.basename(apmc_file)} to {apdata_dir}")
 
 
 # Check mod version, download new mod from GitHub releases page if needed.
@@ -86,30 +86,31 @@ def update_mod(forge_dir):
     if resp.status_code == 200:  # OK
         latest_release = resp.json()[0]
         if ap_randomizer != latest_release['assets'][0]['name']:
-            print(f"A new release of the Minecraft AP randomizer mod was found: {latest_release['assets'][0]['name']}")
+            logging.info(f"A new release of the Minecraft AP randomizer mod was found: "
+                         f"{latest_release['assets'][0]['name']}")
             if ap_randomizer is not None:
-                print(f"Your current mod is {ap_randomizer}.")
+                logging.info(f"Your current mod is {ap_randomizer}.")
             else:
-                print(f"You do not have the AP randomizer mod installed.")
+                logging.info(f"You do not have the AP randomizer mod installed.")
             if prompt_yes_no("Would you like to update?"):
                 old_ap_mod = os.path.join(forge_dir, 'mods', ap_randomizer) if ap_randomizer is not None else None
                 new_ap_mod = os.path.join(forge_dir, 'mods', latest_release['assets'][0]['name'])
-                print("Downloading AP randomizer mod. This may take a moment...")
+                logging.info("Downloading AP randomizer mod. This may take a moment...")
                 apmod_resp = requests.get(latest_release['assets'][0]['browser_download_url'])
                 if apmod_resp.status_code == 200:
                     with open(new_ap_mod, 'wb') as f:
                         f.write(apmod_resp.content)
-                        print(f"Wrote new mod file to {new_ap_mod}")
+                        logging.info(f"Wrote new mod file to {new_ap_mod}")
                     if old_ap_mod is not None:
                         os.remove(old_ap_mod)
-                        print(f"Removed old mod file from {old_ap_mod}")
+                        logging.info(f"Removed old mod file from {old_ap_mod}")
                 else:
-                    print(f"Error retrieving the randomizer mod (status code {apmod_resp.status_code}).")
-                    print(f"Please report this issue on the Archipelago Discord server.")
+                    logging.error(f"Error retrieving the randomizer mod (status code {apmod_resp.status_code}).")
+                    logging.error(f"Please report this issue on the Archipelago Discord server.")
                     sys.exit(1)
     else:
-        print(f"Error checking for randomizer mod updates (status code {resp.status_code}).")
-        print(f"If this was not expected, please report this issue on the Archipelago Discord server.")
+        logging.error(f"Error checking for randomizer mod updates (status code {resp.status_code}).")
+        logging.error(f"If this was not expected, please report this issue on the Archipelago Discord server.")
         if not prompt_yes_no("Continue anyways?"):
            sys.exit(0)
 
@@ -127,13 +128,13 @@ def check_eula(forge_dir):
         text = f.read()
         if 'false' in text:
             # Prompt user to agree to the EULA
-            print("You need to agree to the Minecraft EULA in order to run the server.")
-            print("The EULA can be found at https://account.mojang.com/documents/minecraft_eula")
+            logging.info("You need to agree to the Minecraft EULA in order to run the server.")
+            logging.info("The EULA can be found at https://account.mojang.com/documents/minecraft_eula")
             if prompt_yes_no("Do you agree to the EULA?"):
                 f.seek(0)
                 f.write(text.replace('false', 'true'))
                 f.truncate()
-                print(f"Set {eula_path} to true")
+                logging.info(f"Set {eula_path} to true")
             else:
                 sys.exit(0)
 
@@ -152,12 +153,13 @@ def run_forge_server(forge_dir, heap_arg):
         heap_arg = "-Xmx" + heap_arg
 
     argstring = ' '.join([java_exe, heap_arg, "-jar", forge_server, "-nogui"])
-    print(f"Running Forge server: {argstring}")
+    logging.info(f"Running Forge server: {argstring}")
    os.chdir(forge_dir)
    return Popen(argstring)
 
 
 if __name__ == '__main__':
+    Utils.init_logging("MinecraftClient")
     parser = argparse.ArgumentParser()
     parser.add_argument("apmc_file", default=None, nargs='?', help="Path to an Archipelago Minecraft data file (.apmc)")
@@ -1652,8 +1652,7 @@ async def auto_shutdown(ctx, to_cancel=None):
 
 
 async def main(args: argparse.Namespace):
-    logging.basicConfig(force=True,
-                        format='[%(asctime)s] %(message)s', level=getattr(logging, args.loglevel.upper(), logging.INFO))
+    Utils.init_logging("Server", loglevel=args.loglevel.lower())
 
     ctx = Context(args.host, args.port, args.server_password, args.password, args.location_check_points,
                   args.hint_cost, not args.disable_item_cheat, args.forfeit_mode, args.collect_mode,
Utils.py
@@ -25,6 +25,8 @@ import functools
 import io
 import collections
 import importlib
+import logging
 
 from yaml import load, dump, safe_load
 
 try:
@@ -124,7 +126,6 @@ unsafe_parse_yaml = functools.partial(load, Loader=Loader)
 def get_public_ipv4() -> str:
     import socket
     import urllib.request
-    import logging
     ip = socket.gethostbyname(socket.gethostname())
     try:
         ip = urllib.request.urlopen('https://checkip.amazonaws.com/').read().decode('utf8').strip()
@@ -141,7 +142,6 @@ def get_public_ipv6() -> str:
 def get_public_ipv6() -> str:
     import socket
     import urllib.request
-    import logging
     ip = socket.gethostbyname(socket.gethostname())
     try:
         ip = urllib.request.urlopen('https://v6.ident.me').read().decode('utf8').strip()
@@ -211,7 +211,6 @@ def get_default_options() -> dict:
 
 
 def update_options(src: dict, dest: dict, filename: str, keys: list) -> dict:
-    import logging
     for key, value in src.items():
         new_keys = keys.copy()
         new_keys.append(key)
@@ -278,7 +277,6 @@ def persistent_load() -> typing.Dict[dict]:
         with open(path, "r") as f:
             storage = unsafe_parse_yaml(f.read())
     except Exception as e:
-        import logging
         logging.debug(f"Could not read store: {e}")
     if storage is None:
         storage = {}
@@ -337,7 +335,6 @@ def get_adjuster_settings(romfile: str, skip_questions: bool = False) -> typing.
         return romfile, False
     else:
         adjusted = False
-    import logging
     if not hasattr(get_adjuster_settings, "adjust_wanted"):
         logging.info(f"Skipping post-patch adjustment")
     get_adjuster_settings.adjuster_settings = adjuster_settings
@@ -406,3 +403,28 @@ class KeyedDefaultDict(collections.defaultdict):
 
 def get_text_between(text: str, start: str, end: str) -> str:
     return text[text.index(start) + len(start): text.rindex(end)]
+
+
+loglevel_mapping = {'error': logging.ERROR, 'info': logging.INFO, 'warning': logging.WARNING, 'debug': logging.DEBUG}
+
+
+def init_logging(name: str, loglevel: typing.Union[str, int] = logging.INFO, write_mode: str = "w",
+                 log_format: str = "[%(name)s]: %(message)s"):
+    loglevel: int = loglevel_mapping.get(loglevel, loglevel)
+    log_folder = local_path("logs")
+    os.makedirs(log_folder, exist_ok=True)
+    root_logger = logging.getLogger()
+    for handler in root_logger.handlers[:]:
+        root_logger.removeHandler(handler)
+        handler.close()
+    root_logger.setLevel(loglevel)
+    file_handler = logging.FileHandler(
+        os.path.join(log_folder, f"{name}.txt"),
+        write_mode,
+        encoding="utf-8-sig")
+    file_handler.setFormatter(logging.Formatter(log_format))
+    root_logger.addHandler(file_handler)
+    if sys.stdout:
+        root_logger.addHandler(
+            logging.StreamHandler(sys.stdout)
+        )
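For reference, a minimal usage sketch of the new helper (not part of the commit; the client and logger names below are made up, and it assumes Utils is importable from the repository root, as it is for the clients above):

import logging

import Utils

if __name__ == "__main__":
    # Clears any existing root handlers, then logs to logs/ExampleClient.txt
    # (utf-8-sig) and, when sys.stdout is available, to stdout as well.
    Utils.init_logging("ExampleClient", loglevel="debug", write_mode="w")
    logging.getLogger("ExampleClient").info("logging initialised")
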
@@ -155,7 +155,6 @@ def _read_log(path: str):
 
 @app.route('/log/<suuid:room>')
 def display_log(room: UUID):
-    # noinspection PyTypeChecker
     return Response(_read_log(os.path.join("logs", str(room) + ".txt")), mimetype="text/plain;charset=UTF-8")
@@ -11,7 +11,7 @@ import time
 import random
 import pickle
 
+import Utils
 from .models import *
 
 from MultiServer import Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor
@@ -111,11 +111,7 @@ def run_server_process(room_id, ponyconfig: dict):
     db.generate_mapping(check_tables=False)
 
     async def main():
-        logging.basicConfig(format='[%(asctime)s] %(message)s',
-                            level=logging.INFO,
-                            handlers=[
-                                logging.FileHandler(os.path.join(LOGS_FOLDER, f"{room_id}.txt"), 'a', 'utf-8-sig')])
+        Utils.init_logging(str(room_id), write_mode="a")
         ctx = WebHostContext()
         ctx.load(room_id)
         ctx.init_save()
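Because the room server passes write_mode="a", each room keeps appending to its existing logs/<room_id>.txt across restarts, matching the 'a' mode of the logging.FileHandler it replaces.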