From 1e6cd772b5013e389c76b6706cad776f7270a69f Mon Sep 17 00:00:00 2001 From: Juan Daniel Date: Wed, 19 Jul 2023 14:58:36 +0100 Subject: [PATCH 01/13] fix: 'PIL.Image' has no attribute 'ANTIALIAS' --- zoom.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/zoom.py b/zoom.py index 68b3681..b2a81e3 100644 --- a/zoom.py +++ b/zoom.py @@ -67,7 +67,7 @@ def simpleZoom(workQueue): for z in range(start - 1, stop - 1, -1): if img.size[0] >= MINRENDERBOXSIZE * 2 and img.size[1] >= MINRENDERBOXSIZE * 2: - img = img.resize((img.size[0] // 2, img.size[1] // 2), Image.ANTIALIAS) + img = img.resize((img.size[0] // 2, img.size[1] // 2), Image.Resampling.LANCZOS) zFolder = Path(folder, str(z)) if not zFolder.exists(): zFolder.mkdir(parents=True) @@ -218,7 +218,7 @@ def work(basepath, pathList, surfaceName, daytime, size, start, stop, last, chun coords[m][1] * size // 2, ), im=img.resize( - (size // 2, size // 2), Image.ANTIALIAS + (size // 2, size // 2), Image.Resampling.LANCZOS ), ) @@ -461,7 +461,7 @@ def zoom( ), im=Image.open(path, mode="r") .convert("RGB") - .resize((imageSize, imageSize), Image.ANTIALIAS), + .resize((imageSize, imageSize), Image.Resampling.LANCZOS), ) if OUTEXT != EXT: From 6f3e9b03aac1f2c4a3f5ad10101d1e818eb08d9f Mon Sep 17 00:00:00 2001 From: Poli Date: Mon, 9 Oct 2023 23:57:26 +0200 Subject: [PATCH 02/13] Fix Python Syntax error Issue #119 --- auto.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/auto.py b/auto.py index 273e4da..4041397 100644 --- a/auto.py +++ b/auto.py @@ -58,7 +58,7 @@ def naturalSort(l): convert = lambda text: int(text) if text.isdigit() else text.lower() - alphanum_key = lambda key: [ convert(c) for c in re.split('(\d+)', key) ] + alphanum_key = lambda key: [ convert(c) for c in re.split(r'(\d+)', key) ] return sorted(l, key = alphanum_key) def printErase(arg): @@ -796,7 +796,7 @@ def addTag(tags, itemType, itemName, force=False): if "iconType" in tag: addTag(tags, tag["iconType"], tag["iconName"], True) if "text" in tag: - for match in re.finditer("\[([^=]+)=([^\]]+)", tag["text"]): + for match in re.finditer(r"\[([^=]+)=([^\]]+)", tag["text"]): addTag(tags, match.group(1), match.group(2)) rmtree(os.path.join(workfolder, "Images", "labels"), ignore_errors=True) From 753f9064d9be96203a8dc9c6deb9c4f8caf0204f Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 10:59:06 +0200 Subject: [PATCH 03/13] wrap importing pkg-resources in try except for python3.12+ --- auto.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/auto.py b/auto.py index 273e4da..b39b470 100644 --- a/auto.py +++ b/auto.py @@ -6,17 +6,20 @@ import os import traceback -import pkg_resources -from pkg_resources import DistributionNotFound, VersionConflict from pathlib import Path try: - with Path(__file__, "..", "requirements.txt").resolve().open("r", encoding="utf-8") as f: - pkg_resources.require(f.read().splitlines()) -except (DistributionNotFound, VersionConflict) as ex: + import pkg_resources + from pkg_resources import DistributionNotFound, VersionConflict + try: + with Path(__file__, "..", "requirements.txt").resolve().open("r", encoding="utf-8") as f: + pkg_resources.require(f.read().splitlines()) + except (DistributionNotFound, VersionConflict) as ex: + raise ImportError from ex +except ImportError as ex: traceback.print_exc() print("\nDependencies not met. 
Run `pip install -r requirements.txt` to install missing dependencies.") - sys.exit(1) + raise ex import glob import argparse From b804a09a6737b351b692a06e22e3bb0b58d695c1 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 11:00:34 +0200 Subject: [PATCH 04/13] update requirements.txt --- requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 668d779..32dfd97 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,5 @@ PyTurboJPEG>=1.1.5 psutil>=5.4.8 numpy>=1.16.4 -Pillow>=6.1.0 \ No newline at end of file +Pillow>=6.1.0 +setuptools>=68.0.0 \ No newline at end of file From 42445ce7e6f74509b1ece5f20c9677b85c454719 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 15:42:41 +0200 Subject: [PATCH 05/13] ignore linkboxes that werent rendered --- auto.py | 1624 +++++++++++++++++++++++++++---------------------------- crop.py | 157 +++--- ref.py | 566 +++++++++---------- zoom.py | 823 ++++++++++++++-------------- 4 files changed, 1583 insertions(+), 1587 deletions(-) diff --git a/auto.py b/auto.py index fa881f0..e2b6c45 100644 --- a/auto.py +++ b/auto.py @@ -2,24 +2,24 @@ import sys if sys.maxsize <= 2**32 or sys.hexversion < 0x3060000: - raise Exception("64 bit Python 3.6 or higher is required for this script.") + raise Exception("64 bit Python 3.6 or higher is required for this script.") import os import traceback from pathlib import Path try: - import pkg_resources - from pkg_resources import DistributionNotFound, VersionConflict - try: - with Path(__file__, "..", "requirements.txt").resolve().open("r", encoding="utf-8") as f: - pkg_resources.require(f.read().splitlines()) - except (DistributionNotFound, VersionConflict) as ex: - raise ImportError from ex + import pkg_resources + from pkg_resources import DistributionNotFound, VersionConflict + try: + with Path(__file__, "..", "requirements.txt").resolve().open("r", encoding="utf-8") as f: + pkg_resources.require(f.read().splitlines()) + except (DistributionNotFound, VersionConflict) as ex: + raise ImportError from ex except ImportError as ex: - traceback.print_exc() - print("\nDependencies not met. Run `pip install -r requirements.txt` to install missing dependencies.") - raise ex + traceback.print_exc() + print("\nDependencies not met. 
Run `pip install -r requirements.txt` to install missing dependencies.") + raise ex import glob import argparse @@ -29,10 +29,8 @@ import errno import math import multiprocessing as mp -import random import re import string -import signal import subprocess import tempfile from tempfile import TemporaryDirectory @@ -46,7 +44,6 @@ from shutil import get_terminal_size as tsize from shutil import rmtree from socket import timeout -from subprocess import call from zipfile import ZipFile import psutil @@ -60,225 +57,225 @@ userFolder = Path(__file__, "..", "..", "..").resolve() def naturalSort(l): - convert = lambda text: int(text) if text.isdigit() else text.lower() - alphanum_key = lambda key: [ convert(c) for c in re.split(r'(\d+)', key) ] - return sorted(l, key = alphanum_key) + convert = lambda text: int(text) if text.isdigit() else text.lower() + alphanum_key = lambda key: [ convert(c) for c in re.split(r'(\d+)', key) ] + return sorted(l, key = alphanum_key) def printErase(arg): - try: - tsiz = tsize()[0] - print("\r{}{}\n".format(arg, " " * (tsiz*math.ceil(len(arg)/tsiz)-len(arg) - 1)), end="", flush=True) - except: - #raise - pass + try: + tsiz = tsize()[0] + print("\r{}{}\n".format(arg, " " * (tsiz*math.ceil(len(arg)/tsiz)-len(arg) - 1)), end="", flush=True) + except: + #raise + pass def startGameAndReadGameLogs(results, condition, exeWithArgs, isSteam, tmpDir, pidBlacklist, rawTags, args): - pipeOut, pipeIn = os.pipe() - p = subprocess.Popen(exeWithArgs, stdout=pipeIn) - - printingStackTraceback = False - # TODO: keep printing multiline stuff until new print detected - prevPrinted = False - def handleGameLine(line, isFirst): - if isFirst and not re.match(r'^ *\d+\.\d{3} \d{4}-\d\d-\d\d \d\d:\d\d:\d\d; Factorio (\d+\.\d+\.\d+) \(build (\d+), [^)]+\)$', line): - suggestion = "maybe your version is outdated or too new?" - if line.endswith('Error Util.cpp:83: weakly_canonical: Incorrect function.'): - suggestion = "maybe your temp directory is on a ramdisk?" - raise RuntimeError(f"Unrecognised output from factorio ({suggestion})\n\nOutput from factorio:\n{line}") - - nonlocal prevPrinted - line = line.rstrip('\n') - if re.match(r'^\ *\d+(?:\.\d+)? *[^\n]*$', line) is None: - if prevPrinted: - printErase(line) - return - - prevPrinted = False - - m = re.match(r'^\ *\d+(?:\.\d+)? *Script *@__L0laapk3_FactorioMaps__\/data-final-fixes\.lua:\d+: FactorioMaps_Output_RawTagPaths:([^:]+):(.*)$', line, re.IGNORECASE) - if m is not None: - rawTags[m.group(1)] = m.group(2) - if rawTags["__used"]: - raise Exception("Tags added after they were used.") - else: - if printingStackTraceback or line == "stack traceback:": - printErase("[GAME] %s" % line) - prevPrinted = True - return True - m = re.match(r'^\ *\d+(?:\.\d+)? *Script *@__L0laapk3_FactorioMaps__\/(.*?)(?:(\[info\]) ?(.*))?$', line, re.IGNORECASE) - if m is not None and m.group(2) is not None: - printErase(m.group(3)) - prevPrinted = True - elif m is not None and args.verbose: - printErase(m.group(1)) - prevPrinted = True - elif line.lower() in ("error", "warn", "exception", "fail", "invalid") or (args.verbosegame and len(line) > 0): - printErase("[GAME] %s" % line) - prevPrinted = True - return False - - - with os.fdopen(pipeOut, 'r') as pipef: - - if isSteam: - printErase("using steam launch hack") - - attrs = ('pid', 'name', 'create_time') - - # on some devices, the previous check wasn't enough apparently, so explicitely wait until the log file is created. 
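			# Note (annotation on the surrounding code, not part of the patch itself): when
			# Factorio is launched through Steam, the game process is not a child of this
			# script, so its stdout cannot be read from the pipe opened above; the loop
			# below instead waits for factorio-current.log to appear in the temporary
			# write directory, and that log file is tailed later on to follow game output.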
- while not os.path.exists(os.path.join(tmpDir, "factorio-current.log")): - time.sleep(0.4) - - oldest = None - pid = None - while pid is None: - for proc in psutil.process_iter(attrs=attrs): - pinfo = proc.as_dict(attrs=attrs) - if pinfo["name"] == "factorio.exe" and pinfo["pid"] not in pidBlacklist and (pid is None or pinfo["create_time"] < oldest): - oldest = pinfo["create_time"] - pid = pinfo["pid"] - if pid is None: - time.sleep(1) - # print(f"PID: {pid}") - else: - pid = p.pid - - results.extend((isSteam, pid)) - with condition: - condition.notify() - - psutil.Process(pid).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 10) - - isFirstLine = True - if isSteam: - pipef.close() - with Path(tmpDir, "factorio-current.log").open("r", encoding="utf-8") as f: - while psutil.pid_exists(pid): - where = f.tell() - line = f.readline() - if not line: - time.sleep(0.4) - f.seek(where) - else: - printingStackTraceback = handleGameLine(line, isFirstLine) - isFirstLine = False - - else: - while True: - line = pipef.readline().rstrip("\n") - printingStackTraceback = handleGameLine(line, isFirstLine) - isFirstLine = False + pipeOut, pipeIn = os.pipe() + p = subprocess.Popen(exeWithArgs, stdout=pipeIn) + + printingStackTraceback = False + # TODO: keep printing multiline stuff until new print detected + prevPrinted = False + def handleGameLine(line, isFirst): + if isFirst and not re.match(r'^ *\d+\.\d{3} \d{4}-\d\d-\d\d \d\d:\d\d:\d\d; Factorio (\d+\.\d+\.\d+) \(build (\d+), [^)]+\)$', line): + suggestion = "maybe your version is outdated or too new?" + if line.endswith('Error Util.cpp:83: weakly_canonical: Incorrect function.'): + suggestion = "maybe your temp directory is on a ramdisk?" + raise RuntimeError(f"Unrecognised output from factorio ({suggestion})\n\nOutput from factorio:\n{line}") + + nonlocal prevPrinted + line = line.rstrip('\n') + if re.match(r'^\ *\d+(?:\.\d+)? *[^\n]*$', line) is None: + if prevPrinted: + printErase(line) + return + + prevPrinted = False + + m = re.match(r'^\ *\d+(?:\.\d+)? *Script *@__L0laapk3_FactorioMaps__\/data-final-fixes\.lua:\d+: FactorioMaps_Output_RawTagPaths:([^:]+):(.*)$', line, re.IGNORECASE) + if m is not None: + rawTags[m.group(1)] = m.group(2) + if rawTags["__used"]: + raise Exception("Tags added after they were used.") + else: + if printingStackTraceback or line == "stack traceback:": + printErase("[GAME] %s" % line) + prevPrinted = True + return True + m = re.match(r'^\ *\d+(?:\.\d+)? *Script *@__L0laapk3_FactorioMaps__\/(.*?)(?:(\[info\]) ?(.*))?$', line, re.IGNORECASE) + if m is not None and m.group(2) is not None: + printErase(m.group(3)) + prevPrinted = True + elif m is not None and args.verbose: + printErase(m.group(1)) + prevPrinted = True + elif line.lower() in ("error", "warn", "exception", "fail", "invalid") or (args.verbosegame and len(line) > 0): + printErase("[GAME] %s" % line) + prevPrinted = True + return False + + + with os.fdopen(pipeOut, 'r') as pipef: + + if isSteam: + printErase("using steam launch hack") + + attrs = ('pid', 'name', 'create_time') + + # on some devices, the previous check wasn't enough apparently, so explicitely wait until the log file is created. 
+ while not os.path.exists(os.path.join(tmpDir, "factorio-current.log")): + time.sleep(0.4) + + oldest = None + pid = None + while pid is None: + for proc in psutil.process_iter(attrs=attrs): + pinfo = proc.as_dict(attrs=attrs) + if pinfo["name"] == "factorio.exe" and pinfo["pid"] not in pidBlacklist and (pid is None or pinfo["create_time"] < oldest): + oldest = pinfo["create_time"] + pid = pinfo["pid"] + if pid is None: + time.sleep(1) + # print(f"PID: {pid}") + else: + pid = p.pid + + results.extend((isSteam, pid)) + with condition: + condition.notify() + + psutil.Process(pid).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 10) + + isFirstLine = True + if isSteam: + pipef.close() + with Path(tmpDir, "factorio-current.log").open("r", encoding="utf-8") as f: + while psutil.pid_exists(pid): + where = f.tell() + line = f.readline() + if not line: + time.sleep(0.4) + f.seek(where) + else: + printingStackTraceback = handleGameLine(line, isFirstLine) + isFirstLine = False + + else: + while True: + line = pipef.readline().rstrip("\n") + printingStackTraceback = handleGameLine(line, isFirstLine) + isFirstLine = False def checkUpdate(reverseUpdateTest:bool = False): - try: - print("checking for updates") - latestUpdates = json.loads(urllib.request.urlopen('https://cdn.jsdelivr.net/gh/L0laapk3/FactorioMaps@latest/updates.json', timeout=30).read()) - with Path(__file__, "..", "updates.json").resolve().open("r", encoding="utf-8") as f: - currentUpdates = json.load(f) - if reverseUpdateTest: - latestUpdates, currentUpdates = currentUpdates, latestUpdates - - updates = [] - majorUpdate = False - currentVersion = (0, 0, 0) - for verStr, changes in currentUpdates.items(): - ver = tuple(map(int, verStr.split("."))) - if currentVersion[0] < ver[0] or (currentVersion[0] == ver[0] and currentVersion[1] < ver[1]): - currentVersion = ver - for verStr, changes in latestUpdates.items(): - if verStr not in currentUpdates: - ver = tuple(map(int, verStr.split("."))) - updates.append((verStr, changes)) - updates.sort(key = lambda u: u[0]) - if len(updates) > 0: - - padding = max(map(lambda u: len(u[0]), updates)) - changelogLines = [] - for update in updates: - if isinstance(update[1], str): - updateText = update[1] - else: - updateText = str(("\r\n " + " "*padding).join(update[1])) - if updateText[0] == "!": - majorUpdate = True - updateText = updateText[1:] - changelogLines.append(" %s: %s" % (update[0].rjust(padding), updateText)) - print("") - print("") - print("================================================================================") - print("") - print((" An " + ("important" if majorUpdate else "incremental") + " update has been found!")) - print("") - print(" Here's what changed:") - for line in changelogLines: - print(line) - print("") - print("") - print(" Download: https://git.io/factoriomaps") - if majorUpdate: - print("") - print(" You can dismiss this by using --no-update (not recommended)") - print("") - print("================================================================================") - print("") - print("") - if majorUpdate or reverseUpdateTest: - exit(1) - - except (urllib.error.URLError, timeout) as e: - print("Failed to check for updates. 
%s: %s" % (type(e).__name__, e)) + try: + print("checking for updates") + latestUpdates = json.loads(urllib.request.urlopen('https://cdn.jsdelivr.net/gh/L0laapk3/FactorioMaps@latest/updates.json', timeout=30).read()) + with Path(__file__, "..", "updates.json").resolve().open("r", encoding="utf-8") as f: + currentUpdates = json.load(f) + if reverseUpdateTest: + latestUpdates, currentUpdates = currentUpdates, latestUpdates + + updates = [] + majorUpdate = False + currentVersion = (0, 0, 0) + for verStr, changes in currentUpdates.items(): + ver = tuple(map(int, verStr.split("."))) + if currentVersion[0] < ver[0] or (currentVersion[0] == ver[0] and currentVersion[1] < ver[1]): + currentVersion = ver + for verStr, changes in latestUpdates.items(): + if verStr not in currentUpdates: + ver = tuple(map(int, verStr.split("."))) + updates.append((verStr, changes)) + updates.sort(key = lambda u: u[0]) + if len(updates) > 0: + + padding = max(map(lambda u: len(u[0]), updates)) + changelogLines = [] + for update in updates: + if isinstance(update[1], str): + updateText = update[1] + else: + updateText = str(("\r\n " + " "*padding).join(update[1])) + if updateText[0] == "!": + majorUpdate = True + updateText = updateText[1:] + changelogLines.append(" %s: %s" % (update[0].rjust(padding), updateText)) + print("") + print("") + print("================================================================================") + print("") + print((" An " + ("important" if majorUpdate else "incremental") + " update has been found!")) + print("") + print(" Here's what changed:") + for line in changelogLines: + print(line) + print("") + print("") + print(" Download: https://git.io/factoriomaps") + if majorUpdate: + print("") + print(" You can dismiss this by using --no-update (not recommended)") + print("") + print("================================================================================") + print("") + print("") + if majorUpdate or reverseUpdateTest: + exit(1) + + except (urllib.error.URLError, timeout) as e: + print("Failed to check for updates. 
%s: %s" % (type(e).__name__, e)) def linkDir(src: Path, dest:Path): - if os.name == 'nt': - subprocess.check_call(("MKLINK", "/J", src.resolve(), dest.resolve()), stdout=subprocess.DEVNULL, shell=True) - else: - os.symlink(dest.resolve(), src.resolve()) + if os.name == 'nt': + subprocess.check_call(("MKLINK", "/J", src.resolve(), dest.resolve()), stdout=subprocess.DEVNULL, shell=True) + else: + os.symlink(dest.resolve(), src.resolve()) def linkCustomModFolder(modpath: Path): - print(f"Verifying mod version in custom mod folder ({modpath})") - modPattern = re.compile(r'^L0laapk3_FactorioMaps_', flags=re.IGNORECASE) - for entry in [entry for entry in modpath.iterdir() if modPattern.match(entry.name)]: - print("Found other factoriomaps mod in custom mod folder, deleting.") - path = Path(modpath, entry) - if path.is_file() or path.is_symlink(): - path.unlink() - elif path.is_dir(): - rmtree(path) - else: - raise Exception(f"Unable to remove {path} unknown type") + print(f"Verifying mod version in custom mod folder ({modpath})") + modPattern = re.compile(r'^L0laapk3_FactorioMaps_', flags=re.IGNORECASE) + for entry in [entry for entry in modpath.iterdir() if modPattern.match(entry.name)]: + print("Found other factoriomaps mod in custom mod folder, deleting.") + path = Path(modpath, entry) + if path.is_file() or path.is_symlink(): + path.unlink() + elif path.is_dir(): + rmtree(path) + else: + raise Exception(f"Unable to remove {path} unknown type") - linkDir(Path(modpath, Path('.').resolve().name), Path(".")) + linkDir(Path(modpath, Path('.').resolve().name), Path(".")) def changeModlist(modpath: Path,newState: bool): - print(f"{'Enabling' if newState else 'Disabling'} FactorioMaps mod") - done = False - modlistPath = Path(modpath, "mod-list.json") - with modlistPath.open("r", encoding="utf-8") as f: - modlist = json.load(f) - for mod in modlist["mods"]: - if mod["name"] == "L0laapk3_FactorioMaps": - mod["enabled"] = newState - done = True - break - if not done: - modlist["mods"].append({"name": "L0laapk3_FactorioMaps", "enabled": newState}) - with modlistPath.open("w", encoding="utf-8") as f: - json.dump(modlist, f, indent=2) + print(f"{'Enabling' if newState else 'Disabling'} FactorioMaps mod") + done = False + modlistPath = Path(modpath, "mod-list.json") + with modlistPath.open("r", encoding="utf-8") as f: + modlist = json.load(f) + for mod in modlist["mods"]: + if mod["name"] == "L0laapk3_FactorioMaps": + mod["enabled"] = newState + done = True + break + if not done: + modlist["mods"].append({"name": "L0laapk3_FactorioMaps", "enabled": newState}) + with modlistPath.open("w", encoding="utf-8") as f: + json.dump(modlist, f, indent=2) def buildAutorun(args: Namespace, workFolder: Path, outFolder: Path, isFirstSnapshot: bool, daytime: str): - printErase("Building autorun.lua") - mapInfoPath = Path(workFolder, "mapInfo.json") - if mapInfoPath.is_file(): - with mapInfoPath.open("r", encoding='utf-8') as f: - mapInfoLua = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}")) - # TODO: Update for new argument parsing + printErase("Building autorun.lua") + mapInfoPath = Path(workFolder, "mapInfo.json") + if mapInfoPath.is_file(): + with mapInfoPath.open("r", encoding='utf-8') as f: + mapInfoLua = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}")) + # TODO: Update for new argument parsing # if isFirstSnapshot: # f.seek(0) # mapInfo = json.load(f) @@ -286,622 +283,623 @@ def buildAutorun(args: 
Namespace, workFolder: Path, outFolder: Path, isFirstSnap # for kwarg in changedKwargs: # if kwarg in ("hd", "dayonly", "nightonly", "build-range", "connect-range", "tag-range"): # printErase("Warning: flag '" + kwarg + "' is overriden by previous setting found in existing timeline.") - else: - mapInfoLua = "{}" - - isFirstSnapshot = False - - chunkCachePath = Path(workFolder, "chunkCache.json") - if chunkCachePath.is_file(): - with chunkCachePath.open("r", encoding="utf-8") as f: - chunkCache = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}")) - else: - chunkCache = "{}" - - def lowerBool(value: bool): - return str(value).lower() - - with Path(__file__, "..", "autorun.lua").resolve().open("w", encoding="utf-8") as f: - surfaceString = '{"' + '", "'.join(args.surface) + '"}' if args.surface else "nil" - autorunString = \ - f'''fm.autorun = {{ - HD = {lowerBool(args.hd)}, - daytime = "{daytime}", - alt_mode = {lowerBool(args.altmode)}, - tags = {lowerBool(args.tags)}, - around_tag_range = {args.tag_range}, - around_build_range = {args.build_range}, - around_connect_range = {args.connect_range}, - connect_types = {{"lamp", "electric-pole", "radar", "straight-rail", "curved-rail", "rail-signal", "rail-chain-signal", "locomotive", "cargo-wagon", "fluid-wagon", "car"}}, - date = "{datetime.datetime.strptime(args.date, "%d/%m/%y").strftime("%d/%m/%y")}", - surfaces = {surfaceString}, - name = "{str(outFolder) + "/"}", - mapInfo = {mapInfoLua.encode("utf-8").decode("unicode-escape")}, - chunkCache = {chunkCache} - }}''' - f.write(autorunString) - if args.verbose: - printErase(autorunString) + else: + mapInfoLua = "{}" + + isFirstSnapshot = False + + chunkCachePath = Path(workFolder, "chunkCache.json") + if chunkCachePath.is_file(): + with chunkCachePath.open("r", encoding="utf-8") as f: + chunkCache = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}")) + else: + chunkCache = "{}" + + def lowerBool(value: bool): + return str(value).lower() + + with Path(__file__, "..", "autorun.lua").resolve().open("w", encoding="utf-8") as f: + surfaceString = '{"' + '", "'.join(args.surface) + '"}' if args.surface else "nil" + autorunString = \ + f'''fm.autorun = {{ + HD = {lowerBool(args.hd)}, + daytime = "{daytime}", + alt_mode = {lowerBool(args.altmode)}, + tags = {lowerBool(args.tags)}, + around_tag_range = {args.tag_range}, + around_build_range = {args.build_range}, + around_connect_range = {args.connect_range}, + connect_types = {{"lamp", "electric-pole", "radar", "straight-rail", "curved-rail", "rail-signal", "rail-chain-signal", "locomotive", "cargo-wagon", "fluid-wagon", "car"}}, + date = "{datetime.datetime.strptime(args.date, "%d/%m/%y").strftime("%d/%m/%y")}", + surfaces = {surfaceString}, + name = "{str(outFolder) + "/"}", + mapInfo = {mapInfoLua.encode("utf-8").decode("unicode-escape")}, + chunkCache = {chunkCache} + }}''' + f.write(autorunString) + if args.verbose: + printErase(autorunString) def buildConfig(args: Namespace, tmpDir, basepath): - printErase("Building config.ini") - if args.verbose > 2: - print(f"Using temporary directory '{tmpDir}'") - configPath = Path(tmpDir, "config","config.ini") - configPath.parent.mkdir(parents=True) + printErase("Building config.ini") + if args.verbose > 2: + print(f"Using temporary directory '{tmpDir}'") + configPath = Path(tmpDir, "config","config.ini") + configPath.parent.mkdir(parents=True) - config = configparser.ConfigParser() - 
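	# The player's own config.ini (read just below) is used as the starting point so
	# existing settings carry over; the overrides that follow then point write-data at
	# tmpDir, script-output at the output basepath, and set the screenshot thread
	# counts, so the temporary Factorio instance renders into the right locations.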
config.read(Path(args.config_path, "config.ini")) + config = configparser.ConfigParser() + config.read(Path(args.config_path, "config.ini")) - if "interface" not in config: - config["interface"] = {} - config["interface"]["show-tips-and-tricks"] = "false" + if "interface" not in config: + config["interface"] = {} + config["interface"]["show-tips-and-tricks"] = "false" - if "path" not in config: - config["path"] = {} - config["path"]["write-data"] = tmpDir + if "path" not in config: + config["path"] = {} + config["path"]["write-data"] = tmpDir - config["path"]["script-output"] = str(basepath) + config["path"]["script-output"] = str(basepath) - if "graphics" not in config: - config["graphics"] = {} - config["graphics"]["screenshots-threads-count"] = str(args.screenshotthreads if args.screenshotthreads else args.maxthreads) - config["graphics"]["max-threads"] = config["graphics"]["screenshots-threads-count"] + if "graphics" not in config: + config["graphics"] = {} + config["graphics"]["screenshots-threads-count"] = str(args.screenshotthreads if args.screenshotthreads else args.maxthreads) + config["graphics"]["max-threads"] = config["graphics"]["screenshots-threads-count"] - with configPath.open("w+", encoding="utf-8") as configFile: - configFile.writelines(("; version=3\n", )) - config.write(configFile, space_around_delimiters=False) + with configPath.open("w+", encoding="utf-8") as configFile: + configFile.writelines(("; version=3\n", )) + config.write(configFile, space_around_delimiters=False) - copy(Path(userFolder, 'player-data.json'), tmpDir) + copy(Path(userFolder, 'player-data.json'), tmpDir) - return configPath + return configPath def auto(*args): - lock = threading.Lock() - def kill(pid, onlyStall=False): - if pid: - with lock: - if not onlyStall and psutil.pid_exists(pid): - - if os.name == 'nt': - subprocess.check_call(("taskkill", "/pid", str(pid)), stdout=subprocess.DEVNULL, shell=True) - else: - subprocess.check_call(("killall", "factorio"), stdout=subprocess.DEVNULL) # TODO: kill correct process instead of just killing all - - while psutil.pid_exists(pid): - time.sleep(0.1) - - printErase("killed factorio") - - #time.sleep(0.1) - - parser = argparse.ArgumentParser(description="FactorioMaps") - daytime = parser.add_mutually_exclusive_group() - daytime.add_argument("--dayonly", dest="night", action="store_false", help="Only take daytime screenshots.") - daytime.add_argument("--nightonly", dest="day", action="store_false", help="Only take nighttime screenshots.") - parser.add_argument("--hd", action="store_true", help="Take screenshots of resolution 64 x 64 pixels per in-game tile.") - parser.add_argument("--no-altmode", dest="altmode", action="store_false", help="Hides entity info (alt mode).") - parser.add_argument("--no-tags", dest="tags", action="store_false", help="Hides map tags") - parser.add_argument("--default-timestamp", type=int, default=None, dest="default_timestamp", help="Snapshot that will be loaded by the webpage by default. 
Negative values indicate newest snapshots, so -1 indicates the newest map while 0 indicates the oldest map.") - parser.add_argument("--build-range", type=float, default=5.2, help="The maximum range from buildings around which pictures are saved (in chunks, 32 by 32 in-game tiles).") - parser.add_argument("--connect-range", type=float, default=1.2, help="The maximum range from connection buildings (rails, electric poles) around which pictures are saved.") - parser.add_argument("--tag-range", type=float, default=5.2, help="The maximum range from mapview tags around which pictures are saved.") - parser.add_argument("--surface", action="append", default=[], help="Used to capture other surfaces. If left empty, the surface the player is standing on will be used. To capture multiple surfaces, use the argument multiple times: --surface nauvis --surface 'Factory floor 1'") - parser.add_argument("--factorio", type=lambda p: Path(p).resolve(), help="Use factorio.exe from PATH instead of attempting to find it in common locations.") - parser.add_argument("--output-path", dest="basepath", type=lambda p: Path(p).resolve(), default=Path(userFolder, "script-output", "FactorioMaps"), help="path to the output folder (default is '..\\..\\script-output\\FactorioMaps')") - parser.add_argument("--mod-path", "--modpath", type=lambda p: Path(p).resolve(), default=Path(userFolder, 'mods'), help="Use PATH as the mod folder. (default is '..\\..\\mods')") - parser.add_argument("--config-path", type=lambda p: Path(p).resolve(), default=Path(userFolder, 'config'), help="Use PATH as the mod folder. (default is '..\\..\\config')") - parser.add_argument("--date", default=datetime.date.today().strftime("%d/%m/%y"), help="Date attached to the snapshot, default is today. [dd/mm/yy]") - parser.add_argument("--steam", default=0, action="store_true", help="Only use factorio binary from steam") - parser.add_argument("--standalone", default=0, action="store_true", help="Only use standalone factorio binary") - parser.add_argument('--verbose', '-v', action='count', default=0, help="Displays factoriomaps script logs.") - parser.add_argument('--verbosegame', action='count', default=0, help="Displays all game logs.") - parser.add_argument("--no-update", "--noupdate", dest="update", action="store_false", help="Skips the update check.") - parser.add_argument("--reverseupdatetest", action="store_true", help=argparse.SUPPRESS) - parser.add_argument("--maxthreads", type=int, default=mp.cpu_count(), help="Sets the number of threads used for all steps. 
By default this is equal to the amount of logical processor cores available.") - parser.add_argument("--cropthreads", type=int, default=None, help="Sets the number of threads used for the crop step.") - parser.add_argument("--refthreads", type=int, default=None, help="Sets the number of threads used for the crossreferencing step.") - parser.add_argument("--zoomthreads", type=int, default=None, help="Sets the number of threads used for the zoom step.") - parser.add_argument("--screenshotthreads", type=int, default=None, help="Set the number of screenshotting threads factorio uses.") - parser.add_argument("--delete", action="store_true", help="Deletes the output folder specified before running the script.") - parser.add_argument("--dry", action="store_true", help="Skips starting factorio, making screenshots and doing the main steps, only execute setting up and finishing of script.") - parser.add_argument("targetname", nargs="?", help="output folder name for the generated snapshots.") - parser.add_argument("savename", nargs="*", help="Names of the savegames to generate snapshots from. If no savegames are provided the latest save or the save matching outfolder will be gerated. Glob patterns are supported.") - parser.add_argument("--force-lib-update", action="store_true", help="Forces an update of the web dependencies.") - parser.add_argument('--temp-dir', '--tempdir', type=lambda p: Path(p).resolve(), help='Set a custom temporary directory to use (this is only needed if the defualt one is on a RAM disk, which Factorio does not support).') - - args = parser.parse_args() - if args.verbose > 0: - print(args) - - if args.update: - checkUpdate(args.reverseupdatetest) - - saves = Path(userFolder, "saves") - if args.targetname: - foldername = args.targetname - else: - timestamp, filePath = max( - (save.stat().st_mtime, save) - for save in saves.iterdir() - if not save.stem.startswith("_autosave") and save.name != "steam_autocloud.vdf" - ) - foldername = filePath.stem - print("No save name passed. 
Using most recent save: %s" % foldername) - saveNames = args.savename or [foldername] - foldername = foldername.replace('*', '').replace('?', '') - - saveGames = set() - for saveName in saveNames: - saveNameEscaped = glob.escape(saveName).replace("[*]", "*") - globResults = list(saves.glob(saveNameEscaped)) - globResults += list(saves.glob(f"{saveNameEscaped}.zip")) - - if not globResults: - print(f'Cannot find savefile: "{saveName}"') - raise IOError(f"savefile {saveName!r} not found in {str(saves)!r}") - results = [save for save in globResults if save.is_file()] - for result in results: - saveGames.add(result.relative_to(saves).as_posix()) - - saveGames = naturalSort(list(saveGames)) - - if args.verbose > 0: - print(f"Will generate snapshots for : {saveGames}") - - if args.factorio: - possibleFactorioPaths = [args.factorio] - else: - unixPaths = [ - "../../bin/x64/factorio.exe", - "../../bin/x64/factorio", - ] - windowsPathsStandalone = [ - "Program Files/Factorio/bin/x64/factorio.exe", - "Games/Factorio/bin/x64/factorio.exe", - ] - windowsPathsSteam = [ - "Program Files (x86)/Steam/steamapps/common/Factorio/bin/x64/factorio.exe", - "Steam/steamapps/common/Factorio/bin/x64/factorio.exe", - ] - def driveExists(drive): - try: - return Path(f"{drive}:/").exists() - except (OSError, PermissionError): - return False - availableDrives = [ - "%s:/" % d for d in string.ascii_uppercase if driveExists(d) - ] - possibleFactorioPaths = unixPaths - if args.steam == 0: - possibleFactorioPaths += [ drive + path for drive in availableDrives for path in windowsPathsStandalone ] - if args.standalone == 0: - possibleFactorioPaths += [ drive + path for drive in availableDrives for path in windowsPathsSteam ] - - try: - factorioPath = next( - x - for x in map(Path, possibleFactorioPaths) - if x.is_file() - ) - except StopIteration: - raise Exception( - "Can't find factorio.exe. Please pass --factorio=PATH as an argument.", - "Searched the following locations:", possibleFactorioPaths - ) - - print("factorio path: {}".format(factorioPath)) - - psutil.Process(os.getpid()).nice(psutil.ABOVE_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 5) - - workthread = None - - workfolder = Path(args.basepath, foldername).resolve() - try: - print("output folder: {}".format(workfolder.relative_to(Path(userFolder)))) - except ValueError: - print("output folder: {}".format(workfolder.resolve())) - - try: - workfolder.mkdir(parents=True, exist_ok=True) - except FileExistsError: - raise Exception(f"{workfolder} exists and is not a directory!") - - updateLib(args.force_lib_update) - - #TODO: integrity check, if done files aren't there or there are any bmps left, complain. 
- - if args.mod_path.resolve() != Path(userFolder,"mods").resolve(): - linkCustomModFolder(args.mod_path) - - changeModlist(args.mod_path, True) - - manager = mp.Manager() - rawTags = manager.dict() - rawTags["__used"] = False - - if args.delete: - print(f"Deleting output folder ({workfolder})") - try: - rmtree(workfolder) - except (FileNotFoundError, NotADirectoryError): - pass - - - ########################################### - # # - # Start of Work # - # # - ########################################### - - datapath = Path(workfolder, "latest.txt") - - isFirstSnapshot = True - - try: - - daytimes = [] - if args.day: - daytimes.append("day") - if args.night: - daytimes.append("night") - - for index, savename in () if args.dry else enumerate(saveGames): - for daytimeIndex, setDaytime in enumerate(daytimes): - - printErase("cleaning up") - if datapath.is_file(): - datapath.unlink() - - buildAutorun(args, workfolder, foldername, isFirstSnapshot, setDaytime) - isFirstSnapshot = False - - if args.temp_dir is not None: - try: - os.makedirs(args.temp_dir) - except OSError: - pass - with TemporaryDirectory(prefix="FactorioMaps-", dir=args.temp_dir) as tmpDir: - configPath = buildConfig(args, tmpDir, args.basepath) - - pid = None - isSteam = None - pidBlacklist = [p.info["pid"] for p in psutil.process_iter(attrs=['pid', 'name']) if p.info['name'] == "factorio.exe"] - - - launchArgs = [ - '--load-game', - str(Path(userFolder, 'saves', *(savename.split('/'))).absolute()), - '--disable-audio', - '--config', - str(configPath), - "--mod-directory",str(args.mod_path.absolute()), - "--disable-migration-window" - ] - - usedSteamLaunchHack = False - - if os.name == "nt": - steamApiPath = Path(factorioPath, "..", "steam_api64.dll") - else: - steamApiPath = Path(factorioPath, "..", "steam_api64.so") - - if steamApiPath.exists(): # chances are this is a steam install.. - # try to find steam - try: - from winreg import OpenKey, HKEY_CURRENT_USER, ConnectRegistry, QueryValueEx, REG_SZ - - key = OpenKey(ConnectRegistry(None, HKEY_CURRENT_USER), r'Software\Valve\Steam') - val, valType = QueryValueEx(key, 'SteamExe') - if valType != REG_SZ: - raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), "SteamExe") - steamPath = Path(val) - except (ImportError, FileNotFoundError) as e: - # fallback to old method - if os.name == "nt": - steamPath = Path(factorioPath, "..", "..", "..", "..", "..", "..", "steam.exe") - else: - steamPath = Path(factorioPath, "..", "..", "..", "..", "..", "..", "steam") - - if steamPath and steamPath.exists(): # found a steam executable - usedSteamLaunchHack = True - exeWithArgs = [ - str(steamPath), - "-applaunch", - "427520" - ] + launchArgs - - if not usedSteamLaunchHack: # if non steam factorio, or if steam factorio but steam executable isnt found. 
- exeWithArgs = [ - str(factorioPath) - ] + launchArgs - - if args.verbose: - printErase(exeWithArgs) - - condition = mp.Condition() - results = manager.list() - - printErase("starting factorio") - startLogProcess = mp.Process( - target=startGameAndReadGameLogs, - args=(results, condition, exeWithArgs, usedSteamLaunchHack, tmpDir, pidBlacklist, rawTags, args) - ) - startLogProcess.daemon = True - startLogProcess.start() - - with condition: - condition.wait() - isSteam, pid = results[:] - - if isSteam is None: - raise Exception("isSteam error") - if pid is None: - raise Exception("pid error") - - while not datapath.exists(): - time.sleep(0.4) - - # empty autorun.lua - Path(__file__, "..", "autorun.lua").resolve().open('w', encoding="utf-8").close() - - latest = [] - with datapath.open('r', encoding="utf-8") as f: - for line in f: - latest.append(line.rstrip("\n")) - if args.verbose: - printErase(latest) - - firstOutFolder, timestamp, surface, daytime = latest[-1].split(" ") - firstOutFolder = firstOutFolder.replace("/", " ") - waitfilename = Path(args.basepath, firstOutFolder, "images", timestamp, surface, daytime, "done.txt") - - isKilled = [False] - def waitKill(isKilled, pid): - while not isKilled[0]: - #print(f"Can I kill yet? {os.path.isfile(waitfilename)} {waitfilename}") - if os.path.isfile(waitfilename): - isKilled[0] = True - kill(pid) - break - else: - time.sleep(0.4) - - killThread = threading.Thread(target=waitKill, args=(isKilled, pid)) - killThread.daemon = True - killThread.start() - - if workthread and workthread.is_alive(): - #print("waiting for workthread") - workthread.join() - - timestamp = None - daytimeSurfaces = {} - for jindex, screenshot in enumerate(latest): - outFolder, timestamp, surface, daytime = list(map(lambda s: s.replace("|", " "), screenshot.split(" "))) - outFolder = outFolder.replace("/", " ") - print(f"Processing {outFolder}/{'/'.join([timestamp, surface, daytime])} ({len(latest) * index + jindex + 1 + daytimeIndex} of {len(latest) * len(saveGames) * len(daytimes)})") - - if daytime in daytimeSurfaces: - daytimeSurfaces[daytime].append(surface) - else: - daytimeSurfaces[daytime] = [surface] - - #print("Cropping %s images" % screenshot) - crop(outFolder, timestamp, surface, daytime, args.basepath, args) - waitlocalfilename = os.path.join(args.basepath, outFolder, "Images", timestamp, surface, daytime, "done.txt") - if not os.path.exists(waitlocalfilename): - #print("waiting for done.txt") - while not os.path.exists(waitlocalfilename): - time.sleep(0.4) - - - - def refZoom(): - needsThumbnail = index + 1 == len(saveGames) - #print("Crossreferencing %s images" % screenshot) - ref(outFolder, timestamp, surface, daytime, args.basepath, args) - #print("downsampling %s images" % screenshot) - zoom(outFolder, timestamp, surface, daytime, args.basepath, needsThumbnail, args) - - if jindex == len(latest) - 1: - print("zooming renderboxes", timestamp) - zoomRenderboxes(daytimeSurfaces, workfolder, timestamp, Path(args.basepath, firstOutFolder, "Images"), args) - - if screenshot != latest[-1]: - refZoom() - else: - startLogProcess.terminate() - - # I have receieved a bug report from feidan in which he describes what seems like that this doesnt kill factorio? 
- - onlyStall = isKilled[0] - isKilled[0] = True - kill(pid, onlyStall) - - if savename == saveGames[-1] and daytimeIndex == len(daytimes) - 1: - refZoom() - - else: - workthread = threading.Thread(target=refZoom) - workthread.daemon = True - workthread.start() - - - - - - - - - - if os.path.isfile(os.path.join(workfolder, "mapInfo.out.json")): - print("generating mapInfo.json") - with Path(workfolder, "mapInfo.json").open('r+', encoding='utf-8') as destf, Path(workfolder, "mapInfo.out.json").open("r", encoding='utf-8') as srcf: - data = json.load(destf) - for mapIndex, mapStuff in json.load(srcf)["maps"].items(): - for surfaceName, surfaceStuff in mapStuff["surfaces"].items(): - if "chunks" in surfaceStuff: - data["maps"][int(mapIndex)]["surfaces"][surfaceName]["chunks"] = surfaceStuff["chunks"] - if "links" in surfaceStuff: - for linkIndex, link in enumerate(surfaceStuff["links"]): - data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["path"] = link["path"] - data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["zoom"]["min"] = link["zoom"]["min"] - destf.seek(0) - json.dump(data, destf) - destf.truncate() - os.remove(os.path.join(workfolder, "mapInfo.out.json")) - - - # List of length 3 tuples: - # mod name in lowercase (e.g. `krastorio2`, `fnei`) - # (major version string, minor version string, patch version string, bool if the mod's a zipfile) - # mod full ID in original casing (e.g. `Krastorio2_1.1.4`, `FNEI_0.4.1`) - # - # Does not include mods that don't have versions in - # their names, such as mods manually installed from - # source. - modVersions = sorted( - map(lambda m: (m.group(2).lower(), (m.group(3), m.group(4), m.group(5), m.group(6) is None), m.group(1)), - filter(lambda m: m, - map(lambda f: re.search(r"^((.*)_(\d+)\.(\d+)\.(\d+))(\.zip)?$", f, flags=re.IGNORECASE), - os.listdir(os.path.join(args.basepath, args.mod_path))))), - key = lambda t: t[1], - reverse = True) - - - rawTags["__used"] = True - if args.tags: - print("updating labels") - tags = {} - def addTag(tags, itemType, itemName, force=False): - index = itemType + itemName[0].upper() + itemName[1:] - if index in rawTags: - tags[index] = { - "itemType": itemType, - "itemName": itemName, - "iconPath": "Images/labels/" + itemType + "/" + itemName + ".png", - } - else: - if force: - raise "tag not found." 
- else: - print(f"[WARNING] tag \"{index}\" not found.") - with Path(workfolder, "mapInfo.json").open('r+', encoding='utf-8') as mapInfoJson: - data = json.load(mapInfoJson) - for mapStuff in data["maps"]: - for surfaceName, surfaceStuff in mapStuff["surfaces"].items(): - if "tags" in surfaceStuff: - for tag in surfaceStuff["tags"]: - if "iconType" in tag: - addTag(tags, tag["iconType"], tag["iconName"], True) - if "text" in tag: - for match in re.finditer(r"\[([^=]+)=([^\]]+)", tag["text"]): - addTag(tags, match.group(1), match.group(2)) - - rmtree(os.path.join(workfolder, "Images", "labels"), ignore_errors=True) - - for tagIndex, tag in tags.items(): - dest = os.path.join(workfolder, tag["iconPath"]) - os.makedirs(os.path.dirname(dest), exist_ok=True) - - rawPath = rawTags[tagIndex] - - icons = rawPath.split('|') - img = None - for i, path in enumerate(icons): - m = re.match(r"^__([^\/]+)__[\/\\](.*)$", path) - if m is None: - raise Exception("raw path of %s %s: %s not found" % (tag["iconType"], tag["iconName"], path)) - - iconColor = m.group(2).split("?") - icon = iconColor[0] - if m.group(1) in ("base", "core"): - src = os.path.join(os.path.split(factorioPath)[0], "../../data", m.group(1), icon + ".png") - else: - mod = next(mod for mod in modVersions if mod[0] == m.group(1).lower()) - if not mod[1][3]: #true if mod is zip - zipPath = os.path.join(args.basepath, args.mod_path, mod[2] + ".zip") - with ZipFile(zipPath, 'r') as zipObj: - internalFolder = os.path.commonpath(zipObj.namelist()) - if len(icons) == 1: - zipInfo = zipObj.getinfo(os.path.join(internalFolder, icon + ".png").replace('\\', '/')) - zipInfo.filename = os.path.basename(dest) - zipObj.extract(zipInfo, os.path.dirname(os.path.realpath(dest))) - src = None - else: - src = zipObj.extract(os.path.join(internalFolder, icon + ".png").replace('\\', '/'), os.path.join(tempfile.gettempdir(), "FactorioMaps")) - else: - src = os.path.join(args.basepath, args.mod_path, mod[2], icon + ".png") - - if len(icons) == 1: - if src is not None: - img = Image.open(src) - w, h = img.size - img = img.crop((0, 0, h, h)).resize((64, 64)) - img.save(dest) - else: - newImg = Image.open(src) - w, h = newImg.size - newImg = newImg.crop((0, 0, h, h)).resize((64, 64)).convert("RGBA") - if len(iconColor) > 1: - newImg = ImageChops.multiply(newImg, Image.new("RGBA", newImg.size, color=tuple(map(lambda s: int(round(float(s))), iconColor[1].split("%"))))) - if i == 0: - img = newImg - else: - img.paste(newImg.convert("RGB"), (0, 0), newImg) - if len(icons) > 1: - img.save(dest) - - - - print("applying configuration") - with Path(workfolder, "mapInfo.json").open("r+", encoding='utf-8') as f: - mapInfo = json.load(f) - if args.default_timestamp != None or "defaultTimestamp" not in mapInfo["options"]: - if args.default_timestamp == None: - args.default_timestamp = -1 - mapInfo["options"]["defaultTimestamp"] = args.default_timestamp - f.seek(0) - json.dump(mapInfo, f) - f.truncate() - - - - print("generating mapInfo.js") - with Path(workfolder, "mapInfo.js").open('w', encoding="utf-8") as outf, Path(workfolder, "mapInfo.json").open("r", encoding='utf-8') as inf: - outf.write('"use strict";\nwindow.mapInfo = JSON.parse(') - outf.write(json.dumps(inf.read())) - outf.write(");") - - - print("creating index.html") - for fileName in ("index.html", "index.css", "index.js"): - copy(Path(__file__, "..", "web", fileName).resolve(), os.path.join(workfolder, fileName)) - try: - rmtree(os.path.join(workfolder, "lib")) - except (FileNotFoundError, NotADirectoryError): - 
pass - copytree(Path(__file__, "..", "web", "lib").resolve(), os.path.join(workfolder, "lib")) - - - - except KeyboardInterrupt: - print("keyboardinterrupt") - kill(pid) - raise - - finally: - - try: - kill(pid) - except: - pass - - changeModlist(args.mod_path, False) + lock = threading.Lock() + def kill(pid, onlyStall=False): + if pid: + with lock: + if not onlyStall and psutil.pid_exists(pid): + + if os.name == 'nt': + subprocess.check_call(("taskkill", "/pid", str(pid)), stdout=subprocess.DEVNULL, shell=True) + else: + subprocess.check_call(("killall", "factorio"), stdout=subprocess.DEVNULL) # TODO: kill correct process instead of just killing all + + while psutil.pid_exists(pid): + time.sleep(0.1) + + printErase("killed factorio") + + time.sleep(10) + + parser = argparse.ArgumentParser(description="FactorioMaps") + daytime = parser.add_mutually_exclusive_group() + daytime.add_argument("--dayonly", dest="night", action="store_false", help="Only take daytime screenshots.") + daytime.add_argument("--nightonly", dest="day", action="store_false", help="Only take nighttime screenshots.") + parser.add_argument("--hd", action="store_true", help="Take screenshots of resolution 64 x 64 pixels per in-game tile.") + parser.add_argument("--no-altmode", dest="altmode", action="store_false", help="Hides entity info (alt mode).") + parser.add_argument("--no-tags", dest="tags", action="store_false", help="Hides map tags") + parser.add_argument("--default-timestamp", type=int, default=None, dest="default_timestamp", help="Snapshot that will be loaded by the webpage by default. Negative values indicate newest snapshots, so -1 indicates the newest map while 0 indicates the oldest map.") + parser.add_argument("--build-range", type=float, default=5.2, help="The maximum range from buildings around which pictures are saved (in chunks, 32 by 32 in-game tiles).") + parser.add_argument("--connect-range", type=float, default=1.2, help="The maximum range from connection buildings (rails, electric poles) around which pictures are saved.") + parser.add_argument("--tag-range", type=float, default=5.2, help="The maximum range from mapview tags around which pictures are saved.") + parser.add_argument("--surface", action="append", default=[], help="Used to capture other surfaces. If left empty, the surface the player is standing on will be used. To capture multiple surfaces, use the argument multiple times: --surface nauvis --surface 'Factory floor 1'") + parser.add_argument("--factorio", type=lambda p: Path(p).resolve(), help="Use factorio.exe from PATH instead of attempting to find it in common locations.") + parser.add_argument("--output-path", dest="basepath", type=lambda p: Path(p).resolve(), default=Path(userFolder, "script-output", "FactorioMaps"), help="path to the output folder (default is '..\\..\\script-output\\FactorioMaps')") + parser.add_argument("--mod-path", "--modpath", type=lambda p: Path(p).resolve(), default=Path(userFolder, 'mods'), help="Use PATH as the mod folder. (default is '..\\..\\mods')") + parser.add_argument("--config-path", type=lambda p: Path(p).resolve(), default=Path(userFolder, 'config'), help="Use PATH as the mod folder. (default is '..\\..\\config')") + parser.add_argument("--date", default=datetime.date.today().strftime("%d/%m/%y"), help="Date attached to the snapshot, default is today. 
[dd/mm/yy]") + parser.add_argument("--steam", default=0, action="store_true", help="Only use factorio binary from steam") + parser.add_argument("--standalone", default=0, action="store_true", help="Only use standalone factorio binary") + parser.add_argument('--verbose', '-v', action='count', default=0, help="Displays factoriomaps script logs.") + parser.add_argument('--verbosegame', action='count', default=0, help="Displays all game logs.") + parser.add_argument("--no-update", "--noupdate", dest="update", action="store_false", help="Skips the update check.") + parser.add_argument("--reverseupdatetest", action="store_true", help=argparse.SUPPRESS) + parser.add_argument("--maxthreads", type=int, default=mp.cpu_count(), help="Sets the number of threads used for all steps. By default this is equal to the amount of logical processor cores available.") + parser.add_argument("--cropthreads", type=int, default=None, help="Sets the number of threads used for the crop step.") + parser.add_argument("--refthreads", type=int, default=None, help="Sets the number of threads used for the crossreferencing step.") + parser.add_argument("--zoomthreads", type=int, default=None, help="Sets the number of threads used for the zoom step.") + parser.add_argument("--screenshotthreads", type=int, default=None, help="Set the number of screenshotting threads factorio uses.") + parser.add_argument("--delete", action="store_true", help="Deletes the output folder specified before running the script.") + parser.add_argument("--dry", action="store_true", help="Skips starting factorio, making screenshots and doing the main steps, only execute setting up and finishing of script.") + parser.add_argument("targetname", nargs="?", help="output folder name for the generated snapshots.") + parser.add_argument("savename", nargs="*", help="Names of the savegames to generate snapshots from. If no savegames are provided the latest save or the save matching outfolder will be gerated. Glob patterns are supported.") + parser.add_argument("--force-lib-update", action="store_true", help="Forces an update of the web dependencies.") + parser.add_argument('--temp-dir', '--tempdir', type=lambda p: Path(p).resolve(), help='Set a custom temporary directory to use (this is only needed if the defualt one is on a RAM disk, which Factorio does not support).') + + args = parser.parse_args() + if args.verbose > 0: + print(args) + + if args.update: + checkUpdate(args.reverseupdatetest) + + saves = Path(userFolder, "saves") + if args.targetname: + foldername = args.targetname + else: + timestamp, filePath = max( + (save.stat().st_mtime, save) + for save in saves.iterdir() + if not save.stem.startswith("_autosave") and save.name != "steam_autocloud.vdf" + ) + foldername = filePath.stem + print("No save name passed. 
Using most recent save: %s" % foldername) + saveNames = args.savename or [foldername] + foldername = foldername.replace('*', '').replace('?', '') + + saveGames = set() + for saveName in saveNames: + saveNameEscaped = glob.escape(saveName).replace("[*]", "*") + globResults = list(saves.glob(saveNameEscaped)) + globResults += list(saves.glob(f"{saveNameEscaped}.zip")) + + if not globResults: + print(f'Cannot find savefile: "{saveName}"') + raise IOError(f"savefile {saveName!r} not found in {str(saves)!r}") + results = [save for save in globResults if save.is_file()] + for result in results: + saveGames.add(result.relative_to(saves).as_posix()) + + saveGames = naturalSort(list(saveGames)) + + if args.verbose > 0: + print(f"Will generate snapshots for : {saveGames}") + + if args.factorio: + possibleFactorioPaths = [args.factorio] + else: + unixPaths = [ + "../../bin/x64/factorio.exe", + "../../bin/x64/factorio", + ] + windowsPathsStandalone = [ + "Program Files/Factorio/bin/x64/factorio.exe", + "Games/Factorio/bin/x64/factorio.exe", + ] + windowsPathsSteam = [ + "Program Files (x86)/Steam/steamapps/common/Factorio/bin/x64/factorio.exe", + "Steam/steamapps/common/Factorio/bin/x64/factorio.exe", + ] + def driveExists(drive): + try: + return Path(f"{drive}:/").exists() + except (OSError, PermissionError): + return False + availableDrives = [ + "%s:/" % d for d in string.ascii_uppercase if driveExists(d) + ] + possibleFactorioPaths = unixPaths + if args.steam == 0: + possibleFactorioPaths += [ drive + path for drive in availableDrives for path in windowsPathsStandalone ] + if args.standalone == 0: + possibleFactorioPaths += [ drive + path for drive in availableDrives for path in windowsPathsSteam ] + + try: + factorioPath = next( + x + for x in map(Path, possibleFactorioPaths) + if x.is_file() + ) + except StopIteration: + raise Exception( + "Can't find factorio.exe. Please pass --factorio=PATH as an argument.", + "Searched the following locations:", possibleFactorioPaths + ) + + print("factorio path: {}".format(factorioPath)) + + psutil.Process(os.getpid()).nice(psutil.ABOVE_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 5) + + workthread = None + + workfolder = Path(args.basepath, foldername).resolve() + try: + print("output folder: {}".format(workfolder.relative_to(Path(userFolder)))) + except ValueError: + print("output folder: {}".format(workfolder.resolve())) + + try: + workfolder.mkdir(parents=True, exist_ok=True) + except FileExistsError: + raise Exception(f"{workfolder} exists and is not a directory!") + + updateLib(args.force_lib_update) + + #TODO: integrity check, if done files aren't there or there are any bmps left, complain. 
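	# Before the game is started, the script links this FactorioMaps checkout into the
	# mod directory (only when a custom --mod-path is given) and enables
	# L0laapk3_FactorioMaps in mod-list.json; the mod is disabled again in the
	# finally block when the script finishes.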
+ + if args.mod_path.resolve() != Path(userFolder,"mods").resolve(): + linkCustomModFolder(args.mod_path) + + changeModlist(args.mod_path, True) + + manager = mp.Manager() + rawTags = manager.dict() + rawTags["__used"] = False + + if args.delete: + print(f"Deleting output folder ({workfolder})") + try: + rmtree(workfolder) + except (FileNotFoundError, NotADirectoryError): + pass + + + ########################################### + # # + # Start of Work # + # # + ########################################### + + datapath = Path(workfolder, "latest.txt") + + isFirstSnapshot = True + + try: + + daytimes = [] + if args.day: + daytimes.append("day") + # if args.night: + # daytimes.append("night") + + for index, savename in () if args.dry else enumerate(saveGames): + for daytimeIndex, setDaytime in enumerate(daytimes): + + printErase("cleaning up") + if datapath.is_file(): + datapath.unlink() + + buildAutorun(args, workfolder, foldername, isFirstSnapshot, setDaytime) + isFirstSnapshot = False + + if args.temp_dir is not None: + try: + os.makedirs(args.temp_dir) + except OSError: + pass + with TemporaryDirectory(prefix="FactorioMaps-", dir=args.temp_dir) as tmpDir: + configPath = buildConfig(args, tmpDir, args.basepath) + + pid = None + isSteam = None + pidBlacklist = [p.info["pid"] for p in psutil.process_iter(attrs=['pid', 'name']) if p.info['name'] == "factorio.exe"] + + + launchArgs = [ + '--load-game', + str(Path(userFolder, 'saves', *(savename.split('/'))).absolute()), + '--disable-audio', + '--config', + str(configPath), + "--mod-directory",str(args.mod_path.absolute()), + "--disable-migration-window" + ] + + usedSteamLaunchHack = False + + if os.name == "nt": + steamApiPath = Path(factorioPath, "..", "steam_api64.dll") + else: + steamApiPath = Path(factorioPath, "..", "steam_api64.so") + + if steamApiPath.exists(): # chances are this is a steam install.. + # try to find steam + try: + from winreg import OpenKey, HKEY_CURRENT_USER, ConnectRegistry, QueryValueEx, REG_SZ + + key = OpenKey(ConnectRegistry(None, HKEY_CURRENT_USER), r'Software\Valve\Steam') + val, valType = QueryValueEx(key, 'SteamExe') + if valType != REG_SZ: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), "SteamExe") + steamPath = Path(val) + except (ImportError, FileNotFoundError) as e: + # fallback to old method + if os.name == "nt": + steamPath = Path(factorioPath, "..", "..", "..", "..", "..", "..", "steam.exe") + else: + steamPath = Path(factorioPath, "..", "..", "..", "..", "..", "..", "steam") + + if steamPath and steamPath.exists(): # found a steam executable + usedSteamLaunchHack = True + exeWithArgs = [ + str(steamPath), + "-applaunch", + "427520" + ] + launchArgs + + if not usedSteamLaunchHack: # if non steam factorio, or if steam factorio but steam executable isnt found. 
+ exeWithArgs = [ + str(factorioPath) + ] + launchArgs + + if args.verbose: + printErase(exeWithArgs) + + condition = mp.Condition() + results = manager.list() + + printErase("starting factorio") + startLogProcess = mp.Process( + target=startGameAndReadGameLogs, + args=(results, condition, exeWithArgs, usedSteamLaunchHack, tmpDir, pidBlacklist, rawTags, args) + ) + startLogProcess.daemon = True + startLogProcess.start() + + with condition: + condition.wait() + isSteam, pid = results[:] + + if isSteam is None: + raise Exception("isSteam error") + if pid is None: + raise Exception("pid error") + + while not datapath.exists(): + time.sleep(0.4) + + # empty autorun.lua + Path(__file__, "..", "autorun.lua").resolve().open('w', encoding="utf-8").close() + + latest = [] + with datapath.open('r', encoding="utf-8") as f: + for line in f: + latest.append(line.rstrip("\n")) + if args.verbose: + printErase(latest) + + firstOutFolder, timestamp, surface, daytime = latest[-1].split(" ") + firstOutFolder = firstOutFolder.replace("/", " ") + waitfilename = Path(args.basepath, firstOutFolder, "images", timestamp, surface, daytime, "done.txt") + + isKilled = [False] + def waitKill(isKilled, pid): + while not isKilled[0]: + #print(f"Can I kill yet? {os.path.isfile(waitfilename)} {waitfilename}") + if os.path.isfile(waitfilename): + isKilled[0] = True + kill(pid) + break + else: + time.sleep(0.4) + + killThread = threading.Thread(target=waitKill, args=(isKilled, pid)) + killThread.daemon = True + killThread.start() + + if workthread and workthread.is_alive(): + #print("waiting for workthread") + workthread.join() + + timestamp = None + daytimeSurfaces = {} + for jindex, screenshot in enumerate(latest): + outFolder, timestamp, surface, daytime = list(map(lambda s: s.replace("|", " "), screenshot.split(" "))) + outFolder = outFolder.replace("/", " ") + print(f"Processing {outFolder}/{'/'.join([timestamp, surface, daytime])} ({len(latest) * index + jindex + 1 + daytimeIndex} of {len(latest) * len(saveGames) * len(daytimes)})") + + if daytime in daytimeSurfaces: + daytimeSurfaces[daytime].append(surface) + else: + daytimeSurfaces[daytime] = [surface] + + #print("Cropping %s images" % screenshot) + crop(outFolder, timestamp, surface, daytime, args.basepath, args) + waitlocalfilename = os.path.join(args.basepath, outFolder, "Images", timestamp, surface, daytime, "done.txt") + if not os.path.exists(waitlocalfilename): + #print("waiting for done.txt") + while not os.path.exists(waitlocalfilename): + time.sleep(0.4) + + + + def refZoom(): + needsThumbnail = index + 1 == len(saveGames) + #print("Crossreferencing %s images" % screenshot) + ref(outFolder, timestamp, surface, daytime, args.basepath, args) + #print("downsampling %s images" % screenshot) + zoom(outFolder, timestamp, surface, daytime, args.basepath, needsThumbnail, args) + + if jindex == len(latest) - 1: + print("zooming renderboxes", timestamp) + zoomRenderboxes(daytimeSurfaces, workfolder, timestamp, Path(args.basepath, firstOutFolder, "Images"), args) + + if screenshot != latest[-1]: + refZoom() + else: + startLogProcess.terminate() + + # I have receieved a bug report from feidan in which he describes what seems like that this doesnt kill factorio? 
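# Rough sketch of force-terminating a whole Factorio process tree with psutil.
# The kill() helper invoked below lives elsewhere in auto.py and is not shown in this
# hunk; killProcessTree here is a hypothetical stand-in, not the project's implementation.
import psutil

def killProcessTree(pid, timeout=5):
	try:
		parent = psutil.Process(pid)
	except psutil.NoSuchProcess:
		return  # already gone
	procs = parent.children(recursive=True) + [parent]
	for p in procs:
		p.terminate()  # polite shutdown first
	_, alive = psutil.wait_procs(procs, timeout=timeout)
	for p in alive:
		p.kill()  # force anything still running (e.g. a Steam-launched child)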
+ + onlyStall = isKilled[0] + isKilled[0] = True + kill(pid, onlyStall) + + if savename == saveGames[-1] and daytimeIndex == len(daytimes) - 1: + refZoom() + + else: + workthread = threading.Thread(target=refZoom) + workthread.daemon = True + workthread.start() + + + + + + + + + + if os.path.isfile(os.path.join(workfolder, "mapInfo.out.json")): + print("generating mapInfo.json") + with Path(workfolder, "mapInfo.json").open('r+', encoding='utf-8') as destf, Path(workfolder, "mapInfo.out.json").open("r", encoding='utf-8') as srcf: + data = json.load(destf) + for mapIndex, mapStuff in json.load(srcf)["maps"].items(): + for surfaceName, surfaceStuff in mapStuff["surfaces"].items(): + if "chunks" in surfaceStuff: + data["maps"][int(mapIndex)]["surfaces"][surfaceName]["chunks"] = surfaceStuff["chunks"] + if "links" in surfaceStuff: + for linkIndex, link in enumerate(surfaceStuff["links"]): + if "filename" in link: + data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["path"] = link["path"] + data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["zoom"]["min"] = link["zoom"]["min"] + destf.seek(0) + json.dump(data, destf) + destf.truncate() + os.remove(os.path.join(workfolder, "mapInfo.out.json")) + + + # List of length 3 tuples: + # mod name in lowercase (e.g. `krastorio2`, `fnei`) + # (major version string, minor version string, patch version string, bool if the mod's a zipfile) + # mod full ID in original casing (e.g. `Krastorio2_1.1.4`, `FNEI_0.4.1`) + # + # Does not include mods that don't have versions in + # their names, such as mods manually installed from + # source. + modVersions = sorted( + map(lambda m: (m.group(2).lower(), (m.group(3), m.group(4), m.group(5), m.group(6) is None), m.group(1)), + filter(lambda m: m, + map(lambda f: re.search(r"^((.*)_(\d+)\.(\d+)\.(\d+))(\.zip)?$", f, flags=re.IGNORECASE), + os.listdir(os.path.join(args.basepath, args.mod_path))))), + key = lambda t: t[1], + reverse = True) + + + rawTags["__used"] = True + if args.tags: + print("updating labels") + tags = {} + def addTag(tags, itemType, itemName, force=False): + index = itemType + itemName[0].upper() + itemName[1:] + if index in rawTags: + tags[index] = { + "itemType": itemType, + "itemName": itemName, + "iconPath": "Images/labels/" + itemType + "/" + itemName + ".png", + } + else: + if force: + raise "tag not found." 
+ else: + print(f"[WARNING] tag \"{index}\" not found.") + with Path(workfolder, "mapInfo.json").open('r+', encoding='utf-8') as mapInfoJson: + data = json.load(mapInfoJson) + for mapStuff in data["maps"]: + for surfaceName, surfaceStuff in mapStuff["surfaces"].items(): + if "tags" in surfaceStuff: + for tag in surfaceStuff["tags"]: + if "iconType" in tag: + addTag(tags, tag["iconType"], tag["iconName"], True) + if "text" in tag: + for match in re.finditer(r"\[([^=]+)=([^\]]+)", tag["text"]): + addTag(tags, match.group(1), match.group(2)) + + rmtree(os.path.join(workfolder, "Images", "labels"), ignore_errors=True) + + for tagIndex, tag in tags.items(): + dest = os.path.join(workfolder, tag["iconPath"]) + os.makedirs(os.path.dirname(dest), exist_ok=True) + + rawPath = rawTags[tagIndex] + + icons = rawPath.split('|') + img = None + for i, path in enumerate(icons): + m = re.match(r"^__([^\/]+)__[\/\\](.*)$", path) + if m is None: + raise Exception("raw path of %s %s: %s not found" % (tag["iconType"], tag["iconName"], path)) + + iconColor = m.group(2).split("?") + icon = iconColor[0] + if m.group(1) in ("base", "core"): + src = os.path.join(os.path.split(factorioPath)[0], "../../data", m.group(1), icon + ".png") + else: + mod = next(mod for mod in modVersions if mod[0] == m.group(1).lower()) + if not mod[1][3]: #true if mod is zip + zipPath = os.path.join(args.basepath, args.mod_path, mod[2] + ".zip") + with ZipFile(zipPath, 'r') as zipObj: + internalFolder = os.path.commonpath(zipObj.namelist()) + if len(icons) == 1: + zipInfo = zipObj.getinfo(os.path.join(internalFolder, icon + ".png").replace('\\', '/')) + zipInfo.filename = os.path.basename(dest) + zipObj.extract(zipInfo, os.path.dirname(os.path.realpath(dest))) + src = None + else: + src = zipObj.extract(os.path.join(internalFolder, icon + ".png").replace('\\', '/'), os.path.join(tempfile.gettempdir(), "FactorioMaps")) + else: + src = os.path.join(args.basepath, args.mod_path, mod[2], icon + ".png") + + if len(icons) == 1: + if src is not None: + img = Image.open(src) + w, h = img.size + img = img.crop((0, 0, h, h)).resize((64, 64)) + img.save(dest) + else: + newImg = Image.open(src) + w, h = newImg.size + newImg = newImg.crop((0, 0, h, h)).resize((64, 64)).convert("RGBA") + if len(iconColor) > 1: + newImg = ImageChops.multiply(newImg, Image.new("RGBA", newImg.size, color=tuple(map(lambda s: int(round(float(s))), iconColor[1].split("%"))))) + if i == 0: + img = newImg + else: + img.paste(newImg.convert("RGB"), (0, 0), newImg) + if len(icons) > 1: + img.save(dest) + + + + print("applying configuration") + with Path(workfolder, "mapInfo.json").open("r+", encoding='utf-8') as f: + mapInfo = json.load(f) + if args.default_timestamp != None or "defaultTimestamp" not in mapInfo["options"]: + if args.default_timestamp == None: + args.default_timestamp = -1 + mapInfo["options"]["defaultTimestamp"] = args.default_timestamp + f.seek(0) + json.dump(mapInfo, f) + f.truncate() + + + + print("generating mapInfo.js") + with Path(workfolder, "mapInfo.js").open('w', encoding="utf-8") as outf, Path(workfolder, "mapInfo.json").open("r", encoding='utf-8') as inf: + outf.write('"use strict";\nwindow.mapInfo = JSON.parse(') + outf.write(json.dumps(inf.read())) + outf.write(");") + + + print("creating index.html") + for fileName in ("index.html", "index.css", "index.js"): + copy(Path(__file__, "..", "web", fileName).resolve(), os.path.join(workfolder, fileName)) + try: + rmtree(os.path.join(workfolder, "lib")) + except (FileNotFoundError, NotADirectoryError): + 
pass + copytree(Path(__file__, "..", "web", "lib").resolve(), os.path.join(workfolder, "lib")) + + + + except KeyboardInterrupt: + print("keyboardinterrupt") + kill(pid) + raise + + finally: + + try: + kill(pid) + except: + pass + + changeModlist(args.mod_path, False) if __name__ == '__main__': - auto(*sys.argv[1:]) + auto(*sys.argv[1:]) diff --git a/crop.py b/crop.py index b36538c..c4b3505 100644 --- a/crop.py +++ b/crop.py @@ -1,6 +1,5 @@ import multiprocessing as mp import os -import sys import time from argparse import Namespace from functools import partial @@ -14,85 +13,85 @@ def work(line, folder, progressQueue): - arg = line.rstrip("\n").split(" ", 5) - path = Path(folder, arg.pop(5)) - arg = list(map(int, arg[:4])) - top, left, width, height = arg - try: - Image.open(path).convert("RGB").crop( - (top, left, top + width, left + height) - ).save(path) - except IOError: - progressQueue.put(False, True) - return line - except: - progressQueue.put(False, True) - import traceback - - traceback.print_exc() - pass - return False - progressQueue.put(True, True) - return False + arg = line.rstrip("\n").split(" ", 5) + path = Path(folder, arg.pop(5)) + arg = list(map(int, arg[:4])) + top, left, width, height = arg + try: + Image.open(path).convert("RGB").crop( + (top, left, top + width, left + height) + ).save(path) + except IOError: + progressQueue.put(False, True) + return line + except: + progressQueue.put(False, True) + import traceback + + traceback.print_exc() + pass + return False + progressQueue.put(True, True) + return False def crop(outFolder, timestamp, surface, daytime, basePath=None, args: Namespace = Namespace()): - psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == "nt" else 10) - - subname = Path(timestamp, surface, daytime) - toppath = Path( - basePath if basePath else Path(__file__, "..", "..", "..", "script-output", "FactorioMaps").resolve(), - outFolder, - ) - - imagePath = Path(toppath, "Images") - - datapath = Path(imagePath, subname, "crop.txt") - maxthreads = args.cropthreads if args.cropthreads else args.maxthreads - - while not datapath.exists(): - time.sleep(1) - - print(f"crop {0:5.1f}% [{' ' * (tsize()[0]-15)}]", end="") - - files = [] - with datapath.open("r", encoding="utf-8") as data: - assert data.readline().rstrip("\n") == "v2" - for line in data: - files.append(line) - - pool = mp.Pool(processes=maxthreads) - - m = mp.Manager() - progressQueue = m.Queue() - originalSize = len(files) - doneSize = 0 - - try: - while len(files) > 0: - workers = pool.map_async( - partial(work, folder=imagePath, progressQueue=progressQueue), - files, - 128, - ) - for _ in range(len(files)): - if progressQueue.get(True): - doneSize += 1 - progress = float(doneSize) / originalSize - tsiz = tsize()[0] - 15 - print(f"\rcrop {round(progress * 100, 1):5.1f}% [{'=' * int(progress * tsiz)}{' ' * (tsiz - int(progress * tsiz))}]",end="",) - workers.wait() - files = [x for x in workers.get() if x] - if len(files) > 0: - time.sleep(10 if len(files) > 1000 else 1) - print(f"\rcrop {100:5.1f}% [{'=' * (tsize()[0]-15)}]") - except KeyboardInterrupt: - - time.sleep(0.2) - print(f"Keyboardinterrupt caught with {len(files)} files left.") - if len(files) < 40: - for line in files: - print(line) - - raise + psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == "nt" else 10) + + subname = Path(timestamp, surface, daytime) + toppath = Path( + basePath if basePath else Path(__file__, "..", "..", "..", "script-output", 
"FactorioMaps").resolve(), + outFolder, + ) + + imagePath = Path(toppath, "Images") + + datapath = Path(imagePath, subname, "crop.txt") + maxthreads = args.cropthreads if args.cropthreads else args.maxthreads + + while not datapath.exists(): + time.sleep(1) + + print(f"crop {0:5.1f}% [{' ' * (tsize()[0]-15)}]", end="") + + files = [] + with datapath.open("r", encoding="utf-8") as data: + assert data.readline().rstrip("\n") == "v2" + for line in data: + files.append(line) + + pool = mp.Pool(processes=maxthreads) + + m = mp.Manager() + progressQueue = m.Queue() + originalSize = len(files) + doneSize = 0 + + try: + while len(files) > 0: + workers = pool.map_async( + partial(work, folder=imagePath, progressQueue=progressQueue), + files, + 128, + ) + for _ in range(len(files)): + if progressQueue.get(True): + doneSize += 1 + progress = float(doneSize) / originalSize + tsiz = tsize()[0] - 15 + print(f"\rcrop {round(progress * 100, 1):5.1f}% [{'=' * int(progress * tsiz)}{' ' * (tsiz - int(progress * tsiz))}]",end="",) + workers.wait() + files = [x for x in workers.get() if x] + if len(files) > 0: + time.sleep(10 if len(files) > 1000 else 1) + print(f"\rcrop {100:5.1f}% [{'=' * (tsize()[0]-15)}]") + except KeyboardInterrupt: + + time.sleep(0.2) + print(f"Keyboardinterrupt caught with {len(files)} files left.") + if len(files) < 40: + for line in files: + print(line) + + raise diff --git a/ref.py b/ref.py index 2e8f4c2..b0bf0cc 100644 --- a/ref.py +++ b/ref.py @@ -1,5 +1,5 @@ from argparse import Namespace -import os, sys, math, time, json, psutil +import os, json, psutil from pathlib import Path from PIL import Image, ImageChops, ImageStat import multiprocessing as mp @@ -15,59 +15,59 @@ def test(paths): - newImg = Image.open(paths[0], mode='r').convert("RGB") - oldImg = Image.open(paths[1], mode='r').convert("RGB") - treshold = .03 * newImg.size[0]**2 - # jpeg artifacts always average out perfectly over 8x8 sections, we take advantage of that and scale down by 8 so we can compare compressed images with uncompressed images. - size = (newImg.size[0] / 8, newImg.size[0] / 8) - newImg.thumbnail(size, Image.BILINEAR) - oldImg.thumbnail(size, Image.BILINEAR) - diff = ImageChops.difference(newImg, oldImg) - return sum(ImageStat.Stat(diff).sum2) > treshold + newImg = Image.open(paths[0], mode='r').convert("RGB") + oldImg = Image.open(paths[1], mode='r').convert("RGB") + treshold = .03 * newImg.size[0]**2 + # jpeg artifacts always average out perfectly over 8x8 sections, we take advantage of that and scale down by 8 so we can compare compressed images with uncompressed images. 
+ size = (newImg.size[0] / 8, newImg.size[0] / 8) + newImg.thumbnail(size, Image.BILINEAR) + oldImg.thumbnail(size, Image.BILINEAR) + diff = ImageChops.difference(newImg, oldImg) + return sum(ImageStat.Stat(diff).sum2) > treshold def compare(path, basePath, new, progressQueue): - testResult = False - try: - testResult = test((os.path.join(basePath, new, *path[1:]), os.path.join(basePath, *path).replace(ext, outext))) - except: - print("\r") - traceback.print_exc() - print("\n") - raise - finally: - progressQueue.put(True, True) - return (testResult, path[1:]) + testResult = False + try: + testResult = test((os.path.join(basePath, new, *path[1:]), os.path.join(basePath, *path).replace(ext, outext))) + except: + print("\r") + traceback.print_exc() + print("\n") + raise + finally: + progressQueue.put(True, True) + return (testResult, path[1:]) def compareRenderbox(renderbox, basePath, new): - newPath = os.path.join(basePath, new, renderbox[0]) + ext - testResult = False - try: - testResult = test((newPath, os.path.join(basePath, renderbox[1], renderbox[0]) + outext)) - except: - print("\r") - raise - return (testResult, newPath, renderbox[1], renderbox[2]) + newPath = os.path.join(basePath, new, renderbox[0]) + ext + testResult = False + try: + testResult = test((newPath, os.path.join(basePath, renderbox[1], renderbox[0]) + outext)) + except: + print("\r") + raise + return (testResult, newPath, renderbox[1], renderbox[2]) def neighbourScan(coord, keepList, cropList): - """ - x+ = UP, y+ = RIGHT - corners: - 2 1 - X - 4 3 - """ - surfaceName, daytime, z = coord[:3] - x, y = int(coord[3]), int(os.path.splitext(coord[4])[0]) - return (((surfaceName, daytime, z, str(x+1), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x+1, y+1), 0) & 0b1000) \ - or ((surfaceName, daytime, z, str(x+1), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x+1, y-1), 0) & 0b0100) \ - or ((surfaceName, daytime, z, str(x-1), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y+1), 0) & 0b0010) \ - or ((surfaceName, daytime, z, str(x-1), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y-1), 0) & 0b0001) \ - or ((surfaceName, daytime, z, str(x+1), str(y ) + ext) in keepList and cropList.get((surfaceName, daytime, z, x+1, y ), 0) & 0b1100) \ - or ((surfaceName, daytime, z, str(x-1), str(y ) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y ), 0) & 0b0011) \ - or ((surfaceName, daytime, z, str(x ), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x , y+1), 0) & 0b1010) \ - or ((surfaceName, daytime, z, str(x ), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x , y-1), 0) & 0b0101), coord) + """ + x+ = UP, y+ = RIGHT + corners: + 2 1 + X + 4 3 + """ + surfaceName, daytime, z = coord[:3] + x, y = int(coord[3]), int(os.path.splitext(coord[4])[0]) + return (((surfaceName, daytime, z, str(x+1), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x+1, y+1), 0) & 0b1000) \ + or ((surfaceName, daytime, z, str(x+1), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x+1, y-1), 0) & 0b0100) \ + or ((surfaceName, daytime, z, str(x-1), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y+1), 0) & 0b0010) \ + or ((surfaceName, daytime, z, str(x-1), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y-1), 0) & 0b0001) \ + or ((surfaceName, daytime, z, str(x+1), str(y ) + ext) in keepList and 
cropList.get((surfaceName, daytime, z, x+1, y ), 0) & 0b1100) \ + or ((surfaceName, daytime, z, str(x-1), str(y ) + ext) in keepList and cropList.get((surfaceName, daytime, z, x-1, y ), 0) & 0b0011) \ + or ((surfaceName, daytime, z, str(x ), str(y+1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x , y+1), 0) & 0b1010) \ + or ((surfaceName, daytime, z, str(x ), str(y-1) + ext) in keepList and cropList.get((surfaceName, daytime, z, x , y-1), 0) & 0b0101), coord) @@ -76,19 +76,19 @@ def neighbourScan(coord, keepList, cropList): def base64Char(i): - assert(i >= 0 and i < 64) # Did you change image size? it could make this overflow - if i == 63: - return "/" - elif i == 62: - return "+" - elif i > 51: - return chr(i - 4) - elif i > 25: - return chr(i + 71) - return chr(i + 65) + assert(i >= 0 and i < 64) # Did you change image size? it could make this overflow + if i == 63: + return "/" + elif i == 62: + return "+" + elif i > 51: + return chr(i - 4) + elif i > 25: + return chr(i + 71) + return chr(i + 65) def getBase64(number, isNight): #coordinate to 18 bit value (3 char base64) - number = int(number) + (2**16 if isNight else (2**17 + 2**16)) # IMAGES CURRENTLY CONTAIN 16 TILES. IF IMAGE SIZE CHANGES THIS WONT WORK ANYMORE. (It will for a long time until it wont) - return base64Char(number % 64) + base64Char(int(number / 64) % 64) + base64Char(int(number / 64 / 64)) + number = int(number) + (2**16 if isNight else (2**17 + 2**16)) # IMAGES CURRENTLY CONTAIN 16 TILES. IF IMAGE SIZE CHANGES THIS WONT WORK ANYMORE. (It will for a long time until it wont) + return base64Char(number % 64) + base64Char(int(number / 64) % 64) + base64Char(int(number / 64 / 64)) @@ -97,212 +97,213 @@ def getBase64(number, isNight): #coordinate to 18 bit value (3 char base64) def ref( - outFolder: Path, - timestamp: str = None, - surfaceReference: str = None, - daytimeReference: str = None, - basepath: Path = None, - args: Namespace = Namespace(), + outFolder: Path, + timestamp: str = None, + surfaceReference: str = None, + daytimeReference: str = None, + basepath: Path = None, + args: Namespace = Namespace(), ): - psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 10) + psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == 'nt' else 10) - workFolder = basepath if basepath else Path(__file__, "..", "..", "..", "script-output", "FactorioMaps").resolve() - topPath = Path(workFolder, outFolder) - dataPath = Path(topPath, "mapInfo.json") - maxthreads = args.refthreads if args.refthreads else args.maxthreads + workFolder = basepath if basepath else Path(__file__, "..", "..", "..", "script-output", "FactorioMaps").resolve() + topPath = Path(workFolder, outFolder) + dataPath = Path(topPath, "mapInfo.json") + maxthreads = args.refthreads if args.refthreads else args.maxthreads - pool = mp.Pool(processes=maxthreads) + pool = mp.Pool(processes=maxthreads) - with open(dataPath, "r", encoding="utf-8") as f: - data = json.load(f) - outFile = Path(topPath, "mapInfo.out.json") - if outFile.exists(): - with outFile.open("r", encoding="utf-8") as mapInfoOutFile: - outdata = json.load(mapInfoOutFile) - else: - outdata = {} + with open(dataPath, "r", encoding="utf-8") as f: + data = json.load(f) + # copy to debug file + outFile = Path(topPath, "mapInfo.out.json") + if outFile.exists(): + with outFile.open("r", encoding="utf-8") as mapInfoOutFile: + outdata = json.load(mapInfoOutFile) + else: + outdata = {} - if timestamp: - for i, mapObj in 
enumerate(data["maps"]): - if mapObj["path"] == timestamp: - new = i - break - else: - new = len(data["maps"]) - 1 + if timestamp: + for i, mapObj in enumerate(data["maps"]): + if mapObj["path"] == timestamp: + new = i + break + else: + new = len(data["maps"]) - 1 - changed = False - if "maps" not in outdata: - outdata["maps"] = {} - if str(new) not in outdata["maps"]: - outdata["maps"][str(new)] = { "surfaces": {} } + changed = False + if "maps" not in outdata: + outdata["maps"] = {} + if str(new) not in outdata["maps"]: + outdata["maps"][str(new)] = { "surfaces": {} } - newMap = data["maps"][new] - allImageIndex = {} - allDayImages = {} + newMap = data["maps"][new] + allImageIndex = {} + allDayImages = {} - for daytime in ("day", "night"): - newComparedSurfaces = [] - compareList = [] - keepList = [] - firstRemoveList = [] - cropList = {} - didAnything = False - if daytime is None or daytime == daytimeReference: - for surfaceName, surface in newMap["surfaces"].items(): - if (surfaceReference is None or surfaceName == surfaceReference) and daytime in surface and str(surface[daytime]) and (daytime is None or daytime == daytimeReference): - didAnything = True - z = surface["zoom"]["max"] + for daytime in ("day", "night"): + newComparedSurfaces = [] + compareList = [] + keepList = [] + firstRemoveList = [] + cropList = {} + didAnything = False + if daytime is None or daytime == daytimeReference: + for surfaceName, surface in newMap["surfaces"].items(): + if (surfaceReference is None or surfaceName == surfaceReference) and daytime in surface and str(surface[daytime]) and (daytime is None or daytime == daytimeReference): + didAnything = True + z = surface["zoom"]["max"] - dayImages = [] + dayImages = [] - newComparedSurfaces.append((surfaceName, daytime)) + newComparedSurfaces.append((surfaceName, daytime)) - oldMapsList = [] - for old in range(new): - if surfaceName in data["maps"][old]["surfaces"]: - oldMapsList.append(old) + oldMapsList = [] + for old in range(new): + if surfaceName in data["maps"][old]["surfaces"]: + oldMapsList.append(old) - def readCropList(path, combinePrevious): - with open(path, "r", encoding="utf-8") as f: - version = 2 if f.readline().rstrip('\n') == "v2" else 1 - for line in f: - if version == 1: - split = line.rstrip("\n").split(" ", 5) - key = (surfaceName, daytime, str(z), int(split[0]), int(os.path.splitext(split[1])[0])) - value = split[4] - else: - split = line.rstrip("\n").split(" ", 5) - pathSplit = split[5].split("/", 5) - if pathSplit[3] != str(z): - continue - #(surfaceName, daytime, z, str(x+1), str(y+1) + ext) - key = (surfaceName, daytime, str(z), int(pathSplit[4]), int(os.path.splitext(pathSplit[5])[0])) - value = split[2] + def readCropList(path, combinePrevious): + with open(path, "r", encoding="utf-8") as f: + version = 2 if f.readline().rstrip('\n') == "v2" else 1 + for line in f: + if version == 1: + split = line.rstrip("\n").split(" ", 5) + key = (surfaceName, daytime, str(z), int(split[0]), int(os.path.splitext(split[1])[0])) + value = split[4] + else: + split = line.rstrip("\n").split(" ", 5) + pathSplit = split[5].split("/", 5) + if pathSplit[3] != str(z): + continue + #(surfaceName, daytime, z, str(x+1), str(y+1) + ext) + key = (surfaceName, daytime, str(z), int(pathSplit[4]), int(os.path.splitext(pathSplit[5])[0])) + value = split[2] - cropList[key] = int(value, 16) | cropList.get(key, 0) if combinePrevious else int(value, 16) + cropList[key] = int(value, 16) | cropList.get(key, 0) if combinePrevious else int(value, 16) - for old in 
oldMapsList: - readCropList(os.path.join(topPath, "Images", data["maps"][old]["path"], surfaceName, daytime, "crop.txt"), False) + for old in oldMapsList: + readCropList(os.path.join(topPath, "Images", data["maps"][old]["path"], surfaceName, daytime, "crop.txt"), False) - readCropList(os.path.join(topPath, "Images", newMap["path"], surfaceName, daytime, "crop.txt"), True) + readCropList(os.path.join(topPath, "Images", newMap["path"], surfaceName, daytime, "crop.txt"), True) - oldImages = {} - for old in oldMapsList: - if surfaceName in data["maps"][old]["surfaces"] and daytime in surface and z == surface["zoom"]["max"]: - if surfaceName not in allImageIndex: - allImageIndex[surfaceName] = {} - path = os.path.join(topPath, "Images", data["maps"][old]["path"], surfaceName, daytime, str(z)) - for x in os.listdir(path): - for y in os.listdir(os.path.join(path, x)): - oldImages[(x, y.replace(ext, outext))] = data["maps"][old]["path"] + oldImages = {} + for old in oldMapsList: + if surfaceName in data["maps"][old]["surfaces"] and daytime in surface and z == surface["zoom"]["max"]: + if surfaceName not in allImageIndex: + allImageIndex[surfaceName] = {} + path = os.path.join(topPath, "Images", data["maps"][old]["path"], surfaceName, daytime, str(z)) + for x in os.listdir(path): + for y in os.listdir(os.path.join(path, x)): + oldImages[(x, y.replace(ext, outext))] = data["maps"][old]["path"] - if daytime != "day": - if not os.path.isfile(os.path.join(topPath, "Images", newMap["path"], surfaceName, "day", "ref.txt")): - print("WARNING: cannot find day surface to copy non-day surface from. running ref.py on night surfaces is not very accurate.") - else: - if args.verbose: print("found day surface, reuse results from ref.py from there") + if daytime != "day": + if not os.path.isfile(os.path.join(topPath, "Images", newMap["path"], surfaceName, "day", "ref.txt")): + print("WARNING: cannot find day surface to copy non-day surface from. 
running ref.py on night surfaces is not very accurate.") + else: + if args.verbose: print("found day surface, reuse results from ref.py from there") - with Path(topPath, "Images", newMap["path"], surfaceName, "day", "ref.txt").open("r", encoding="utf-8") as f: - for line in f: - dayImages.append(tuple(line.rstrip("\n").split(" ", 2))) + with Path(topPath, "Images", newMap["path"], surfaceName, "day", "ref.txt").open("r", encoding="utf-8") as f: + for line in f: + dayImages.append(tuple(line.rstrip("\n").split(" ", 2))) - allDayImages[surfaceName] = dayImages + allDayImages[surfaceName] = dayImages - path = os.path.join(topPath, "Images", newMap["path"], surfaceName, daytime, str(z)) - for x in os.listdir(path): - for y in os.listdir(os.path.join(path, x)): - if (x, os.path.splitext(y)[0]) in dayImages or (x, y.replace(ext, outext)) not in oldImages: - keepList.append((surfaceName, daytime, str(z), x, y)) - elif (x, y.replace(ext, outext)) in oldImages: - compareList.append((oldImages[(x, y.replace(ext, outext))], surfaceName, daytime, str(z), x, y)) + path = os.path.join(topPath, "Images", newMap["path"], surfaceName, daytime, str(z)) + for x in os.listdir(path): + for y in os.listdir(os.path.join(path, x)): + if (x, os.path.splitext(y)[0]) in dayImages or (x, y.replace(ext, outext)) not in oldImages: + keepList.append((surfaceName, daytime, str(z), x, y)) + elif (x, y.replace(ext, outext)) in oldImages: + compareList.append((oldImages[(x, y.replace(ext, outext))], surfaceName, daytime, str(z), x, y)) - if not didAnything: - continue + if not didAnything: + continue - if args.verbose: print("found %s new images" % len(keepList)) - if len(compareList) > 0: - if args.verbose: print("comparing %s existing images" % len(compareList)) - m = mp.Manager() - progressQueue = m.Queue() - #compare(compareList[0], treshold=treshold, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"]), progressQueue=progressQueue) - workers = pool.map_async(partial(compare, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"]), progressQueue=progressQueue), compareList, 128) - doneSize = 0 - print("ref {:5.1f}% [{}]".format(0, " " * (tsize()[0]-15)), end="") - for i in range(len(compareList)): - progressQueue.get(True) - doneSize += 1 - progress = float(doneSize) / len(compareList) - tsiz = tsize()[0]-15 - print("\rref {:5.1f}% [{}{}]".format(round(progress * 100, 1), "=" * int(progress * tsiz), " " * (tsiz - int(progress * tsiz))), end="") - workers.wait() - resultList = workers.get() + if args.verbose: print("found %s new images" % len(keepList)) + if len(compareList) > 0: + if args.verbose: print("comparing %s existing images" % len(compareList)) + m = mp.Manager() + progressQueue = m.Queue() + #compare(compareList[0], treshold=treshold, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"]), progressQueue=progressQueue) + workers = pool.map_async(partial(compare, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"]), progressQueue=progressQueue), compareList, 128) + doneSize = 0 + print("ref {:5.1f}% [{}]".format(0, " " * (tsize()[0]-15)), end="") + for i in range(len(compareList)): + progressQueue.get(True) + doneSize += 1 + progress = float(doneSize) / len(compareList) + tsiz = tsize()[0]-15 + print("\rref {:5.1f}% [{}{}]".format(round(progress * 100, 1), "=" * int(progress * tsiz), " " * (tsiz - int(progress * tsiz))), end="") + workers.wait() + resultList = workers.get() - newList = [x[1] for x in [x for x in resultList if x[0]]] - firstRemoveList += [x[1] for x 
in [x for x in resultList if not x[0]]] - if args.verbose: print("found %s changed in %s images" % (len(newList), len(compareList))) - keepList += newList - print("\rref {:5.1f}% [{}]".format(100, "=" * (tsize()[0]-15))) + newList = [x[1] for x in [x for x in resultList if x[0]]] + firstRemoveList += [x[1] for x in [x for x in resultList if not x[0]]] + if args.verbose: print("found %s changed in %s images" % (len(newList), len(compareList))) + keepList += newList + print("\rref {:5.1f}% [{}]".format(100, "=" * (tsize()[0]-15))) - if args.verbose: print("scanning %s chunks for neighbour cropping" % len(firstRemoveList)) - resultList = pool.map(partial(neighbourScan, keepList=keepList, cropList=cropList), firstRemoveList, 64) - neighbourList = [x[1] for x in [x for x in resultList if x[0]]] - removeList = [x[1] for x in [x for x in resultList if not x[0]]] - if args.verbose: print("keeping %s neighbouring images" % len(neighbourList)) + if args.verbose: print("scanning %s chunks for neighbour cropping" % len(firstRemoveList)) + resultList = pool.map(partial(neighbourScan, keepList=keepList, cropList=cropList), firstRemoveList, 64) + neighbourList = [x[1] for x in [x for x in resultList if x[0]]] + removeList = [x[1] for x in [x for x in resultList if not x[0]]] + if args.verbose: print("keeping %s neighbouring images" % len(neighbourList)) - if args.verbose: print("deleting %s, keeping %s of %s existing images" % (len(removeList), len(keepList) + len(neighbourList), len(keepList) + len(neighbourList) + len(removeList))) + if args.verbose: print("deleting %s, keeping %s of %s existing images" % (len(removeList), len(keepList) + len(neighbourList), len(keepList) + len(neighbourList) + len(removeList))) - if args.verbose: print("removing identical images") - for x in removeList: - os.remove(os.path.join(topPath, "Images", newMap["path"], *x)) + if args.verbose: print("removing identical images") + for x in removeList: + os.remove(os.path.join(topPath, "Images", newMap["path"], *x)) - if args.verbose: print("creating render index") - for surfaceName, daytime in newComparedSurfaces: - z = surface["zoom"]["max"] - with Path(topPath, "Images", newMap["path"], surfaceName, daytime, "ref.txt").open("w", encoding="utf-8") as f: - for aList in (keepList, neighbourList): - for coord in aList: - if coord[0] == surfaceName and coord[1] == daytime and coord[2] == str(z): - f.write("%s %s\n" % (coord[3], os.path.splitext(coord[4])[0])) + if args.verbose: print("creating render index") + for surfaceName, daytime in newComparedSurfaces: + z = surface["zoom"]["max"] + with Path(topPath, "Images", newMap["path"], surfaceName, daytime, "ref.txt").open("w", encoding="utf-8") as f: + for aList in (keepList, neighbourList): + for coord in aList: + if coord[0] == surfaceName and coord[1] == daytime and coord[2] == str(z): + f.write("%s %s\n" % (coord[3], os.path.splitext(coord[4])[0])) - if args.verbose: print("creating client index") - for aList in (keepList, neighbourList): - for coord in aList: - x = int(coord[3]) - y = int(os.path.splitext(coord[4])[0]) - if coord[0] not in allImageIndex: - allImageIndex[coord[0]] = {} - if coord[1] not in allImageIndex[coord[0]]: - allImageIndex[coord[0]][coord[1]] = {} - if y not in allImageIndex[coord[0]][coord[1]]: - allImageIndex[coord[0]][coord[1]][y] = [x] - elif x not in allImageIndex[coord[0]][coord[1]][y]: - allImageIndex[coord[0]][coord[1]][y].append(x) + if args.verbose: print("creating client index") + for aList in (keepList, neighbourList): + for coord in aList: + 
x = int(coord[3]) + y = int(os.path.splitext(coord[4])[0]) + if coord[0] not in allImageIndex: + allImageIndex[coord[0]] = {} + if coord[1] not in allImageIndex[coord[0]]: + allImageIndex[coord[0]][coord[1]] = {} + if y not in allImageIndex[coord[0]][coord[1]]: + allImageIndex[coord[0]][coord[1]][y] = [x] + elif x not in allImageIndex[coord[0]][coord[1]][y]: + allImageIndex[coord[0]][coord[1]][y].append(x) @@ -310,58 +311,59 @@ def readCropList(path, combinePrevious): - if args.verbose: print("comparing renderboxes") - if "renderboxesCompared" not in outdata["maps"][str(new)]: - changed = True - outdata["maps"][str(new)]["renderboxesCompared"] = True + if args.verbose: print("comparing renderboxes") + if "renderboxesCompared" not in outdata["maps"][str(new)]: + changed = True + outdata["maps"][str(new)]["renderboxesCompared"] = True - compareList = {} - totalCount = 0 - for surfaceName, surface in newMap["surfaces"].items(): - linksByPath = {} - for linkIndex, link in enumerate(surface["links"]): + compareList = {} + totalCount = 0 + for surfaceName, surface in newMap["surfaces"].items(): + linksByPath = {} + for linkIndex, link in enumerate(surface["links"]): - if surfaceName not in outdata["maps"][str(new)]["surfaces"]: - outdata["maps"][str(new)]["surfaces"][surfaceName] = { "links": [] } - outdata["maps"][str(new)]["surfaces"][surfaceName]["links"].append({ "path": newMap["path"] }) + if surfaceName not in outdata["maps"][str(new)]["surfaces"]: + outdata["maps"][str(new)]["surfaces"][surfaceName] = { "links": [] } + outdata["maps"][str(new)]["surfaces"][surfaceName]["links"].append({ "path": newMap["path"] }) - for daytime in ("day", "night"): - if link["type"] == "link_renderbox_area" and (link["daynight"] or daytime == "day"): - path = os.path.join(link["toSurface"], daytime if link["daynight"] else "day", "renderboxes", str(surface["zoom"]["max"]), link["filename"]) + for daytime in ("day", "night"): + if link["type"] == "link_renderbox_area" and (link["daynight"] or daytime == "day"): + if "filename" in link: + path = os.path.join(link["toSurface"], daytime if link["daynight"] else "day", "renderboxes", str(surface["zoom"]["max"]), link["filename"]) - if path not in linksByPath: - linksByPath[path] = [ (surfaceName, linkIndex) ] - else: - linksByPath[path].append((surfaceName, linkIndex)) + if path not in linksByPath: + linksByPath[path] = [ (surfaceName, linkIndex) ] + else: + linksByPath[path].append((surfaceName, linkIndex)) - totalCount += 1 + totalCount += 1 - for old in range(new-1, -1, -1): - if surfaceName in data["maps"][old]["surfaces"]: - for linkIndex, link in enumerate(data["maps"][old]["surfaces"][surfaceName]["links"]): - for daytime in ("day", "night"): - if link["type"] == "link_renderbox_area" and (link["daynight"] or daytime == "day"): - path = os.path.join(link["toSurface"], daytime if link["daynight"] else "day", "renderboxes", str(surface["zoom"]["max"]), link["filename"]) - if path in linksByPath and path not in compareList: - oldPath = link["path"] if "path" in link else outdata["maps"][str(old)]["surfaces"][surfaceName]["links"][linkIndex]["path"] - compareList[path] = (path, oldPath, linksByPath[path]) + for old in range(new-1, -1, -1): + if surfaceName in data["maps"][old]["surfaces"]: + for linkIndex, link in enumerate(data["maps"][old]["surfaces"][surfaceName]["links"]): + for daytime in ("day", "night"): + if link["type"] == "link_renderbox_area" and (link["daynight"] or daytime == "day"): + path = os.path.join(link["toSurface"], daytime if 
link["daynight"] else "day", "renderboxes", str(surface["zoom"]["max"]), link["filename"]) + if path in linksByPath and path not in compareList: + oldPath = link["path"] if "path" in link else outdata["maps"][str(old)]["surfaces"][surfaceName]["links"][linkIndex]["path"] + compareList[path] = (path, oldPath, linksByPath[path]) - compareList = compareList.values() - resultList = pool.map(partial(compareRenderbox, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"])), compareList, 16) + compareList = compareList.values() + resultList = pool.map(partial(compareRenderbox, basePath=os.path.join(topPath, "Images"), new=str(newMap["path"])), compareList, 16) - count = 0 - for (isDifferent, path, oldPath, links) in resultList: - if not isDifferent: - os.remove(path) + count = 0 + for (isDifferent, path, oldPath, links) in resultList: + if not isDifferent: + os.remove(path) - for (surfaceName, linkIndex) in links: - outdata["maps"][str(new)]["surfaces"][surfaceName]["links"][linkIndex] = { "path": oldPath } + for (surfaceName, linkIndex) in links: + outdata["maps"][str(new)]["surfaces"][surfaceName]["links"][linkIndex] = { "path": oldPath } - else: - count += 1 + else: + count += 1 - if args.verbose: print("removed %s of %s compared renderboxes, found %s new" % (count, len(compareList), totalCount)) + if args.verbose: print("removed %s of %s compared renderboxes, found %s new" % (count, len(compareList), totalCount)) @@ -372,43 +374,43 @@ def readCropList(path, combinePrevious): - # compress and build string - for surfaceName, daytimeImageIndex in allImageIndex.items(): - indexList = [] - daytime = "night" if "night" in daytimeImageIndex and data["maps"][new]["surfaces"][surfaceName] and str(data["maps"][new]["surfaces"][surfaceName]["night"]) else "day" - if daytime not in daytimeImageIndex: # this is true if nothing changed - continue - surfaceImageIndex = daytimeImageIndex[daytime] - for y, xList in surfaceImageIndex.items(): - string = getBase64(y, False) - isLastChangedImage = False - isLastNightImage = False + # compress and build string + for surfaceName, daytimeImageIndex in allImageIndex.items(): + indexList = [] + daytime = "night" if "night" in daytimeImageIndex and data["maps"][new]["surfaces"][surfaceName] and str(data["maps"][new]["surfaces"][surfaceName]["night"]) else "day" + if daytime not in daytimeImageIndex: # this is true if nothing changed + continue + surfaceImageIndex = daytimeImageIndex[daytime] + for y, xList in surfaceImageIndex.items(): + string = getBase64(y, False) + isLastChangedImage = False + isLastNightImage = False - for x in range(min(xList), max(xList) + 2): - isChangedImage = x in xList #does the image exist at all? - isNightImage = daytime == "night" and (str(x), str(y)) not in allDayImages[surfaceName] #is this image only in night? - if isLastChangedImage != isChangedImage or (isChangedImage and isLastNightImage != isNightImage): #differential encoding - string += getBase64(x, isNightImage if isChangedImage else isLastNightImage) - isLastChangedImage = isChangedImage - isLastNightImage = isNightImage - indexList.append(string) + for x in range(min(xList), max(xList) + 2): + isChangedImage = x in xList #does the image exist at all? + isNightImage = daytime == "night" and (str(x), str(y)) not in allDayImages[surfaceName] #is this image only in night? 
+ if isLastChangedImage != isChangedImage or (isChangedImage and isLastNightImage != isNightImage): #differential encoding + string += getBase64(x, isNightImage if isChangedImage else isLastNightImage) + isLastChangedImage = isChangedImage + isLastNightImage = isNightImage + indexList.append(string) - if surfaceName not in outdata["maps"][str(new)]["surfaces"]: - outdata["maps"][str(new)]["surfaces"][surfaceName] = {} - outdata["maps"][str(new)]["surfaces"][surfaceName]["chunks"] = '='.join(indexList) - if len(indexList) > 0: - changed = True + if surfaceName not in outdata["maps"][str(new)]["surfaces"]: + outdata["maps"][str(new)]["surfaces"][surfaceName] = {} + outdata["maps"][str(new)]["surfaces"][surfaceName]["chunks"] = '='.join(indexList) + if len(indexList) > 0: + changed = True - if changed: - if args.verbose: print("writing mapInfo.out.json") - with outFile.open("w+", encoding="utf-8") as f: - json.dump(outdata, f) + if changed: + if args.verbose: print("writing mapInfo.out.json") + with outFile.open("w+", encoding="utf-8") as f: + json.dump(outdata, f) - if args.verbose: print("deleting empty folders") - for curdir, subdirs, files in os.walk(Path(topPath, timestamp, surfaceReference, daytimeReference)): - if len(subdirs) == 0 and len(files) == 0: - os.rmdir(curdir) + if args.verbose: print("deleting empty folders") + for curdir, subdirs, files in os.walk(Path(topPath, timestamp, surfaceReference, daytimeReference)): + if len(subdirs) == 0 and len(files) == 0: + os.rmdir(curdir) diff --git a/zoom.py b/zoom.py index b2a81e3..54a2294 100644 --- a/zoom.py +++ b/zoom.py @@ -4,15 +4,12 @@ from argparse import Namespace import os from pathlib import Path -import subprocess -import sys -import time from shutil import get_terminal_size as tsize from sys import platform as _platform import numpy import psutil -from PIL import Image, ImageChops +from PIL import Image from turbojpeg import TurboJPEG maxQuality = False # Set this to true if you want to compress/postprocess the images yourself later @@ -31,442 +28,442 @@ def printErase(arg): - try: - tsiz = tsize()[0] - print("\r{}{}\n".format(arg, " " * (tsiz*math.ceil(len(arg)/tsiz)-len(arg) - 1)), end="", flush=True) - except: - #raise - pass + try: + tsiz = tsize()[0] + print("\r{}{}\n".format(arg, " " * (tsiz*math.ceil(len(arg)/tsiz)-len(arg) - 1)), end="", flush=True) + except: + #raise + pass # note that these are all 64 bit libraries since factorio doesnt support 32 bit. if os.name == "nt": - jpeg = TurboJPEG(Path(__file__, "..", "mozjpeg/turbojpeg.dll").resolve().as_posix()) + jpeg = TurboJPEG(Path(__file__, "..", "mozjpeg/turbojpeg.dll").resolve().as_posix()) # elif _platform == "darwin": # I'm not actually sure if mac can run linux libraries or not. 
# jpeg = TurboJPEG("mozjpeg/libturbojpeg.dylib") # If anyone on mac has problems with the line below please make an issue :) else: - jpeg = TurboJPEG(Path(__file__, "..", "mozjpeg/libturbojpeg.so").resolve().as_posix()) + jpeg = TurboJPEG(Path(__file__, "..", "mozjpeg/libturbojpeg.so").resolve().as_posix()) def saveCompress(img, path: Path): - if maxQuality: # do not waste any time compressing the image - return img.save(path, subsampling=0, quality=100) + if maxQuality: # do not waste any time compressing the image + return img.save(path, subsampling=0, quality=100) - outFile = path.open("wb") - outFile.write(jpeg.encode(numpy.array(img)[:, :, ::-1].copy())) - outFile.close() + outFile = path.open("wb") + outFile.write(jpeg.encode(numpy.array(img)[:, :, ::-1].copy())) + outFile.close() def simpleZoom(workQueue): - for (folder, start, stop, filename) in workQueue: - path = Path(folder, str(start), filename) - img = Image.open(path.with_suffix(EXT), mode="r").convert("RGB") - if OUTEXT != EXT: - saveCompress(img, path.with_suffix(OUTEXT)) - path.with_suffix(EXT).unlink() - - for z in range(start - 1, stop - 1, -1): - if img.size[0] >= MINRENDERBOXSIZE * 2 and img.size[1] >= MINRENDERBOXSIZE * 2: - img = img.resize((img.size[0] // 2, img.size[1] // 2), Image.Resampling.LANCZOS) - zFolder = Path(folder, str(z)) - if not zFolder.exists(): - zFolder.mkdir(parents=True) - saveCompress(img, Path(zFolder, filename).with_suffix(OUTEXT)) + for (folder, start, stop, filename) in workQueue: + path = Path(folder, str(start), filename) + img = Image.open(path.with_suffix(EXT), mode="r").convert("RGB") + if OUTEXT != EXT: + saveCompress(img, path.with_suffix(OUTEXT)) + path.with_suffix(EXT).unlink() + + for z in range(start - 1, stop - 1, -1): + if img.size[0] >= MINRENDERBOXSIZE * 2 and img.size[1] >= MINRENDERBOXSIZE * 2: + img = img.resize((img.size[0] // 2, img.size[1] // 2), Image.Resampling.LANCZOS) + zFolder = Path(folder, str(z)) + if not zFolder.exists(): + zFolder.mkdir(parents=True) + saveCompress(img, Path(zFolder, filename).with_suffix(OUTEXT)) def zoomRenderboxes(daytimeSurfaces, toppath, timestamp, subpath, args): - with Path(toppath, "mapInfo.json").open("r+", encoding="utf-8") as mapInfoFile: - mapInfo = json.load(mapInfoFile) - - outFile = Path(toppath, "mapInfo.out.json") - if outFile.exists(): - with outFile.open("r", encoding="utf-8") as mapInfoOutFile: - outInfo = json.load(mapInfoOutFile) - else: - outInfo = {"maps": {}} - - mapLayer = None - mapIndex = None - - for i, m in enumerate(mapInfo["maps"]): - if m["path"] == timestamp: - mapLayer = m - mapIndex = str(i) - - if not mapLayer or not mapIndex: - raise Exception("mapLayer or mapIndex missing") - - if mapIndex not in outInfo["maps"]: - outInfo["maps"][mapIndex] = {"surfaces": {}} - - zoomWork = set() - for daytime, activeSurfaces in daytimeSurfaces.items(): - surfaceZoomLevels = {} - for surfaceName in activeSurfaces: - surfaceZoomLevels[surfaceName] = ( - mapLayer["surfaces"][surfaceName]["zoom"]["max"] - - mapLayer["surfaces"][surfaceName]["zoom"]["min"] - ) - - for surfaceName, surface in mapLayer["surfaces"].items(): - if "links" in surface: - - if surfaceName not in outInfo["maps"][mapIndex]["surfaces"]: - outInfo["maps"][mapIndex]["surfaces"][surfaceName] = {} - if "links" not in outInfo["maps"][mapIndex]["surfaces"][surfaceName]: - outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"] = [] - - for linkIndex, link in enumerate(surface["links"]): - if link["type"] == "link_renderbox_area" and "zoom" in link: - 
totalZoomLevelsRequired = 0 - for zoomSurface, zoomLevel in link["maxZoomFromSurfaces"].items(): - if zoomSurface in surfaceZoomLevels: - totalZoomLevelsRequired = max( - totalZoomLevelsRequired, - zoomLevel + surfaceZoomLevels[zoomSurface], - ) - - if not outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]: - outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex] = {} - if "zoom" not in outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]: - outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]["zoom"] = {} - - link["zoom"]["min"] = link["zoom"]["max"] - totalZoomLevelsRequired - outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]["zoom"]["min"] = link["zoom"]["min"] - - # an assumption is made that the total zoom levels required doesnt change between snapshots. - if (link if "path" in link else outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex])["path"] == timestamp: - zoomWork.add( - ( - Path( - subpath, - mapLayer["path"], - link["toSurface"], - daytime if link["daynight"] else "day", - "renderboxes", - ).resolve(), - link["zoom"]["max"], - link["zoom"]["min"], - link["filename"], - ) - ) - - with outFile.open("w", encoding="utf-8") as mapInfoOutFile: - json.dump(outInfo, mapInfoOutFile) - mapInfoOutFile.truncate() - - maxthreads = args.zoomthreads if args.zoomthreads else args.maxthreads - processes = [] - zoomWork = list(zoomWork) - for i in range(0, min(maxthreads, len(zoomWork))): - p = mp.Process(target=simpleZoom, args=(zoomWork[i::maxthreads],)) - p.start() - processes.append(p) - for p in processes: - p.join() + with Path(toppath, "mapInfo.json").open("r+", encoding="utf-8") as mapInfoFile: + mapInfo = json.load(mapInfoFile) + + outFile = Path(toppath, "mapInfo.out.json") + if outFile.exists(): + with outFile.open("r", encoding="utf-8") as mapInfoOutFile: + outInfo = json.load(mapInfoOutFile) + else: + outInfo = {"maps": {}} + + mapLayer = None + mapIndex = None + + for i, m in enumerate(mapInfo["maps"]): + if m["path"] == timestamp: + mapLayer = m + mapIndex = str(i) + + if not mapLayer or not mapIndex: + raise Exception("mapLayer or mapIndex missing") + + if mapIndex not in outInfo["maps"]: + outInfo["maps"][mapIndex] = {"surfaces": {}} + + zoomWork = set() + for daytime, activeSurfaces in daytimeSurfaces.items(): + surfaceZoomLevels = {} + for surfaceName in activeSurfaces: + surfaceZoomLevels[surfaceName] = ( + mapLayer["surfaces"][surfaceName]["zoom"]["max"] + - mapLayer["surfaces"][surfaceName]["zoom"]["min"] + ) + + for surfaceName, surface in mapLayer["surfaces"].items(): + if "links" in surface: + + if surfaceName not in outInfo["maps"][mapIndex]["surfaces"]: + outInfo["maps"][mapIndex]["surfaces"][surfaceName] = {} + if "links" not in outInfo["maps"][mapIndex]["surfaces"][surfaceName]: + outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"] = [] + + for linkIndex, link in enumerate(surface["links"]): + if link["type"] == "link_renderbox_area" and "zoom" in link: + totalZoomLevelsRequired = 0 + for zoomSurface, zoomLevel in link["maxZoomFromSurfaces"].items(): + if zoomSurface in surfaceZoomLevels: + totalZoomLevelsRequired = max( + totalZoomLevelsRequired, + zoomLevel + surfaceZoomLevels[zoomSurface], + ) + + if not outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]: + outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex] = {} + if "zoom" not in outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]: + 
outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]["zoom"] = {} + + link["zoom"]["min"] = link["zoom"]["max"] - totalZoomLevelsRequired + outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex]["zoom"]["min"] = link["zoom"]["min"] + + # an assumption is made that the total zoom levels required doesnt change between snapshots. + if (link if "path" in link else outInfo["maps"][mapIndex]["surfaces"][surfaceName]["links"][linkIndex])["path"] == timestamp: + zoomWork.add( + ( + Path( + subpath, + mapLayer["path"], + link["toSurface"], + daytime if link["daynight"] else "day", + "renderboxes", + ).resolve(), + link["zoom"]["max"], + link["zoom"]["min"], + link["filename"], + ) + ) + + with outFile.open("w", encoding="utf-8") as mapInfoOutFile: + json.dump(outInfo, mapInfoOutFile) + mapInfoOutFile.truncate() + + maxthreads = args.zoomthreads if args.zoomthreads else args.maxthreads + processes = [] + zoomWork = list(zoomWork) + for i in range(0, min(maxthreads, len(zoomWork))): + p = mp.Process(target=simpleZoom, args=(zoomWork[i::maxthreads],)) + p.start() + processes.append(p) + for p in processes: + p.join() def work(basepath, pathList, surfaceName, daytime, size, start, stop, last, chunk, keepLast=False): - chunksize = 2 ** (start - stop) - if start > stop: - for k in range(start, stop, -1): - x = chunksize * chunk[0] - y = chunksize * chunk[1] - for j in range(y, y + chunksize, 2): - for i in range(x, x + chunksize, 2): - - coords = [(0, 0), (1, 0), (0, 1), (1, 1)] - paths = [ - Path( - basepath, - pathList[0], - surfaceName, - daytime, - str(k), - str(i + coord[0]), - str(j + coord[1]), - ).with_suffix(EXT) - for coord in coords - ] - - if any(path.exists() for path in paths): - - if not Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2)).exists(): - try: - Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2)).mkdir(parents=True) - except OSError: - pass - - isOriginal = [] - for m in range(len(coords)): - isOriginal.append(paths[m].is_file()) - if not isOriginal[m]: - for n in range(1, len(pathList)): - paths[m] = Path(basepath, pathList[n], surfaceName, daytime, str(k), str(i + coords[m][0]), str(j + coords[m][1])).with_suffix(OUTEXT) - if paths[m].is_file(): - break - - result = Image.new("RGB", (size, size), BACKGROUNDCOLOR) - - images = [] - for m in range(len(coords)): - if paths[m].is_file(): - img = Image.open(paths[m], mode="r").convert("RGB") - result.paste( - box=( - coords[m][0] * size // 2, - coords[m][1] * size // 2, - ), - im=img.resize( - (size // 2, size // 2), Image.Resampling.LANCZOS - ), - ) - - if isOriginal[m]: - images.append((img, paths[m])) - - if k == last + 1: - saveCompress(result, Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2), str(j // 2)).with_suffix(OUTEXT)) - if OUTEXT != EXT and (k != last + 1 or keepLast): - result.save(Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2), str(j // 2), ).with_suffix(EXT)) - - if OUTEXT != EXT: - for img, path in images: - saveCompress(img, path.with_suffix(OUTEXT)) - path.unlink() - - chunksize = chunksize // 2 - elif stop == last: - path = Path(basepath, pathList[0], surfaceName, daytime, str(start), str(chunk[0]), str(chunk[1])) - img = Image.open(path.with_suffix(EXT), mode="r").convert("RGB") - saveCompress(img, path.with_suffix(OUTEXT)) - path.with_suffix(EXT).unlink() + chunksize = 2 ** (start - stop) + if start > stop: + for k in range(start, stop, -1): + x = chunksize * chunk[0] + y = 
chunksize * chunk[1] + for j in range(y, y + chunksize, 2): + for i in range(x, x + chunksize, 2): + + coords = [(0, 0), (1, 0), (0, 1), (1, 1)] + paths = [ + Path( + basepath, + pathList[0], + surfaceName, + daytime, + str(k), + str(i + coord[0]), + str(j + coord[1]), + ).with_suffix(EXT) + for coord in coords + ] + + if any(path.exists() for path in paths): + + if not Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2)).exists(): + try: + Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2)).mkdir(parents=True) + except OSError: + pass + + isOriginal = [] + for m in range(len(coords)): + isOriginal.append(paths[m].is_file()) + if not isOriginal[m]: + for n in range(1, len(pathList)): + paths[m] = Path(basepath, pathList[n], surfaceName, daytime, str(k), str(i + coords[m][0]), str(j + coords[m][1])).with_suffix(OUTEXT) + if paths[m].is_file(): + break + + result = Image.new("RGB", (size, size), BACKGROUNDCOLOR) + + images = [] + for m in range(len(coords)): + if paths[m].is_file(): + img = Image.open(paths[m], mode="r").convert("RGB") + result.paste( + box=( + coords[m][0] * size // 2, + coords[m][1] * size // 2, + ), + im=img.resize( + (size // 2, size // 2), Image.Resampling.LANCZOS + ), + ) + + if isOriginal[m]: + images.append((img, paths[m])) + + if k == last + 1: + saveCompress(result, Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2), str(j // 2)).with_suffix(OUTEXT)) + if OUTEXT != EXT and (k != last + 1 or keepLast): + result.save(Path(basepath, pathList[0], surfaceName, daytime, str(k - 1), str(i // 2), str(j // 2), ).with_suffix(EXT)) + + if OUTEXT != EXT: + for img, path in images: + saveCompress(img, path.with_suffix(OUTEXT)) + path.unlink() + + chunksize = chunksize // 2 + elif stop == last: + path = Path(basepath, pathList[0], surfaceName, daytime, str(start), str(chunk[0]), str(chunk[1])) + img = Image.open(path.with_suffix(EXT), mode="r").convert("RGB") + saveCompress(img, path.with_suffix(OUTEXT)) + path.with_suffix(EXT).unlink() def thread(basepath, pathList, surfaceName, daytime, size, start, stop, last, allChunks, counter, resultQueue, keepLast=False): - #print(start, stop, chunks) - while True: - with counter.get_lock(): - i = counter.value - 1 - if i < 0: - return - counter.value = i - chunk = allChunks[i] - work(basepath, pathList, surfaceName, daytime, size, start, stop, last, chunk, keepLast) - resultQueue.put(True) + #print(start, stop, chunks) + while True: + with counter.get_lock(): + i = counter.value - 1 + if i < 0: + return + counter.value = i + chunk = allChunks[i] + work(basepath, pathList, surfaceName, daytime, size, start, stop, last, chunk, keepLast) + resultQueue.put(True) def zoom( - outFolder: Path, - timestamp: str = None, - surfaceReference: str = None, - daytimeReference: str = None, - basepath: Path = None, - needsThumbnail: bool = True, - args: Namespace = Namespace(), + outFolder: Path, + timestamp: str = None, + surfaceReference: str = None, + daytimeReference: str = None, + basepath: Path = None, + needsThumbnail: bool = True, + args: Namespace = Namespace(), ): - psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == "nt" else 10) - - workFolder = basepath if basepath else Path(__file__, "..", "..", "..", "script-output", "FactorioMaps").resolve() - - topPath = Path(workFolder, outFolder) - dataPath = Path(topPath, "mapInfo.json") - imagePath = Path(topPath, "Images") - maxthreads = args.zoomthreads if args.zoomthreads else args.maxthreads - - with 
dataPath.open("r", encoding="utf-8") as f: - data = json.load(f) - for mapIndex, map in enumerate(data["maps"]): - if timestamp is None or map["path"] == timestamp: - for surfaceName, surface in map["surfaces"].items(): - if surfaceReference is None or surfaceName == surfaceReference: - maxzoom = surface["zoom"]["max"] - minzoom = surface["zoom"]["min"] - - daytimes = [] - if "day" in surface: - daytimes.append("day") - if "night" in surface: - daytimes.append("night") - for daytime in daytimes: - if daytimeReference is None or daytime == daytimeReference: - if not Path(topPath, "Images", str(map["path"]), surfaceName, daytime, str(maxzoom - 1)).is_dir(): - - print(f"zoom {0:5.1f}% [{' ' * (tsize()[0]-15)}]", end="") - - generateThumbnail = ( - needsThumbnail - and mapIndex == len(data["maps"]) - 1 - and surfaceName - == ( - "nauvis" - if "nauvis" in map["surfaces"] - else sorted(map["surfaces"].keys())[0] - ) - and daytime == daytimes[0] - ) - - allBigChunks = {} - minX = float("inf") - maxX = float("-inf") - minY = float("inf") - maxY = float("-inf") - imageSize: int = None - for xStr in Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom)).iterdir(): - x = int(xStr.name) - minX = min(minX, x) - maxX = max(maxX, x) - for yStr in Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom), xStr).iterdir(): - if imageSize is None: - imageSize = Image.open(Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom), xStr, yStr), mode="r").size[0] - y = int(yStr.stem) - minY = min(minY, y) - maxY = max(maxY, y) - allBigChunks[ - ( - x >> maxzoom-minzoom, - y >> maxzoom-minzoom, - ) - ] = True - - if len(allBigChunks) <= 0: - continue - - pathList = [] - for otherMapIndex in range(mapIndex, -1, -1): - pathList.append(str(data["maps"][otherMapIndex]["path"])) - - threadsplit = 0 - while 4**threadsplit * len(allBigChunks) < maxthreads: - threadsplit = threadsplit + 1 - threadsplit = min(max(maxzoom - minzoom - 3, 0), threadsplit + 3) - allChunks = [] - for pos in list(allBigChunks): - for i in range(2**threadsplit): - for j in range(2**threadsplit): - allChunks.append( - ( - pos[0] * (2**threadsplit) + i, - pos[1] * (2**threadsplit) + j, - ) - ) - - threads = min(len(allChunks), maxthreads) - processes = [] - originalSize = len(allChunks) - - # print(("%s %s %s %s" % (pathList[0], str(surfaceName), daytime, pathList))) - # print(("%s-%s (total: %s):" % (start, stop + threadsplit, len(allChunks)))) - counter = mp.Value("i", originalSize) - resultQueue = mp.Queue() - for _ in range(0, threads): - p = mp.Process( - target=thread, - args=( - imagePath, - pathList, - surfaceName, - daytime, - imageSize, - maxzoom, - minzoom + threadsplit, - minzoom, - allChunks, - counter, - resultQueue, - generateThumbnail, - ), - ) - p.start() - processes.append(p) - - doneSize = 0 - for _ in range(originalSize): - resultQueue.get(True) - doneSize += 1 - progress = float(doneSize) / originalSize - tsiz = tsize()[0] - 15 - print( - "\rzoom {:5.1f}% [{}{}]".format( - round(progress * 98, 1), - "=" * int(progress * tsiz), - " " * (tsiz - int(progress * tsiz)), - ), - end="", - ) - - for p in processes: - p.join() - - if threadsplit > 0: - # print(("finishing up: %s-%s (total: %s)" % (stop + threadsplit, stop, len(allBigChunks)))) - processes = [] - i = len(allBigChunks) - 1 - for chunk in list(allBigChunks): - p = mp.Process( - target=work, - args=( - imagePath, - pathList, - surfaceName, - daytime, - imageSize, - minzoom + threadsplit, - minzoom, - minzoom, - chunk, - 
generateThumbnail, - ), - ) - i = i - 1 - p.start() - processes.append(p) - for p in processes: - p.join() - - if generateThumbnail: - printErase("generating thumbnail") - minzoompath = Path( - imagePath, - str(map["path"]), - surfaceName, - daytime, - str(minzoom), - ) - - if imageSize is None: - raise Exception("Missing imageSize for thumbnail generation") - - thumbnail = Image.new( - "RGB", - ( - (maxX - minX + 1) * imageSize >> maxzoom-minzoom, - (maxY - minY + 1) * imageSize >> maxzoom-minzoom, - ), - BACKGROUNDCOLOR, - ) - bigMinX = minX >> maxzoom-minzoom - bigMinY = minY >> maxzoom-minzoom - xOffset = ((bigMinX * imageSize << maxzoom-minzoom) - minX * imageSize) >> maxzoom-minzoom - yOffset = ((bigMinY * imageSize << maxzoom-minzoom) - minY * imageSize) >> maxzoom-minzoom - for chunk in list(allBigChunks): - path = Path(minzoompath, str(chunk[0]), str(chunk[1])).with_suffix(EXT) - thumbnail.paste( - box=( - xOffset + (chunk[0] - bigMinX) * imageSize, - yOffset + (chunk[1] - bigMinY) * imageSize, - ), - im=Image.open(path, mode="r") - .convert("RGB") - .resize((imageSize, imageSize), Image.Resampling.LANCZOS), - ) - - if OUTEXT != EXT: - path.unlink() - - thumbnail.save(Path(imagePath, "thumbnail" + THUMBNAILEXT)) - - print("\rzoom {:5.1f}% [{}]".format(100, "=" * (tsize()[0] - 15))) + psutil.Process(os.getpid()).nice(psutil.BELOW_NORMAL_PRIORITY_CLASS if os.name == "nt" else 10) + + workFolder = basepath if basepath else Path(__file__, "..", "..", "..", "script-output", "FactorioMaps").resolve() + + topPath = Path(workFolder, outFolder) + dataPath = Path(topPath, "mapInfo.json") + imagePath = Path(topPath, "Images") + maxthreads = args.zoomthreads if args.zoomthreads else args.maxthreads + + with dataPath.open("r", encoding="utf-8") as f: + data = json.load(f) + for mapIndex, map in enumerate(data["maps"]): + if timestamp is None or map["path"] == timestamp: + for surfaceName, surface in map["surfaces"].items(): + if surfaceReference is None or surfaceName == surfaceReference: + maxzoom = surface["zoom"]["max"] + minzoom = surface["zoom"]["min"] + + daytimes = [] + if "day" in surface: + daytimes.append("day") + if "night" in surface: + daytimes.append("night") + for daytime in daytimes: + if daytimeReference is None or daytime == daytimeReference: + if not Path(topPath, "Images", str(map["path"]), surfaceName, daytime, str(maxzoom - 1)).is_dir(): + + print(f"zoom {0:5.1f}% [{' ' * (tsize()[0]-15)}]", end="") + + generateThumbnail = ( + needsThumbnail + and mapIndex == len(data["maps"]) - 1 + and surfaceName + == ( + "nauvis" + if "nauvis" in map["surfaces"] + else sorted(map["surfaces"].keys())[0] + ) + and daytime == daytimes[0] + ) + + allBigChunks = {} + minX = float("inf") + maxX = float("-inf") + minY = float("inf") + maxY = float("-inf") + imageSize: int = None + for xStr in Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom)).iterdir(): + x = int(xStr.name) + minX = min(minX, x) + maxX = max(maxX, x) + for yStr in Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom), xStr).iterdir(): + if imageSize is None: + imageSize = Image.open(Path(imagePath, str(map["path"]), surfaceName, daytime, str(maxzoom), xStr, yStr), mode="r").size[0] + y = int(yStr.stem) + minY = min(minY, y) + maxY = max(maxY, y) + allBigChunks[ + ( + x >> maxzoom-minzoom, + y >> maxzoom-minzoom, + ) + ] = True + + if len(allBigChunks) <= 0: + continue + + pathList = [] + for otherMapIndex in range(mapIndex, -1, -1): + pathList.append(str(data["maps"][otherMapIndex]["path"])) 
+ + threadsplit = 0 + while 4**threadsplit * len(allBigChunks) < maxthreads: + threadsplit = threadsplit + 1 + threadsplit = min(max(maxzoom - minzoom - 3, 0), threadsplit + 3) + allChunks = [] + for pos in list(allBigChunks): + for i in range(2**threadsplit): + for j in range(2**threadsplit): + allChunks.append( + ( + pos[0] * (2**threadsplit) + i, + pos[1] * (2**threadsplit) + j, + ) + ) + + threads = min(len(allChunks), maxthreads) + processes = [] + originalSize = len(allChunks) + + # print(("%s %s %s %s" % (pathList[0], str(surfaceName), daytime, pathList))) + # print(("%s-%s (total: %s):" % (start, stop + threadsplit, len(allChunks)))) + counter = mp.Value("i", originalSize) + resultQueue = mp.Queue() + for _ in range(0, threads): + p = mp.Process( + target=thread, + args=( + imagePath, + pathList, + surfaceName, + daytime, + imageSize, + maxzoom, + minzoom + threadsplit, + minzoom, + allChunks, + counter, + resultQueue, + generateThumbnail, + ), + ) + p.start() + processes.append(p) + + doneSize = 0 + for _ in range(originalSize): + resultQueue.get(True) + doneSize += 1 + progress = float(doneSize) / originalSize + tsiz = tsize()[0] - 15 + print( + "\rzoom {:5.1f}% [{}{}]".format( + round(progress * 98, 1), + "=" * int(progress * tsiz), + " " * (tsiz - int(progress * tsiz)), + ), + end="", + ) + + for p in processes: + p.join() + + if threadsplit > 0: + # print(("finishing up: %s-%s (total: %s)" % (stop + threadsplit, stop, len(allBigChunks)))) + processes = [] + i = len(allBigChunks) - 1 + for chunk in list(allBigChunks): + p = mp.Process( + target=work, + args=( + imagePath, + pathList, + surfaceName, + daytime, + imageSize, + minzoom + threadsplit, + minzoom, + minzoom, + chunk, + generateThumbnail, + ), + ) + i = i - 1 + p.start() + processes.append(p) + for p in processes: + p.join() + + if generateThumbnail: + printErase("generating thumbnail") + minzoompath = Path( + imagePath, + str(map["path"]), + surfaceName, + daytime, + str(minzoom), + ) + + if imageSize is None: + raise Exception("Missing imageSize for thumbnail generation") + + thumbnail = Image.new( + "RGB", + ( + (maxX - minX + 1) * imageSize >> maxzoom-minzoom, + (maxY - minY + 1) * imageSize >> maxzoom-minzoom, + ), + BACKGROUNDCOLOR, + ) + bigMinX = minX >> maxzoom-minzoom + bigMinY = minY >> maxzoom-minzoom + xOffset = ((bigMinX * imageSize << maxzoom-minzoom) - minX * imageSize) >> maxzoom-minzoom + yOffset = ((bigMinY * imageSize << maxzoom-minzoom) - minY * imageSize) >> maxzoom-minzoom + for chunk in list(allBigChunks): + path = Path(minzoompath, str(chunk[0]), str(chunk[1])).with_suffix(EXT) + thumbnail.paste( + box=( + xOffset + (chunk[0] - bigMinX) * imageSize, + yOffset + (chunk[1] - bigMinY) * imageSize, + ), + im=Image.open(path, mode="r") + .convert("RGB") + .resize((imageSize, imageSize), Image.Resampling.LANCZOS), + ) + + if OUTEXT != EXT: + path.unlink() + + thumbnail.save(Path(imagePath, "thumbnail" + THUMBNAILEXT)) + + print("\rzoom {:5.1f}% [{}]".format(100, "=" * (tsize()[0] - 15))) From f4bc2df212ead01353e910a57e380ffe49c844b7 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 16:04:59 +0200 Subject: [PATCH 06/13] fix renderboxes not showing up. 
Fixes #118 --- auto.py | 6 +++--- ref.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/auto.py b/auto.py index e2b6c45..d14d69b 100644 --- a/auto.py +++ b/auto.py @@ -543,8 +543,8 @@ def driveExists(drive): daytimes = [] if args.day: daytimes.append("day") - # if args.night: - # daytimes.append("night") + if args.night: + daytimes.append("night") for index, savename in () if args.dry else enumerate(saveGames): for daytimeIndex, setDaytime in enumerate(daytimes): @@ -745,7 +745,7 @@ def refZoom(): data["maps"][int(mapIndex)]["surfaces"][surfaceName]["chunks"] = surfaceStuff["chunks"] if "links" in surfaceStuff: for linkIndex, link in enumerate(surfaceStuff["links"]): - if "filename" in link: + if "zoom" in link: data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["path"] = link["path"] data["maps"][int(mapIndex)]["surfaces"][surfaceName]["links"][linkIndex]["zoom"]["min"] = link["zoom"]["min"] destf.seek(0) diff --git a/ref.py b/ref.py index b0bf0cc..4fde62e 100644 --- a/ref.py +++ b/ref.py @@ -328,7 +328,7 @@ def readCropList(path, combinePrevious): for daytime in ("day", "night"): if link["type"] == "link_renderbox_area" and (link["daynight"] or daytime == "day"): - if "filename" in link: + if "zoom" in link: path = os.path.join(link["toSurface"], daytime if link["daynight"] else "day", "renderboxes", str(surface["zoom"]["max"]), link["filename"]) if path not in linksByPath: From a2b50a6838634c053613cd33683b8fe90a444c17 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 17:26:43 +0200 Subject: [PATCH 07/13] stop tags from being too large --- web/index.css | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/web/index.css b/web/index.css index 4fea8cc..36e1795 100644 --- a/web/index.css +++ b/web/index.css @@ -177,6 +177,11 @@ map-marker { display: block; cursor: grab; } +map-marker > img { + width: 32px; + height: 32px; + object-fit: contain; +} map-marker span { color: white; font-size: 1em; From 3a47c7ee13b2331fdb9a694ca6c567d562028dd1 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 17:38:45 +0200 Subject: [PATCH 08/13] tag style improvements --- web/index.css | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/web/index.css b/web/index.css index 36e1795..18d0ea6 100644 --- a/web/index.css +++ b/web/index.css @@ -170,25 +170,26 @@ img.leaflet-tile.leaflet-tile-loaded { map-marker { - width: 32px; - height: 32px; - margin-left: -16px; - margin-top: -16px; + --label-image-size: 48px; + width: var(--label-image-size); + height: var(--label-image-size); + margin-left: calc(var(--label-image-size) / -2); + margin-top: calc(var(--label-image-size) / -2); display: block; cursor: grab; } map-marker > img { - width: 32px; - height: 32px; + width: var(--label-image-size); + height: var(--label-image-size); object-fit: contain; } map-marker span { color: white; font-size: 1em; - margin: 0 -32px; + margin: 0 calc(-1 * var(--label-image-size)); text-align: center; display: block; - width: 96px; + width: calc(3 * var(--label-image-size)); } map-marker span img { height: 2em; From 3d722d1d59164b9edd96b095cf2810ef0ec5b023 Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 17:53:30 +0200 Subject: [PATCH 09/13] improve json->lua, clean autorun file on dirty exit --- auto.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/auto.py b/auto.py index d14d69b..95383f3 100644 --- a/auto.py +++ b/auto.py @@ -269,12 +269,16 @@ def changeModlist(modpath: Path,newState: 
bool): json.dump(modlist, f, indent=2) +AUTORUN_PATH = Path(__file__, "..", "autorun.lua").resolve() +def clearAutorun(): + AUTORUN_PATH.open('w', encoding="utf-8").close() + def buildAutorun(args: Namespace, workFolder: Path, outFolder: Path, isFirstSnapshot: bool, daytime: str): printErase("Building autorun.lua") mapInfoPath = Path(workFolder, "mapInfo.json") if mapInfoPath.is_file(): with mapInfoPath.open("r", encoding='utf-8') as f: - mapInfoLua = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}")) + mapInfoLua = re.sub(r'"([^"]+)" *:', lambda m: '["'+m.group(1)+'"] = ', f.read().replace("[", "{").replace("]", "}").replace('"', '\\"')) # TODO: Update for new argument parsing # if isFirstSnapshot: # f.seek(0) @@ -298,7 +302,7 @@ def buildAutorun(args: Namespace, workFolder: Path, outFolder: Path, isFirstSnap def lowerBool(value: bool): return str(value).lower() - with Path(__file__, "..", "autorun.lua").resolve().open("w", encoding="utf-8") as f: + with AUTORUN_PATH.resolve().open("w", encoding="utf-8") as f: surfaceString = '{"' + '", "'.join(args.surface) + '"}' if args.surface else "nil" autorunString = \ f'''fm.autorun = {{ @@ -642,8 +646,7 @@ def driveExists(drive): while not datapath.exists(): time.sleep(0.4) - # empty autorun.lua - Path(__file__, "..", "autorun.lua").resolve().open('w', encoding="utf-8").close() + clearAutorun() latest = [] with datapath.open('r', encoding="utf-8") as f: @@ -899,6 +902,8 @@ def addTag(tags, itemType, itemName, force=False): except: pass + clearAutorun() + changeModlist(args.mod_path, False) if __name__ == '__main__': From b636ada9e261d222ce8d1d937934cea98c52c10f Mon Sep 17 00:00:00 2001 From: L0laapk3 Date: Fri, 13 Oct 2023 17:56:39 +0200 Subject: [PATCH 10/13] add rich text to limitations --- README.md | 1 + autorun.lua | 17 +++++++++++++++++ 2 files changed, 18 insertions(+) diff --git a/README.md b/README.md index ac01447..3913ff5 100644 --- a/README.md +++ b/README.md @@ -88,6 +88,7 @@ If you wish to host your map for other people to a server, you need to take into * If you only have the steam version of factorio, steam will ask you to confirm the arguments everytime the script tries to start up. The popup window will sometimes not focus properly. Please press alt tab a couple of times until it shows up. The only way to get around this is to install the standalone version of factorio. * If the program crashes while making a snapshot, it is very likely to leave timelines behind in a 'bricked' state and will probably mess up future snapshots. The easiest way is to simply start over and regenerate all the snapshots from old savefiles. If thats not a possibility, feel free to contact me on discord (L0laapk3#2010) or create an Issue, I'll do my best to help you out. * Running this on headless servers is not possible due to factorio limitations. +* Not all factorio rich text is supported [#93](https://github.com/L0laapk3/FactorioMaps/issues/93). # Issues If you have problems or questions setting things up, feel free to reach out to me on discord at L0laapk3#2010. 
diff --git a/autorun.lua b/autorun.lua index e69de29..65b6e8c 100644 --- a/autorun.lua +++ b/autorun.lua @@ -0,0 +1,17 @@ +fm.autorun = { + HD = false, + daytime = "day", + alt_mode = true, + tags = true, + around_tag_range = 5.2, + around_build_range = 5.2, + around_connect_range = 1.2, + connect_types = {"lamp", "electric-pole", "radar", "straight-rail", "curved-rail", "rail-signal", "rail-chain-signal", "locomotive", "cargo-wagon", "fluid-wagon", "car"}, + date = "13/10/23", + surfaces = nil, + name = "tagtest/", + mapInfo = {\["options"] = {\["ranges"] = {\["build"] = 5.2, \["connect"] = 1.2, \["tag"] = 5.2}, \["HD"] = false, \["defaultTimestamp"] = -1}, \["seed"] = 238255205, \["mapExchangeString"] = ">>>eNpjZGBkiGYAgwZ7EOZgSc5PzIHxQJgrOb+gILVIN78oFVmYM7moNCVVNz8TVXFqXmpupW5SYjGKYo7Movw8dBNYi0vy81BFSopSU4uRRbhLixLzMktz0fUyMKbWGPM1tMgxgPD/egaD//9BGMi6APQLCDMwNkBUAsWggJEzNSe1LLEkMz+PzyDe0EwXzmVNzslMS2NgUHCEYJAtZABGMOnQ70BIxXyQCmYk8Q/2jOuSmW4AABR/Tbk=<<<", \["maps"] = {{\["tick"] = 160544, \["path"] = "1", \["date"] = "13/10/23", \["mods"] = {\["base"] = "1.1.91", \["L0laapk3_FactorioMaps"] = "4.3.0"}, \["surfaces"] = {\["nauvis"] = {\["spawn"] = {\["y"] = 0, \["x"] = 0}, \["zoom"] = {\["min"] = 16, \["max"] = 20}, \["tags"] = {{\["iconType"] = "virtual", \["iconName"] = "signal-red", \["iconPath"] = "Images/labels/virtual/signal-red.png", \["position"] = {\["y"] = -35, \["x"] = -139}, \["text"] = "all{ kinds{ of } nonsense.;)(", \["last_user"] = "L0laapk3", \["force"] = "player"}, {\["position"] = {\["y"] = 75, \["x"] = -131}, \["text"] = "< > yo