Mirror of https://github.com/RetroDECK/RetroDECK.git, synced 2024-11-22 05:55:38 +00:00

Merge pull request #635 from XargonWan/feat/steam-sync
SteamSync: Added BoilR + merge
This change is contained in commit 5347488108.

functions/steam-sync/config.toml (new file, 60 lines)
@@ -0,0 +1,60 @@
debug = false
config_version = 1
blacklisted_games = []

[steamgrid_db]
enabled = true
prefer_animated = false
banned_images = []
only_download_boilr_images = false
allow_nsfw = false

[steam]
create_collections = false
optimize_for_big_picture = false
stop_steam = false
start_steam = false

[bottles]
enabled = false

[epic_games]
enabled = false
safe_launch = []

[flatpak]
enabled = false

[gog]
enabled = false
create_symlinks = true

[heroic]
enabled = false
launch_games_through_heroic = []
default_launch_through_heroic = true

[itch]
enabled = false
create_symlinks = true

[legendary]
enabled = false

[lutris]
enabled = false
executable = "lutris"
flatpak = true
flatpak_image = "net.lutris.Lutris"
installed = true

[origin]
enabled = false

[uplay]
enabled = false

[minigalaxy]
enabled = true
create_symlinks = false
games_folder = "/var/config/boilr/sync/"
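For orientation, a minimal sketch (an editor's illustration, not part of the commit) of how this BoilR config can be inspected with Python's stdlib tomllib; the deployed path is an assumption based on the sync symlink the script creates below:

# Editor's illustrative sketch, not part of this commit.
import os
import tomllib  # stdlib TOML parser, Python 3.11+

# Assumed deployed location inside the RetroDECK flatpak sandbox.
config_path = os.path.expanduser("~/.var/app/net.retrodeck.retrodeck/config/boilr/config.toml")

with open(config_path, "rb") as f:  # tomllib requires binary mode
    cfg = tomllib.load(f)

# Print which game-source platforms BoilR will scan. In the config above only
# minigalaxy is enabled, pointed at the sync/ folder that steam-sync populates.
for section, values in cfg.items():
    if isinstance(values, dict) and values.get("enabled"):
        print(section, values)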
@@ -5,7 +5,6 @@ import re
import shlex
import shutil
import glob
import vdf
import sys

import xml.etree.ElementTree as ET
@@ -267,103 +266,61 @@ alt_command_list={
    "Beetle PCE": "flatpak run --command=retroarch net.retrodeck.retrodeck -L /var/config/retroarch/cores/mednafen_pce_libretro.so"
}

STEAM_DATA_DIRS = (
    "~/.steam/debian-installation",
    "~/.steam",
    "~/.local/share/steam",
    "~/.local/share/Steam",
    "~/.steam/steam",
    "~/.var/app/com.valvesoftware.Steam/data/steam",
    "~/.var/app/com.valvesoftware.Steam/data/Steam",
    "/usr/share/steam",
    "/usr/local/share/steam",
)
def create_shortcut_new(games,rdhome):
    old_games=os.listdir(rdhome+"/sync/")
def create_shortcut(games, launch_config_name=None):
    shortcut_path = get_shortcuts_vdf_path()
    if os.path.exists(shortcut_path):
        with open(shortcut_path, "rb") as shortcut_file:
            shortcuts = vdf.binary_loads(shortcut_file.read())['shortcuts'].values()
    else:
        shortcuts = []

    old_shortcuts=[]
    for shortcut in shortcuts:
        if "net.retrodeck.retrodeck" in shortcut["Exe"]:
            keep=False
            for game in games:
                gameid=generate_shortcut_id(game[0])
                if gameid==shortcut["appid"]:
                    shortcut["Exe"]=game[1]
                    game[0]="###"
                    keep=True
                    break
            if keep:
                old_shortcuts.append(shortcut)
        else:
            old_shortcuts.append(shortcut)

    new_shortcuts=[]
    for game in games:
        if not game[0]=="###":
            new_shortcuts=new_shortcuts+[generate_shortcut(game, launch_config_name)]

    shortcuts = list(old_shortcuts) + list(new_shortcuts)

    updated_shortcuts = {
        'shortcuts': {
            str(index): elem for index, elem in enumerate(shortcuts)
        }
    }
    with open(shortcut_path, "wb") as shortcut_file:
        shortcut_file.write(vdf.binary_dumps(updated_shortcuts))

def get_config_path():
    config_paths = search_recursive_in_steam_dirs("userdata/**/config/")
    if not config_paths:
        return None
    return config_paths[0]

def get_shortcuts_vdf_path():
    config_path = get_config_path()
    if not config_path:
        return None
    return os.path.join(config_path, "shortcuts.vdf")

def search_recursive_in_steam_dirs(path_suffix):
    """Perform a recursive search based on glob and return a
    list of hits"""
    results = []
    for candidate in STEAM_DATA_DIRS:
        glob_path = os.path.join(os.path.expanduser(candidate), path_suffix)
        for path in glob.glob(glob_path):
            results.append(path)
    return results
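Taken together, these removed helpers resolved Steam's per-user shortcuts file; a usage sketch (editor's illustration):

# Editor's illustrative sketch: resolving the per-user shortcuts.vdf with the helpers above.
vdf_path = get_shortcuts_vdf_path()  # first userdata/**/config/ hit across STEAM_DATA_DIRS, or None
if vdf_path:
    print("non-Steam shortcuts live in:", vdf_path)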
def generate_shortcut(game, launch_config_name):
    return {
        'appid': generate_shortcut_id(game[0]),
        'appname': f'{game[0]}',
        'Exe': f'{game[1]}',
        'StartDir': f'{os.path.expanduser("~")}',
        'icon': "",
        'LaunchOptions': "",
        'IsHidden': 0,
        'AllowDesktopConfig': 1,
        'AllowOverlay': 1,
        'OpenVR': 0,
        'Devkit': 0,
        'DevkitOverrideAppID': 0,
        'LastPlayTime': 0,
    }

def generate_preliminary_id(name):
    unique_id = ''.join(["RetroDECK", name])
    top = binascii.crc32(str.encode(unique_id, 'utf-8')) | 0x80000000
    return (top << 32) | 0x02000000

def generate_shortcut_id(name):
    return (generate_preliminary_id(name) >> 32) - 0x100000000
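These two helpers implement Steam's appid scheme for non-Steam shortcuts: CRC32 of "RetroDECK" plus the game name, with the high bit forced. A worked sketch with a hypothetical name (editor's illustration):

# Editor's illustrative sketch of the appid scheme above; the name is hypothetical.
import binascii

name = "Example Game"
top = binascii.crc32(("RetroDECK" + name).encode("utf-8")) | 0x80000000  # force the high bit
preliminary = (top << 32) | 0x02000000              # 64-bit id (used e.g. for grid artwork)
shortcut_appid = (preliminary >> 32) - 0x100000000  # signed 32-bit value stored in shortcuts.vdf
assert -2**31 <= shortcut_appid < 0                 # always negative, since the high bit is set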
    try:
        i=old_games.index(game[0])
        old_games[i]=0
    except ValueError:
        print(game[0]+" is a new game!")

    path=rdhome+"/sync/"+game[0]
    print("Go to path: "+path)
    if not os.path.exists(path):
        os.makedirs(path)
    fl=open(path+"/goggame-0.info","w")
    fl.write('{\n')
    fl.write(' "buildId": "",\n')
    fl.write(' "clientId": "",\n')
    fl.write(' "gameId": "",\n')
    fl.write(' "name": "'+game[0]+'",\n')
    fl.write(' "playTasks": [\n')
    fl.write(' {\n')
    fl.write(' "category": "launcher",\n')
    fl.write(' "isPrimary": true,\n')
    fl.write(' "languages": [\n')
    fl.write(' "en-US"\n')
    fl.write(' ],\n')
    fl.write(' "name": "'+game[0]+'",\n')
    fl.write(' "path": "launch.sh",\n')
    fl.write(' "type": "FileTask"\n')
    fl.write(' }\n')
    fl.write(' ]\n')
    fl.write('}\n')
    fl.close()

    fl=open(path+"/launch.sh","w")
    fl.write("#!/bin/bash\n\n")
    # Inside a flatpak sandbox the flatpak CLI is absent ("whereis" prints a bare
    # "flatpak:"), so the generated launcher escapes to the host via flatpak-spawn.
    fl.write('if test "$(whereis flatpak)" = "flatpak:"\n')
    fl.write("then\n")
    fl.write("flatpak-spawn --host "+game[1]+"\n")
    fl.write("else\n")
    fl.write(game[1]+"\n")
    fl.write("fi\n")
    fl.close()

    st=os.stat(path+"/launch.sh")
    os.chmod(path+"/launch.sh", st.st_mode | 0o0111)  # mark the launcher executable

    print("Start removing")
    print(old_games)
    for game in old_games:
        if game:
            shutil.rmtree(rdhome+"/sync/"+game)

    os.system("boilr --no-ui")

def addToSteam():
    print("Open RetroDECK config file: {}".format(os.path.expanduser("~/.var/app/net.retrodeck.retrodeck/config/retrodeck/retrodeck.cfg")))
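The goggame-0.info payload written line by line above is plain JSON, which BoilR's minigalaxy importer then picks up. For illustration only, an equivalent sketch via json.dumps (the name is hypothetical):

# Editor's illustrative sketch: the same goggame-0.info structure built with json.dumps.
import json

name = "Example Game"  # hypothetical game name
info = {
    "buildId": "",
    "clientId": "",
    "gameId": "",
    "name": name,
    "playTasks": [
        {
            "category": "launcher",
            "isPrimary": True,
            "languages": ["en-US"],
            "name": name,
            "path": "launch.sh",
            "type": "FileTask",
        }
    ],
}
print(json.dumps(info, indent=1))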
@@ -381,6 +338,12 @@ def addToSteam():
    command_list_default["pico8"]=command_list_default["pico8"].replace("{GAMEDIR}",roms_folder+"/pico8")
    alt_command_list["PICO-8 Splore (Standalone)"]=alt_command_list["PICO-8 Splore (Standalone)"].replace("{GAMEDIR}",roms_folder+"/pico8")

    if not os.path.exists(rdhome+"/sync/"):
        os.makedirs(rdhome+"/sync/")

    if not os.path.exists(os.path.expanduser("~/.var/app/net.retrodeck.retrodeck/config/boilr/sync")):
        os.symlink(rdhome+"/sync",os.path.expanduser("~/.var/app/net.retrodeck.retrodeck/config/boilr/sync"))

    for system in os.listdir(rdhome+"/gamelists/"):
        print("Start parsing system: {}".format(system))
@@ -428,8 +391,10 @@ def addToSteam():
                else:
                    games.append([name,alt_command_list[altemulator]+" '"+roms_folder+"/"+system+path[1:]+"'"])
                    print(alt_command_list[altemulator]+" '"+roms_folder+"/"+system+path[1:]+"'")

    create_shortcut(games)
    create_shortcut_new(games,rdhome)

if __name__=="__main__":
    addToSteam()
    print("Finish!")
@@ -1,467 +0,0 @@
"""
Module for deserializing/serializing to and from VDF

https://github.com/ValvePython/vdf

MIT License
"""
# pylint: disable=raise-missing-from

__version__ = "3.2"
__author__ = "Rossen Georgiev"

import re
import struct
from binascii import crc32
from io import StringIO as unicodeIO

string_type = str
int_type = int
BOMS = '\ufffe\ufeff'


def strip_bom(line):
    return line.lstrip(BOMS)


# string escaping
_unescape_char_map = {
    r"\n": "\n",
    r"\t": "\t",
    r"\v": "\v",
    r"\b": "\b",
    r"\r": "\r",
    r"\f": "\f",
    r"\a": "\a",
    r"\\": "\\",
    r"\?": "?",
    r"\"": "\"",
    r"\'": "\'",
}
_escape_char_map = {v: k for k, v in _unescape_char_map.items()}


def _re_escape_match(m):
    return _escape_char_map[m.group()]


def _re_unescape_match(m):
    return _unescape_char_map[m.group()]


def _escape(text):
    return re.sub(r"[\n\t\v\b\r\f\a\\\?\"']", _re_escape_match, text)


def _unescape(text):
    return re.sub(r"(\\n|\\t|\\v|\\b|\\r|\\f|\\a|\\\\|\\\?|\\\"|\\')", _re_unescape_match, text)

# parsing and dumping for KV1
def parse(fp, mapper=dict, merge_duplicate_keys=True, escaped=True):
    """
    Deserialize ``fp`` (a file-like object containing a VDF document)
    to a Python object.

    ``mapper`` specifies the Python object used after deserialization. ``dict`` is
    used by default. Alternatively, ``collections.OrderedDict`` can be used if you
    wish to preserve key order. Or any object that acts like a ``dict``.

    ``merge_duplicate_keys`` when ``True`` will merge multiple KeyValue lists with the
    same key into one instead of overwriting. You can set this to ``False`` if you are
    using ``VDFDict`` and need to preserve the duplicates.
    """
    if not issubclass(mapper, dict):
        raise TypeError("Expected mapper to be subclass of dict, got %s" % type(mapper))
    if not hasattr(fp, 'readline'):
        raise TypeError("Expected fp to be a file-like object supporting line iteration")

    lineno = 0
    stack = [mapper()]
    expect_bracket = False

    re_keyvalue = re.compile(r'^("(?P<qkey>(?:\\.|[^\\"])+)"|(?P<key>#?[a-z0-9\-\_\\\?]+))'
                             r'([ \t]*('
                             r'"(?P<qval>(?:\\.|[^\\"])*)(?P<vq_end>")?'
                             r'|(?P<val>[a-z0-9\-\_\\\?\*\.]+)'
                             r'))?',
                             flags=re.I)

    for lineno, line in enumerate(fp, 1):
        if lineno == 1:
            line = strip_bom(line)

        line = line.lstrip()

        # skip empty and comment lines
        if line == "" or line[0] == '/':
            continue

        # one level deeper
        if line[0] == "{":
            expect_bracket = False
            continue

        if expect_bracket:
            raise SyntaxError("vdf.parse: expected opening bracket",
                              (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 1, line))

        # one level back
        if line[0] == "}":
            if len(stack) > 1:
                stack.pop()
                continue

            raise SyntaxError("vdf.parse: one too many closing brackets",
                              (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line))

        # parse keyvalue pairs
        while True:
            match = re_keyvalue.match(line)

            if not match:
                try:
                    line += next(fp)
                    continue
                except StopIteration:
                    raise SyntaxError("vdf.parse: unexpected EOF (open key quote?)",
                                      (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line))

            key = match.group('key') if match.group('qkey') is None else match.group('qkey')
            val = match.group('val') if match.group('qval') is None else match.group('qval')

            if escaped:
                key = _unescape(key)

            # we have a key whose value is a nested block, so we make a new dict obj (level deeper)
            if val is None:
                if merge_duplicate_keys and key in stack[-1]:
                    _m = stack[-1][key]
                else:
                    _m = mapper()
                    stack[-1][key] = _m

                stack.append(_m)
                expect_bracket = True

            # we've matched a simple keyvalue pair, map it to the last dict obj in the stack
            else:
                # if the value spans multiple lines, consume one more line and try to match again,
                # until we get the complete KeyValue pair
                if match.group('vq_end') is None and match.group('qval') is not None:
                    try:
                        line += next(fp)
                        continue
                    except StopIteration:
                        raise SyntaxError("vdf.parse: unexpected EOF (open quote for value?)",
                                          (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line))

                stack[-1][key] = _unescape(val) if escaped else val

            # exit the loop
            break

    if len(stack) != 1:
        raise SyntaxError("vdf.parse: unclosed brackets or quotes (EOF)",
                          (getattr(fp, 'name', '<%s>' % fp.__class__.__name__), lineno, 0, line))

    return stack.pop()
def loads(s, **kwargs):
    """
    Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a VDF
    document) to a Python object.
    """
    if not isinstance(s, string_type):
        raise TypeError("Expected s to be a str, got %s" % type(s))
    fp = unicodeIO(s)
    return parse(fp, **kwargs)


def load(fp, **kwargs):
    """
    Deserialize ``fp`` (a ``.readline()``-supporting file-like object containing
    a VDF document) to a Python object.
    """
    return parse(fp, **kwargs)


def dumps(obj, pretty=False, escaped=True):
    """
    Serialize ``obj`` to a VDF formatted ``str``.
    """
    if not isinstance(obj, dict):
        raise TypeError("Expected data to be an instance of ``dict``")
    if not isinstance(pretty, bool):
        raise TypeError("Expected pretty to be of type bool")
    if not isinstance(escaped, bool):
        raise TypeError("Expected escaped to be of type bool")

    return ''.join(_dump_gen(obj, pretty, escaped))


def dump(obj, fp, pretty=False, escaped=True):
    """
    Serialize ``obj`` as a VDF formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).
    """
    if not isinstance(obj, dict):
        raise TypeError("Expected data to be an instance of ``dict``")
    if not hasattr(fp, 'write'):
        raise TypeError("Expected fp to have write() method")
    if not isinstance(pretty, bool):
        raise TypeError("Expected pretty to be of type bool")
    if not isinstance(escaped, bool):
        raise TypeError("Expected escaped to be of type bool")

    for chunk in _dump_gen(obj, pretty, escaped):
        fp.write(chunk)
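A round-trip sketch of the text format handled by parse/loads and dumps (editor's illustration, assuming this module is importable as vdf):

# Editor's illustrative sketch: text-format VDF round trip.
import vdf

sample = '"root"\n{\n\t"name"\t"RetroDECK"\n\t"nested"\n\t{\n\t\t"key"\t"value"\n\t}\n}\n'
data = vdf.loads(sample)
assert data == {'root': {'name': 'RetroDECK', 'nested': {'key': 'value'}}}
print(vdf.dumps(data, pretty=True))  # re-serializes with tab indentation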
def _dump_gen(data, pretty=False, escaped=True, level=0):
    indent = "\t"
    line_indent = ""

    if pretty:
        line_indent = indent * level

    for key, value in data.items():
        if escaped and isinstance(key, string_type):
            key = _escape(key)

        if isinstance(value, dict):
            yield '%s"%s"\n%s{\n' % (line_indent, key, line_indent)
            for chunk in _dump_gen(value, pretty, escaped, level + 1):
                yield chunk
            yield "%s}\n" % line_indent
        else:
            if escaped and isinstance(value, string_type):
                value = _escape(value)

            yield '%s"%s" "%s"\n' % (line_indent, key, value)
# binary VDF
class BASE_INT(int_type):
    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self)


class UINT_64(BASE_INT):
    pass


class INT_64(BASE_INT):
    pass


class POINTER(BASE_INT):
    pass


class COLOR(BASE_INT):
    pass


BIN_NONE = b'\x00'
BIN_STRING = b'\x01'
BIN_INT32 = b'\x02'
BIN_FLOAT32 = b'\x03'
BIN_POINTER = b'\x04'
BIN_WIDESTRING = b'\x05'
BIN_COLOR = b'\x06'
BIN_UINT64 = b'\x07'
BIN_END = b'\x08'
BIN_INT64 = b'\x0A'
BIN_END_ALT = b'\x0B'
def binary_loads(s, mapper=dict, merge_duplicate_keys=True, alt_format=False):
    """
    Deserialize ``s`` (``bytes`` containing a VDF in "binary form")
    to a Python object.

    ``mapper`` specifies the Python object used after deserialization. ``dict`` is
    used by default. Alternatively, ``collections.OrderedDict`` can be used if you
    wish to preserve key order. Or any object that acts like a ``dict``.

    ``merge_duplicate_keys`` when ``True`` will merge multiple KeyValue lists with the
    same key into one instead of overwriting. You can set this to ``False`` if you are
    using ``VDFDict`` and need to preserve the duplicates.
    """
    if not isinstance(s, bytes):
        raise TypeError("Expected s to be bytes, got %s" % type(s))
    if not issubclass(mapper, dict):
        raise TypeError("Expected mapper to be subclass of dict, got %s" % type(mapper))

    # helpers
    int32 = struct.Struct('<i')
    uint64 = struct.Struct('<Q')
    int64 = struct.Struct('<q')
    float32 = struct.Struct('<f')

    def read_string(s, idx, wide=False):
        if wide:
            end = s.find(b'\x00\x00', idx)
            if (end - idx) % 2 != 0:
                end += 1
        else:
            end = s.find(b'\x00', idx)

        if end == -1:
            raise SyntaxError("Unterminated cstring (offset: %d)" % idx)
        result = s[idx:end]
        if wide:
            result = result.decode('utf-16')
        elif bytes is not str:
            result = result.decode('utf-8', 'replace')
        else:
            try:
                result.decode('ascii')
            except:
                result = result.decode('utf-8', 'replace')
        return result, end + (2 if wide else 1)

    stack = [mapper()]
    idx = 0
    CURRENT_BIN_END = BIN_END if not alt_format else BIN_END_ALT

    while len(s) > idx:
        t = s[idx:idx + 1]
        idx += 1

        if t == CURRENT_BIN_END:
            if len(stack) > 1:
                stack.pop()
                continue
            break

        key, idx = read_string(s, idx)

        if t == BIN_NONE:
            if merge_duplicate_keys and key in stack[-1]:
                _m = stack[-1][key]
            else:
                _m = mapper()
                stack[-1][key] = _m
            stack.append(_m)
        elif t == BIN_STRING:
            stack[-1][key], idx = read_string(s, idx)
        elif t == BIN_WIDESTRING:
            stack[-1][key], idx = read_string(s, idx, wide=True)
        elif t in (BIN_INT32, BIN_POINTER, BIN_COLOR):
            val = int32.unpack_from(s, idx)[0]

            if t == BIN_POINTER:
                val = POINTER(val)
            elif t == BIN_COLOR:
                val = COLOR(val)

            stack[-1][key] = val
            idx += int32.size
        elif t == BIN_UINT64:
            stack[-1][key] = UINT_64(uint64.unpack_from(s, idx)[0])
            idx += uint64.size
        elif t == BIN_INT64:
            stack[-1][key] = INT_64(int64.unpack_from(s, idx)[0])
            idx += int64.size
        elif t == BIN_FLOAT32:
            stack[-1][key] = float32.unpack_from(s, idx)[0]
            idx += float32.size
        else:
            raise SyntaxError("Unknown data type at offset %d: %s" % (idx - 1, repr(t)))

    if len(s) != idx or len(stack) != 1:
        raise SyntaxError("Binary VDF ended at offset %d, but length is %d" % (idx, len(s)))

    return stack.pop()
def binary_dumps(obj, alt_format=False):
    """
    Serialize ``obj`` to a binary VDF formatted ``bytes``.
    """
    return b''.join(_binary_dump_gen(obj, alt_format=alt_format))
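A round-trip sketch of the binary format (editor's illustration; this is the encoding Steam uses for shortcuts.vdf, assuming this module is importable as vdf):

# Editor's illustrative sketch: binary VDF round trip.
import vdf

payload = {'shortcuts': {'0': {'appname': 'Example', 'Exe': '/usr/bin/true', 'appid': -1337}}}
blob = vdf.binary_dumps(payload)
assert vdf.binary_loads(blob) == payload  # strings, ints and nested dicts survive the trip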
def _binary_dump_gen(obj, level=0, alt_format=False):
    if level == 0 and len(obj) == 0:
        return

    int32 = struct.Struct('<i')
    uint64 = struct.Struct('<Q')
    int64 = struct.Struct('<q')
    float32 = struct.Struct('<f')

    for key, value in obj.items():
        if isinstance(key, string_type):
            key = key.encode('utf-8')
        else:
            raise TypeError("dict keys must be of type str, got %s" % type(key))

        if isinstance(value, dict):
            yield BIN_NONE + key + BIN_NONE
            for chunk in _binary_dump_gen(value, level + 1, alt_format=alt_format):
                yield chunk
        elif isinstance(value, UINT_64):
            yield BIN_UINT64 + key + BIN_NONE + uint64.pack(value)
        elif isinstance(value, INT_64):
            yield BIN_INT64 + key + BIN_NONE + int64.pack(value)
        elif isinstance(value, string_type):
            try:
                value = value.encode('utf-8') + BIN_NONE
                yield BIN_STRING
            except:
                value = value.encode('utf-16') + BIN_NONE * 2
                yield BIN_WIDESTRING
            yield key + BIN_NONE + value
        elif isinstance(value, float):
            yield BIN_FLOAT32 + key + BIN_NONE + float32.pack(value)
        elif isinstance(value, (COLOR, POINTER, int, int_type)):
            if isinstance(value, COLOR):
                yield BIN_COLOR
            elif isinstance(value, POINTER):
                yield BIN_POINTER
            else:
                yield BIN_INT32
            yield key + BIN_NONE
            yield int32.pack(value)
        else:
            raise TypeError("Unsupported type: %s" % type(value))

    yield BIN_END if not alt_format else BIN_END_ALT
def vbkv_loads(s, mapper=dict, merge_duplicate_keys=True):
    """
    Deserialize ``s`` (``bytes`` containing a VBKV) to a Python object.

    ``mapper`` specifies the Python object used after deserialization. ``dict`` is
    used by default. Alternatively, ``collections.OrderedDict`` can be used if you
    wish to preserve key order. Or any object that acts like a ``dict``.

    ``merge_duplicate_keys`` when ``True`` will merge multiple KeyValue lists with the
    same key into one instead of overwriting. You can set this to ``False`` if you are
    using ``VDFDict`` and need to preserve the duplicates.
    """
    if s[:4] != b'VBKV':
        raise ValueError("Invalid header")

    checksum, = struct.unpack('<I', s[4:8])  # CRC32 is unsigned; '<i' would mismatch high checksums

    if checksum != crc32(s[8:]):
        raise ValueError("Invalid checksum")

    return binary_loads(s[8:], mapper, merge_duplicate_keys, alt_format=True)


def vbkv_dumps(obj):
    """
    Serialize ``obj`` to a VBKV formatted ``bytes``.
    """
    data = b''.join(_binary_dump_gen(obj, alt_format=True))
    checksum = crc32(data)  # non-negative in Python 3, so pack as unsigned

    return b'VBKV' + struct.pack('<I', checksum) + data
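A quick sketch of the VBKV framing (editor's illustration; relies on the unsigned-checksum handling noted above):

# Editor's illustrative sketch: VBKV = b'VBKV' magic + CRC32 of payload + alt-format binary VDF.
import vdf

blob = vdf.vbkv_dumps({'key': 'value'})
assert blob[:4] == b'VBKV'                        # magic header
assert vdf.vbkv_loads(blob) == {'key': 'value'}   # checksum is verified on load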
@@ -1,219 +0,0 @@
# pylint: disable=no-member,unnecessary-dunder-call
from collections import Counter

_iter_values = 'values'
_range = range
_string_type = str
import collections as _c


class _kView(_c.KeysView):
    def __iter__(self):
        return self._mapping.iterkeys()


class _vView(_c.ValuesView):
    def __iter__(self):
        return self._mapping.itervalues()


class _iView(_c.ItemsView):
    def __iter__(self):
        return self._mapping.iteritems()


class VDFDict(dict):
    def __init__(self, data=None):
        """
        This is a dictionary that supports duplicate keys and preserves insert order

        ``data`` can be a ``dict``, or a sequence of key-value tuples. (e.g. ``[('key', 'value'),..]``)
        The only supported type for key is str.

        Get/set of duplicates is done by tuples ``(index, key)``, where index is the duplicate index
        for the specified key. (e.g. ``(0, 'key')``, ``(1, 'key')``...)

        When the ``key`` is ``str`` instead of a tuple, set will create a duplicate and get will look up ``(0, key)``
        """
        super().__init__()
        self.__omap = []
        self.__kcount = Counter()

        if data is not None:
            if not isinstance(data, (list, dict)):
                raise ValueError("Expected data to be list of pairs or dict, got %s" % type(data))
            self.update(data)

    def __repr__(self):
        out = "%s(" % self.__class__.__name__
        out += "%s)" % repr(list(self.iteritems()))
        return out

    def __len__(self):
        return len(self.__omap)

    def _verify_key_tuple(self, key):
        if len(key) != 2:
            raise ValueError("Expected key tuple length to be 2, got %d" % len(key))
        if not isinstance(key[0], int):
            raise TypeError("Key index should be an int")
        if not isinstance(key[1], _string_type):
            raise TypeError("Key value should be a str")

    def _normalize_key(self, key):
        if isinstance(key, _string_type):
            key = (0, key)
        elif isinstance(key, tuple):
            self._verify_key_tuple(key)
        else:
            raise TypeError("Expected key to be a str or tuple, got %s" % type(key))
        return key

    def __setitem__(self, key, value):
        if isinstance(key, _string_type):
            key = (self.__kcount[key], key)
            self.__omap.append(key)
        elif isinstance(key, tuple):
            self._verify_key_tuple(key)
            if key not in self:
                raise KeyError("%s doesn't exist" % repr(key))
        else:
            raise TypeError("Expected either a str or tuple for key")
        super().__setitem__(key, value)
        self.__kcount[key[1]] += 1

    def __getitem__(self, key):
        return super().__getitem__(self._normalize_key(key))

    def __delitem__(self, key):
        key = self._normalize_key(key)
        result = super().__delitem__(key)

        start_idx = self.__omap.index(key)
        del self.__omap[start_idx]

        dup_idx, skey = key
        self.__kcount[skey] -= 1
        tail_count = self.__kcount[skey] - dup_idx

        if tail_count > 0:
            for idx in _range(start_idx, len(self.__omap)):
                if self.__omap[idx][1] == skey:
                    oldkey = self.__omap[idx]
                    newkey = (dup_idx, skey)
                    super().__setitem__(newkey, self[oldkey])
                    super().__delitem__(oldkey)
                    self.__omap[idx] = newkey

                    dup_idx += 1
                    tail_count -= 1
                    if tail_count == 0:
                        break

        if self.__kcount[skey] == 0:
            del self.__kcount[skey]

        return result

    def __iter__(self):
        return iter(self.iterkeys())

    def __contains__(self, key):
        return super().__contains__(self._normalize_key(key))

    def __eq__(self, other):
        if isinstance(other, VDFDict):
            return list(self.items()) == list(other.items())
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def clear(self):
        super().clear()
        self.__kcount.clear()
        self.__omap = []

    def get(self, key, *args):
        return super().get(self._normalize_key(key), *args)

    def setdefault(self, key, default=None):
        if key not in self:
            self.__setitem__(key, default)
        return self.__getitem__(key)

    def pop(self, key):
        key = self._normalize_key(key)
        value = self.__getitem__(key)
        self.__delitem__(key)
        return value

    def popitem(self):
        if not self.__omap:
            raise KeyError("VDFDict is empty")
        key = self.__omap[-1]
        return key[1], self.pop(key)

    def update(self, data=None, **kwargs):
        if isinstance(data, dict):
            data = data.items()
        elif not isinstance(data, list):
            raise TypeError("Expected data to be a list or dict, got %s" % type(data))

        for key, value in data:
            self.__setitem__(key, value)

    def iterkeys(self):
        return (key[1] for key in self.__omap)

    def keys(self):
        return _kView(self)

    def itervalues(self):
        return (self[key] for key in self.__omap)

    def values(self):
        return _vView(self)

    def iteritems(self):
        return ((key[1], self[key]) for key in self.__omap)

    def items(self):
        return _iView(self)

    def get_all_for(self, key):
        """ Returns all values of the given key """
        if not isinstance(key, _string_type):
            raise TypeError("Key needs to be a string.")
        return [self[(idx, key)] for idx in _range(self.__kcount[key])]

    def remove_all_for(self, key):
        """ Removes all items with the given key """
        if not isinstance(key, _string_type):
            raise TypeError("Key needs to be a string.")

        for idx in _range(self.__kcount[key]):
            super().__delitem__((idx, key))

        self.__omap = list(filter(lambda x: x[1] != key, self.__omap))

        del self.__kcount[key]

    def has_duplicates(self):
        """
        Returns ``True`` if the dict contains keys with duplicates.
        Recurses through all values that are ``VDFDict`` instances.
        """
        for n in getattr(self.__kcount, _iter_values)():
            if n != 1:
                return True

        def dict_recurse(obj):
            for v in getattr(obj, _iter_values)():
                if isinstance(v, VDFDict) and v.has_duplicates():
                    return True
                if isinstance(v, dict):
                    return dict_recurse(v)
            return False

        return dict_recurse(self)
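A behavior sketch of the duplicate-key semantics (editor's illustration):

# Editor's illustrative sketch: VDFDict preserves duplicate keys, addressed as (index, key).
d = VDFDict()
d['controller'] = 'xbox'
d['controller'] = 'dualshock'        # plain str keys append duplicates instead of overwriting
assert len(d) == 2
assert d['controller'] == 'xbox'     # a bare key reads (0, 'controller')
assert d[(1, 'controller')] == 'dualshock'
assert d.get_all_for('controller') == ['xbox', 'dualshock']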
@@ -3,7 +3,8 @@ runtime: org.kde.Platform
runtime-version: "6.5"
sdk: org.kde.Sdk
sdk-extensions:
  - org.freedesktop.Sdk.Extension.llvm16 # Needed for rpcs3 (llvm15) but llvm16 for CITRA
  - org.freedesktop.Sdk.Extension.rust-stable # Needed for BoilR
# base: io.qt.qtwebengine.BaseApp # Needed for Yuzu - Disabled as we're using AppImage for Yuzu
# base-version: "6.5" # Needed for Yuzu - Disabled as we're using AppImage for Yuzu
command: retrodeck.sh
@@ -43,6 +44,10 @@ finish-args:
  - --env=SDL_VIDEO_WAYLAND_WMCLASS=net.retrodeck.retrodeck
  # XEMU - Fixes issues with openSUSE systems, QEMU_AUDIO_DRV is defined as "pa" causing xemu to not launch
  - --unset-env=QEMU_AUDIO_DRV
  # BoilR
  - --filesystem=xdg-data/Steam:rw # Steam (Flatpak)
  - --filesystem=~/.steam:rw # Steam (non-Flatpak)
  - --filesystem=~/.var/app/com.valvesoftware.Steam:rw # Steam (Flatpak)

cleanup:
  # ES-DE
@@ -447,6 +452,24 @@ modules:

  # External manifests start

  - name: boilr
    buildsystem: simple
    build-options:
      append-path: /usr/lib/sdk/rust-stable/bin
      env:
        # To pick up vendored deps
        CARGO_HOME: /run/build/boilr/cargo
        RUSTFLAGS: "-L /app/lib/"
    build-commands:
      - cargo --offline fetch --manifest-path Cargo.toml --verbose
      - cargo --offline build --release --verbose --features flatpak
      - install -Dm755 ./target/release/boilr -t /app/bin/
    sources:
      - type: git
        url: https://github.com/PhilipK/BoilR
        commit: b33d965f227fe971fd590cb022f608521b506ef3
      - rd-submodules/boilr/cargo-lock.json

  # RetroArch - START
  # https://github.com/flathub/org.libretro.RetroArch
rd-submodules/boilr/cargo-lock.json (new file, 5325 lines)
File diff suppressed because it is too large.
@@ -115,7 +115,7 @@ configurator_welcome_dialog() {
  "RetroDECK: Tools" "Compress games, move RetroDECK and install optional features" \
  "RetroDECK: Troubleshooting" "Backup data, perform BIOS / multi-disc file checks and emulator resets" \
  "RetroDECK: About" "Show additional information about RetroDECK" \
  "Add to Steam" "Add to Steam all the favorite games, it will not remove added games")
  "Sync with Steam" "Sync all the favorite games with Steam")
  fi

  choice=$(zenity --list --title="RetroDECK Configurator Utility" --cancel-label="Quit" \