API updates

icenine451 2025-04-04 18:00:04 -04:00
parent 29e4214368
commit a7f25d3757
4 changed files with 221 additions and 10 deletions

View file

@@ -0,0 +1,142 @@
#!/bin/bash
# This is the main data processing hub for the RetroDECK API.
# It handles the direct demands of API requests by leveraging the rest of the RetroDECK functions.
# Most of these functions are adapted from the ones built for the Zenity Configurator, with the Zenity specifics pulled out and all data passed through JSON objects.
api_find_compatible_games() {
# Supported parameters:
# "everything" - All games found (regardless of format)
# "all" - Only user-chosen games (later selected via checklist)
# "chd", "zip", "rvz" - Only games matching that compression type
log d "Started find_compatible_games with parameter: $1"
local target_selection="$1"
local compression_format
if [[ "$1" == "everything" ]]; then
compression_format="all"
else
compression_format="$1"
fi
local compressible_systems_list
if [[ "$compression_format" == "all" ]]; then
compressible_systems_list=$(jq -r '.compression_targets | to_entries[] | .value[]' "$features")
log d "compressible_systems_list: $compressible_systems_list"
else
compressible_systems_list=$(jq -r '.compression_targets["'"$compression_format"'"][]' "$features")
log d "compressible_systems_list: $compressible_systems_list"
fi
log d "Finding compatible games for compression ($1)"
log d "compression_targets: $compression_targets"
local output_file="$(mktemp)"
# Initialize the empty JSON file meant for final output
echo '[]' > "$output_file"
while IFS= read -r system; do
while (( $(jobs -p | wc -l) >= $max_threads )); do # Wait for a background task to finish if max_threads has been hit
sleep 0.1
done
(
if [[ -d "$roms_folder/$system" ]]; then
local compression_candidates
compression_candidates=$(find "$roms_folder/$system" -type f -not -iname "*.txt")
if [[ -n "$compression_candidates" ]]; then
while IFS= read -r game; do
while (( $(jobs -p | wc -l) >= $max_threads )); do # Wait for a background task to finish if max_threads has been hit
sleep 0.1
done
(
local compatible_compression_format
compatible_compression_format=$(find_compatible_compression_format "$game")
if [[ -f "${game%.*}.$compatible_compression_format" ]]; then # If a compressed version of this game already exists
log d "Skipping $game because a $compatible_compression_format version already exists."
exit
fi
local file_ext="${game##*.}"
case "$compression_format" in
"chd")
if [[ "$compatible_compression_format" == "chd" ]]; then
log d "Game $game is compatible with CHD compression"
# Build a JSON object for this game
json_init
json_add "game" "$game"
json_add "compression" "$compatible_compression_format"
# Build the complete JSON object for this game
json_obj=$(json_build)
# Write the final JSON object to the output file, locking it to prevent write race conditions.
(
flock -x 200
jq --argjson obj "$json_obj" '. + [$obj]' "$output_file" > "$output_file.tmp" && mv "$output_file.tmp" "$output_file"
) 200>"$RD_FILE_LOCK"
fi
;;
"zip")
if [[ "$compatible_compression_format" == "zip" ]]; then
log d "Game $game is compatible with ZIP compression"
# Build a JSON object for this game.
json_init
json_add "game" "$game"
json_add "compression" "$compatible_compression_format"
# Build the complete JSON object for this game
json_obj=$(json_build)
# Write the final JSON object to the output file, locking it to prevent write race conditions.
(
flock -x 200
jq --argjson obj "$json_obj" '. + [$obj]' "$output_file" > "$output_file.tmp" && mv "$output_file.tmp" "$output_file"
) 200>"$RD_FILE_LOCK"
fi
;;
"rvz")
if [[ "$compatible_compression_format" == "rvz" ]]; then
log d "Game $game is compatible with ZIP compression"
# Build a JSON object for this game.
json_init
json_add "game" "$game"
json_add "compression" "$compatible_compression_format"
# Build the complete JSON object for this game
json_obj=$(json_build)
# Write the final JSON object to the output file, locking it to prevent write race conditions.
(
flock -x 200
jq --argjson obj "$json_obj" '. + [$obj]' "$output_file" > "$output_file.tmp" && mv "$output_file.tmp" "$output_file"
) 200>"$RD_FILE_LOCK"
fi
;;
"all")
if [[ "$compatible_compression_format" != "none" ]]; then
log d "Game $game is compatible with ZIP compression"
# Build a JSON object for this game.
json_init
json_add "game" "$game"
json_add "compression" "$compatible_compression_format"
# Build the complete JSON object for this game
json_obj=$(json_build)
# Write the final JSON object to the output file, locking it to prevent write race conditions.
(
flock -x 200
jq --argjson obj "$json_obj" '. + [$obj]' "$output_file" > "$output_file.tmp" && mv "$output_file.tmp" "$output_file"
) 200>"$RD_FILE_LOCK"
fi
;;
esac
) &
done < <(printf '%s\n' "$compression_candidates")
wait # wait for background tasks to finish
fi
else
log d "Rom folder for $system is missing, skipping"
fi
) &
done < <(printf '%s\n' "$compressible_systems_list")
wait # wait for background tasks to finish
final_json=$(cat "$output_file")
echo "$final_json"
}
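As a sketch of how a caller might consume the output of api_find_compatible_games (illustrative only; it assumes the RetroDECK runtime providing $features, $roms_folder, $max_threads and $RD_FILE_LOCK is already sourced, and the jq filters are just examples):

# Illustrative caller sketch (assumes the RetroDECK runtime is sourced)
compressible_games=$(api_find_compatible_games "chd")
# Each element is an object of the form {"game": "<path>", "compression": "chd"}
echo "$compressible_games" | jq -r '.[] | "\(.game) -> \(.compression)"'
# Number of compression candidates found
echo "$compressible_games" | jq 'length'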

View file

@@ -4,7 +4,7 @@
# It will accept JSON objects as requests in a single FIFO request pipe ($REQUEST_PIPE)
# and return each processed response through a unique named pipe, which MUST be created by the requesting client.
# Each JSON object needs, at minimum, an "action" element with a valid value and a "request_id" element with a unique value.
# Each processed response will be returned on a FIFO named pipe at the location "$XDG_CONFIG_HOME/retrodeck/api/response_$request_id" so that actions can be processed asynchronously
# Each processed response will be returned on a FIFO named pipe at the location "/tmp/response_$request_id" so that actions can be processed asynchronously
# If the response pipe does not exist when the data is done processing, the response will not be sent at all, so the client must ensure that the response pipe exists when the JSON object is sent to the server!
# The response ID can be any unique value; an example ID generation statement in Bash is request_id="retrodeck_request_$(date +%s)_$$"
# The server can be started, stopped, or have its running status checked by calling the script like this: retrodeck_api start
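A minimal client-side sketch of this request/response flow (illustrative; it assumes $REQUEST_PIPE points at the server's request pipe and that the response pipe directory matches the one the server writes to):

# Illustrative client sketch (assumes $REQUEST_PIPE and the server's response directory)
request_id="retrodeck_request_$(date +%s)_$$"
response_pipe="/tmp/response_${request_id}" # must live where the server expects to write responses
mkfifo "$response_pipe" # the client MUST create the response pipe before sending the request
echo "{\"action\":\"check_status\",\"request_id\":\"$request_id\"}" > "$REQUEST_PIPE"
response=$(cat "$response_pipe") # blocks until the server writes the response
rm -f "$response_pipe"
echo "$response" | jq .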
@@ -75,7 +75,7 @@ run_server() {
cleanup() {
# Cleanup function to ensure named pipe is removed on exit
log d "Cleaning up server resources..."
rm -f "$REQUEST_PIPE"
rm -f "$PID_FILE" "$REQUEST_PIPE"
exit 0
}
@@ -118,7 +118,7 @@ process_request() {
return 1
fi
local response_pipe="$rd_api_dir/response_${request_id}"
local response_pipe="/home/deck/.var/app/net.retrodeck.retrodeck/config/retrodeck/api/response_${request_id}"
if [[ ! -p "$response_pipe" ]]; then
echo "Error: Response pipe $response_pipe does not exist" >&2
@@ -132,20 +132,35 @@ process_request() {
# Process request asynchronously
{
local data
data=$(echo "$json_input" | jq -r '.data // empty')
if [[ -z "$data" ]]; then
echo "{\"status\":\"error\",\"message\":\"Missing required field: data\",\"request_id\":\"$request_id\"}" > "$response_pipe"
return 1
fi
case "$action" in
"check_status")
echo "{\"status\":\"success\",\"request_id\":\"$request_id\"}" > "$response_pipe"
;;
"wait")
local data
data=$(echo "$json_input" | jq -r '.data // empty')
if [[ -z "$data" ]]; then
echo "{\"status\":\"error\",\"message\":\"Missing required field: data\",\"request_id\":\"$request_id\"}" > "$response_pipe"
return 1
fi
local result=$(wait_example_function "$data")
echo "{\"status\":\"success\",\"result\":$result,\"request_id\":\"$request_id\"}" > "$response_pipe"
;;
"get")
case $data in
"compressible_games")
local compression_format=$(echo "$json_input" | jq -r '.format // empty')
if [[ -n "$compression_format" ]]; then
local result
if result=$(api_find_compatible_games "$compression_format"); then
echo "{\"status\":\"success\",\"result\":$result,\"request_id\":\"$request_id\"}" > "$response_pipe"
else
echo "{\"status\":\"error\",\"message\":$result,\"request_id\":\"$request_id\"}" > "$response_pipe"
fi
else
echo "{\"status\":\"error\",\"message\":\"Missing required field: format\",\"request_id\":\"$request_id\"}" > "$response_pipe"
fi
;;
esac
;;
*)
echo "{\"status\":\"error\",\"message\":\"Unknown action: $action\",\"request_id\":\"$request_id\"}" > "$response_pipe"
;;
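For reference, a request exercising the new "get" action above could look like the following (illustrative values; "format" is forwarded to api_find_compatible_games):

# Illustrative request for the "get" -> "compressible_games" path (values are examples)
echo '{"action":"get","data":"compressible_games","format":"chd","request_id":"retrodeck_request_1712345678_4242"}' > "$REQUEST_PIPE"
# On success the server answers on the matching response pipe with something like:
# {"status":"success","result":[{"game":"/path/to/rom.cue","compression":"chd"}],"request_id":"retrodeck_request_1712345678_4242"}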

View file

@@ -44,7 +44,9 @@ fi
log i "CPU: Using $max_threads out of $cpu_cores available CPU cores for multi-threaded operations"
source /app/libexec/050_save_migration.sh
source /app/libexec/api.sh
source /app/libexec/api_data_processing.sh
source /app/libexec/api_server.sh
source /app/libexec/json_processing.sh
source /app/libexec/checks.sh
source /app/libexec/compression.sh
source /app/libexec/dialogs.sh

View file

@@ -0,0 +1,52 @@
#!/bin/bash
# This is a reusable JSON object creator, meant to abstract the actual jq commands for easier readability and use.
# The purpose is similar to the get_setting_value and set_setting_value functions, where you don't need to know the actual sed commands to get what you want.
json_init() {
# Initialize an empty JSON file (using a temp file)
# This temp file will be accessible to any function in the shell that created it
# If multiple subshells are being used (for concurrent multi-threading), multiple temp files will be created.
JSON_BUILDER_TMP=$(mktemp)
echo '{}' > "$JSON_BUILDER_TMP"
}
json_add() {
# Adds a string or raw value to the JSON
# type = "string" (default) or "raw". "type" can be omitted and will default to string, which is generally fine; if the value is numeric and will be used for math, storing it raw saves a step downstream.
# Usage: json_add "key" "value" "type"
local key="$1"
local value="$2"
local type="${3:-string}"
if [[ "$type" == "raw" ]]; then
jq --argjson val "$value" ". + {\"$key\": \$val}" "$JSON_BUILDER_TMP" > "$JSON_BUILDER_TMP.tmp"
else
jq --arg val "$value" ". + {\"$key\": \$val}" "$JSON_BUILDER_TMP" > "$JSON_BUILDER_TMP.tmp"
fi
mv "$JSON_BUILDER_TMP.tmp" "$JSON_BUILDER_TMP"
}
json_add_array() {
# Add an array (from bash array) to the JSON
# USAGE: json_add_array "key" "${my_array[@]}"
local key="$1"
shift
local arr=("$@")
# Convert bash array to JSON array
local json_array
json_array=$(printf '%s\n' "${arr[@]}" | jq -R . | jq -s .)
jq --argjson val "$json_array" ". + {\"$key\": \$val}" "$JSON_BUILDER_TMP" > "$JSON_BUILDER_TMP.tmp"
mv "$JSON_BUILDER_TMP.tmp" "$JSON_BUILDER_TMP"
}
json_build() {
# This exports the final JSON object and removes the temp file.
# USAGE: (after building the JSON object in the temp file using the above functions) json_object=$(json_build)
cat "$JSON_BUILDER_TMP"
rm -f "$JSON_BUILDER_TMP"
}
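A short usage sketch of the builder functions above (key names and values are illustrative):

# Illustrative usage of the JSON builder (key names and values are examples)
json_init
json_add "game" "/retrodeck/roms/gc/game.iso"
json_add "compression" "rvz"
json_add "size_bytes" "1459978240" "raw" # stored as a JSON number instead of a string
json_add_array "tags" "verified" "multi-disc"
game_json=$(json_build)
echo "$game_json" | jq .
# -> {"game":"/retrodeck/roms/gc/game.iso","compression":"rvz","size_bytes":1459978240,"tags":["verified","multi-disc"]}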