mirror of https://github.com/Kas-tle/java2bedrock.sh.git synced 2025-12-19 14:59:13 +00:00

Add more config opts

Kas-tle authored on 2022-10-24 18:14:46 +00:00, committed by GitHub
parent b13db2ff81
commit 92fcf21aa2
4 changed files with 102 additions and 71 deletions


@@ -83,6 +83,15 @@ body:
 - entity_alphatest_one_sided
 validations:
 required: false
+- type: dropdown
+id: archive-scratch
+attributes:
+label: Archive Scratch Files
+description: |
+Should scratch files be archived? This will create a zip file containing the scratch files used by the script during the conversion process. If not selected, this will default to `false`.
+options:
+- true
+- false
 - type: checkboxes
 id: terms
 attributes:


@@ -18,6 +18,7 @@ jobs:
 default_assets_version: ${{ steps.organize-inputs.outputs.DEFAULT_ASSETS_VERSION }}
 block_material: ${{ steps.organize-inputs.outputs.BLOCK_MATERIAL }}
 attachable_material: ${{ steps.organize-inputs.outputs.ATTACHABLE_MATERIAL }}
+archive_scratch: ${{ steps.organize-inputs.outputs.ARCHIVE_SCRATCH }}
 steps:
 - name: Issue Forms Body Parser
 id: parse-issue
@@ -34,7 +35,8 @@ jobs:
"merge_pack_url": test_input(.["bedrock-merge-pack-direct-download-url"].text; " null ")[1:-1], "merge_pack_url": test_input(.["bedrock-merge-pack-direct-download-url"].text; " null ")[1:-1],
"default_assets_version": test_input(.["default-assets-version"].text; "1.19.2"), "default_assets_version": test_input(.["default-assets-version"].text; "1.19.2"),
"block_material": test_input(.["block-material"].text; "alpha_test"), "block_material": test_input(.["block-material"].text; "alpha_test"),
"attachable_material": test_input(.["attachable-material"].text; "entity_alphatest_one_sided") "attachable_material": test_input(.["attachable-material"].text; "entity_alphatest_one_sided"),
"archive_scratch": test_input(.["archive-scratch"].text; "false")
}' > inputs.json }' > inputs.json
echo "PACK_URL=$(jq -r '.pack_url' inputs.json)" >> $GITHUB_OUTPUT echo "PACK_URL=$(jq -r '.pack_url' inputs.json)" >> $GITHUB_OUTPUT
echo "DEFAULT_PACK_URL=$(jq -r '.default_pack_url' inputs.json)" >> $GITHUB_OUTPUT echo "DEFAULT_PACK_URL=$(jq -r '.default_pack_url' inputs.json)" >> $GITHUB_OUTPUT
@@ -42,6 +44,7 @@ jobs:
echo "DEFAULT_ASSETS_VERSION=$(jq -r '.default_assets_version' inputs.json)" >> $GITHUB_OUTPUT echo "DEFAULT_ASSETS_VERSION=$(jq -r '.default_assets_version' inputs.json)" >> $GITHUB_OUTPUT
echo "BLOCK_MATERIAL=$(jq -r '.block_material' inputs.json)" >> $GITHUB_OUTPUT echo "BLOCK_MATERIAL=$(jq -r '.block_material' inputs.json)" >> $GITHUB_OUTPUT
echo "ATTACHABLE_MATERIAL=$(jq -r '.attachable_material' inputs.json)" >> $GITHUB_OUTPUT echo "ATTACHABLE_MATERIAL=$(jq -r '.attachable_material' inputs.json)" >> $GITHUB_OUTPUT
echo "ARCHIVE_SCRATCH=$(jq -r '.archive_scratch' inputs.json)" >> $GITHUB_OUTPUT
convert-pack: convert-pack:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: get-pack-info needs: get-pack-info
@@ -78,6 +81,7 @@ jobs:
 DEFAULT_ASSETS_VERSION: ${{ needs.get-pack-info.outputs.default_assets_version }}
 BLOCK_MATERIAL: ${{ needs.get-pack-info.outputs.block_material }}
 ATTACHABLE_MATERIAL: ${{ needs.get-pack-info.outputs.attachable_material }}
+ARCHIVE_SCRATCH: ${{ needs.get-pack-info.outputs.archive_scratch }}
 run: |
 mkdir -p staging
 cp converter.sh staging/
@@ -89,7 +93,7 @@ jobs:
 COLUMNS=$COLUMNS-1 curl --no-styled-output -#L -o merge_pack.zip "${MERGE_PACK_URL}"
 MERGE_PACK_FILE="merge_pack.zip"
 fi
-./converter.sh input_pack.zip -w "false" -m ${MERGE_PACK_FILE} -a ${ATTACHABLE_MATERIAL} -b ${BLOCK_MATERIAL} -f ${DEFAULT_PACK_URL} -v ${DEFAULT_ASSETS_VERSION}
+./converter.sh input_pack.zip -w "false" -m ${MERGE_PACK_FILE} -a ${ATTACHABLE_MATERIAL} -b ${BLOCK_MATERIAL} -f ${DEFAULT_PACK_URL} -v ${DEFAULT_ASSETS_VERSION} -s ${ARCHIVE_SCRATCH} -u "true"
 - name: Upload converted pack
 uses: actions/upload-artifact@v3
 with:
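Outside of CI, the same conversion can be run locally; this is a minimal sketch mirroring the call above with a placeholder pack name and option values (the flag letters correspond to the getopts string added to converter.sh further down).

    # Placeholder pack name; -s keeps and archives scratch files, -u controls
    # the new ulimit override (the workflow above passes -u "true").
    ./converter.sh my_pack.zip -w "false" -s "true" -u "false"

With -s "true", the intermediate files are zipped to target/scratch_files.zip instead of being deleted, which makes failed conversions easier to debug.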

.gitignore

@@ -19,4 +19,5 @@ nongenerated.json
 item_textures.json
 atlases.json
 icons.json
 generated.json
+scratch_files/


@@ -69,7 +69,7 @@ else
 fi
 # get user defined start flags
-while getopts w:m:a:b:f:v: flag "${@:2}"
+while getopts w:m:a:b:f:v:s:u: flag "${@:2}"
 do
 case "${flag}" in
 w) warn=${OPTARG};;
@@ -78,9 +78,18 @@ do
 b) block_material=${OPTARG};;
 f) fallback_pack=${OPTARG};;
 v) default_asset_version=${OPTARG};;
+s) save_scratch=${OPTARG};;
+u) disable_ulimit=${OPTARG};;
 esac
 done
+if [[ ${disable_ulimit} == "true" ]]
+then
+ulimit -s unlimited
+status_message info "Changed ulimit settings for script:"
+ulimit -a
+fi
 # warn user about limitations of the script
 printf '\e[1;31m%-6s\e[m\n' "
 ███████████████████████████████████████████████████████████████████████████████
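For readers unfamiliar with getopts option strings, here is a minimal, self-contained sketch of the pattern used above (simplified: converter.sh itself parses "${@:2}" because its first positional argument is the pack file).

    #!/usr/bin/env bash
    # Each letter in the option string is a flag; a trailing colon means the
    # flag expects a value, which getopts places in OPTARG.
    while getopts s:u: flag
    do
        case "${flag}" in
            s) save_scratch=${OPTARG};;   # -s true|false: keep and archive scratch files
            u) disable_ulimit=${OPTARG};; # -u true|false: lift the per-process stack limit
        esac
    done
    if [[ ${disable_ulimit} == "true" ]]
    then
        ulimit -s unlimited   # same call the block added above makes
    fi
    echo "save_scratch=${save_scratch:-unset}, disable_ulimit=${disable_ulimit:-unset}"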
@@ -150,11 +159,12 @@ fi
 # Download geyser mappings
 status_message process "Downloading the latest geyser item mappings"
+mkdir -p ./scratch_files
 printf "\e[3m\e[37m"
 echo
-COLUMNS=$COLUMNS-1 curl --no-styled-output -#L -o item_mappings.json https://raw.githubusercontent.com/GeyserMC/mappings/master/items.json
+COLUMNS=$COLUMNS-1 curl --no-styled-output -#L -o scratch_files/item_mappings.json https://raw.githubusercontent.com/GeyserMC/mappings/master/items.json
 echo
-COLUMNS=$COLUMNS-1 curl --no-styled-output -#L -o item_texture.json https://raw.githubusercontent.com/Kas-tle/java2bedrockMappings/main/item_texture.json
+COLUMNS=$COLUMNS-1 curl --no-styled-output -#L -o scratch_files/item_texture.json https://raw.githubusercontent.com/Kas-tle/java2bedrockMappings/main/item_texture.json
 echo
 printf "${C_CLOSE}"
@@ -162,7 +172,7 @@ printf "${C_CLOSE}"
 # technically we only need to iterate over actual item models that contain overrides, but the constraints of bash would likely make such an approach less efficent
 status_message process "Iterating through all vanilla associated model JSONs to generate initial predicate config\nOn a large pack, this may take some time...\n"
-jq --slurpfile item_texture item_texture.json --slurpfile item_mappings item_mappings.json -n '
+jq --slurpfile item_texture scratch_files/item_texture.json --slurpfile item_mappings scratch_files/item_mappings.json -n '
 [inputs | {(input_filename | sub("(.+)/(?<itemname>.*?).json"; .itemname)): .overrides?[]?}] |
 def maxdur($input):
@@ -238,7 +248,7 @@ inputs | {
 }
 ]
-' ${model_array[@]} | sponge parents.json
+' ${model_array[@]} | sponge scratch_files/parents.json
 # add initial parental information to config.json
 status_message critical "Removing config entries with non-supported parentals\n"
@@ -269,22 +279,22 @@ def gtest($input_g):
 .[1] | map_values(. + ({"parent": (intest(.path) // null)} | if gtest(.parent) == null then . else empty end))
 | walk(if type == "object" then with_entries(select(.value != null)) else . end)
-' parents.json config.json | sponge config.json
+' scratch_files/parents.json config.json | sponge config.json
 # obtain hashes of all model predicate info to ensure consistent model naming
-jq -r '.[] | [.geyserID, (.item + "_c" + (.nbt.CustomModelData | tostring) + "_d" + (.nbt.Damage | tostring) + "_u" + (.nbt.Unbreakable | tostring))] | @tsv | gsub("\\t";",")' config.json > paths.csv
+jq -r '.[] | [.geyserID, (.item + "_c" + (.nbt.CustomModelData | tostring) + "_d" + (.nbt.Damage | tostring) + "_u" + (.nbt.Unbreakable | tostring))] | @tsv | gsub("\\t";",")' config.json > scratch_files/paths.csv
 function write_hash () {
 local hash=$(echo -n "${1}" | md5sum | head -c 7) && echo "${2},${hash}" >> "${3}"
 }
 while IFS=, read -r gid predicate
-do write_hash "${predicate}" "${gid}" "hashes.csv" &
-done < paths.csv > /dev/null
-jq -cR 'split(",")' hashes.csv | jq -s 'map({(.[0]): .[1]}) | add' > hashmap.json
-jq --slurpfile hashmap hashmap.json '
+do write_hash "${predicate}" "${gid}" "scratch_files/hashes.csv" &
+done < scratch_files/paths.csv > /dev/null
+jq -cR 'split(",")' scratch_files/hashes.csv | jq -s 'map({(.[0]): .[1]}) | add' > scratch_files/hashmap.json
+jq --slurpfile hashmap scratch_files/hashmap.json '
 map_values(
 .geyserID as $gid
 | . += {"path_hash": ("gmdl_" + ($hashmap[] | .[($gid)]))}
@@ -456,7 +466,7 @@ done
status_message completion "Initial pack setup complete\n" status_message completion "Initial pack setup complete\n"
jq -r '.[] | select(.parent != null) | [.path, .geyserID, .parent, .namespace, .model_path, .model_name, .path_hash] | @tsv | gsub("\\t";",")' config.json | sponge pa.csv jq -r '.[] | select(.parent != null) | [.path, .geyserID, .parent, .namespace, .model_path, .model_name, .path_hash] | @tsv | gsub("\\t";",")' config.json | sponge scratch_files/pa.csv
_start=1 _start=1
_end="$(jq -r '(. | length) + ([.[] | select(.parent != null)] | length)' config.json)" _end="$(jq -r '(. | length) + ([.[] | select(.parent != null)] | length)' config.json)"
@@ -483,10 +493,10 @@ do
 local model_name=${6}
 local path_hash=${7}
-local elements="$(jq -rc '.elements' ${file} | tee ${gid}.elements.temp)"
+local elements="$(jq -rc '.elements' ${file} | tee scratch_files/${gid}.elements.temp)"
 local element_parent=${file}
-local textures="$(jq -rc '.textures' ${file} | tee ${gid}.textures.temp)"
-local display="$(jq -rc '.display' ${file} | tee ${gid}.display.temp)"
+local textures="$(jq -rc '.textures' ${file} | tee scratch_files/${gid}.textures.temp)"
+local display="$(jq -rc '.display' ${file} | tee scratch_files/${gid}.display.temp)"
 status_message process "Locating parental info for child model with GeyserID ${gid}"
 # itterate through parented models until they all have geometry, display, and textures
@@ -494,38 +504,38 @@ do
 do
 if [[ ${elements} = null ]]
 then
-local elements="$(jq -rc '.elements' ${parental} 2> /dev/null | tee ${gid}.elements.temp || (echo && echo null))"
+local elements="$(jq -rc '.elements' ${parental} 2> /dev/null | tee scratch_files/${gid}.elements.temp || (echo && echo null))"
 local element_parent=${parental}
 fi
 if [[ ${textures} = null ]]
 then
-local textures="$(jq -rc '.textures' ${parental} 2> /dev/null | tee ${gid}.textures.temp || (echo && echo null))"
+local textures="$(jq -rc '.textures' ${parental} 2> /dev/null | tee scratch_files/${gid}.textures.temp || (echo && echo null))"
 fi
 if [[ ${display} = null ]]
 then
-local display="$(jq -rc '.display' ${parental} 2> /dev/null | tee ${gid}.display.temp || (echo && echo null))"
+local display="$(jq -rc '.display' ${parental} 2> /dev/null | tee scratch_files/${gid}.display.temp || (echo && echo null))"
 fi
 local parental="$(jq -rc 'def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; ("./assets/" + (.parent? | namespace) + "/models/" + ((.parent? // empty) | sub("(.*?)\\:"; "")) + ".json") // "null"' ${parental} 2> /dev/null || (echo && echo null))"
-local texture_0="$(jq -rc 'def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; ("./assets/" + ([.[]][0]? | namespace) + "/textures/" + (([.[]][0]? // empty) | sub("(.*?)\\:"; "")) + ".png") // "null"' ${gid}.textures.temp)"
+local texture_0="$(jq -rc 'def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; ("./assets/" + ([.[]][0]? | namespace) + "/textures/" + (([.[]][0]? // empty) | sub("(.*?)\\:"; "")) + ".png") // "null"' scratch_files/${gid}.textures.temp)"
 done
 # if we can, generate a model now
 if [[ ${elements} != null && ${textures} != null ]]
 then
-jq -n --slurpfile jelements ${gid}.elements.temp --slurpfile jtextures ${gid}.textures.temp --slurpfile jdisplay ${gid}.display.temp '
+jq -n --slurpfile jelements scratch_files/${gid}.elements.temp --slurpfile jtextures scratch_files/${gid}.textures.temp --slurpfile jdisplay scratch_files/${gid}.display.temp '
 {
 "textures": ($jtextures[]),
 "elements": ($jelements[])
 } + (if $jdisplay then ({"display": ($jdisplay[])}) else {} end)
 ' | sponge ${file}
-echo >> count.csv
-local tot_pos=$(wc -l < count.csv)
+echo >> scratch_files/count.csv
+local tot_pos=$(wc -l < scratch_files/count.csv)
 status_message completion "Located all parental info for Child ${gid}\n$(ProgressBar ${tot_pos} ${_end})"
 echo
 # check if this is a 2d item dervived from ./assets/minecraft/models/builtin/generated
 elif [[ ${textures} != null && ${parental} = "./assets/minecraft/models/builtin/generated.json" && -f "${texture_0}" ]]
 then
-jq -n --slurpfile jelements ${gid}.elements.temp --slurpfile jtextures ${gid}.textures.temp --slurpfile jdisplay ${gid}.display.temp '
+jq -n --slurpfile jelements scratch_files/${gid}.elements.temp --slurpfile jtextures scratch_files/${gid}.textures.temp --slurpfile jdisplay scratch_files/${gid}.display.temp '
 {
 "textures": ([$jtextures[]][0])
 } + (if $jdisplay then ({"display": ($jdisplay[])}) else {} end)
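The loop above walks a child model's `parent` chain until it has collected elements, textures, and display data; the file path of each parent is derived from the `parent` reference using the same jq pattern that appears throughout the script. A standalone sketch with a made-up model JSON:

    # A namespaced parent like "minecraft:item/handheld" resolves to a model
    # file under ./assets/<namespace>/models/.
    echo '{"parent": "minecraft:item/handheld"}' | jq -rc '
      def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end;
      "./assets/" + (.parent | namespace) + "/models/" + (.parent | sub("(.*?)\\:"; "")) + ".json"
    '
    # -> ./assets/minecraft/models/item/handheld.json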
@@ -534,32 +544,32 @@ do
mkdir -p "./target/rp/textures/geyser/geyser_custom/${namespace}/${model_path}" mkdir -p "./target/rp/textures/geyser/geyser_custom/${namespace}/${model_path}"
cp "${texture_0}" "./target/rp/textures/geyser/geyser_custom/${namespace}/${model_path}/${model_name}.png" cp "${texture_0}" "./target/rp/textures/geyser/geyser_custom/${namespace}/${model_path}/${model_name}.png"
# add texture to item atlas # add texture to item atlas
echo "${path_hash},textures/geyser/geyser_custom/${namespace}/${model_path}/${model_name}" >> icons.csv echo "${path_hash},textures/geyser/geyser_custom/${namespace}/${model_path}/${model_name}" >> scratch_files/icons.csv
echo "${gid}" >> generated.csv echo "${gid}" >> scratch_files/generated.csv
echo >> count.csv echo >> scratch_files/count.csv
local tot_pos=$(wc -l < count.csv) local tot_pos=$(wc -l < scratch_files/count.csv)
status_message completion "Located all parental info for 2D Child ${gid}\n$(ProgressBar ${tot_pos} ${_end})" status_message completion "Located all parental info for 2D Child ${gid}\n$(ProgressBar ${tot_pos} ${_end})"
echo echo
# otherwise, remove it from our config # otherwise, remove it from our config
else else
echo "${gid}" >> deleted.csv echo "${gid}" >> scratch_files/deleted.csv
echo >> count.csv echo >> scratch_files/count.csv
local tot_pos=$(wc -l < count.csv) local tot_pos=$(wc -l < scratch_files/count.csv)
status_message critical "Deleting ${gid} from config as no suitable parent information was found\n$(ProgressBar ${tot_pos} ${_end})" status_message critical "Deleting ${gid} from config as no suitable parent information was found\n$(ProgressBar ${tot_pos} ${_end})"
echo echo
fi fi
rm -f ${gid}.elements.temp ${gid}.textures.temp ${gid}.display.temp rm -f scratch_files/${gid}.elements.temp scratch_files/${gid}.textures.temp scratch_files/${gid}.display.temp
} }
wait_for_jobs wait_for_jobs
resolve_parental "${file}" "${gid}" "${parental}" "${namespace}" "${model_path}" "${model_name}" "${path_hash}" & resolve_parental "${file}" "${gid}" "${parental}" "${namespace}" "${model_path}" "${model_name}" "${path_hash}" &
done < pa.csv done < scratch_files/pa.csv
wait # wait for all the jobs to finish wait # wait for all the jobs to finish
# update generated models in config # update generated models in config
if [[ -f generated.csv ]] if [[ -f scratch_files/generated.csv ]]
then then
jq -cR 'split(",")' generated.csv | jq -s 'map({(.[0]): true}) | add' > generated.json jq -cR 'split(",")' scratch_files/generated.csv | jq -s 'map({(.[0]): true}) | add' > scratch_files/generated.json
jq -s ' jq -s '
.[0] as $generated_models .[0] as $generated_models
| .[1] | .[1]
@@ -567,25 +577,25 @@ then
 .geyserID as $gid
 | .generated = ($generated_models[($gid)] // false)
 )
-' generated.json config.json | sponge config.json
+' scratch_files/generated.json config.json | sponge config.json
 fi
 # add icon textures to item atlas
-if [[ -f icons.csv ]]
+if [[ -f scratch_files/icons.csv ]]
 then
-jq -cR 'split(",")' icons.csv | jq -s 'map({(.[0]): {"textures": .[1]}}) | add' > icons.json
+jq -cR 'split(",")' scratch_files/icons.csv | jq -s 'map({(.[0]): {"textures": .[1]}}) | add' > scratch_files/icons.json
 jq -s '
 .[0] as $icons
 | .[1]
 | .texture_data += $icons
-' icons.json ./target/rp/textures/item_texture.json | sponge ./target/rp/textures/item_texture.json
+' scratch_files/icons.json ./target/rp/textures/item_texture.json | sponge ./target/rp/textures/item_texture.json
 fi
 # delete unsuitable models
-if [[ -f deleted.csv ]]
+if [[ -f scratch_files/deleted.csv ]]
 then
-jq -cR 'split(",")' deleted.csv | jq -s '.' > deleted.json
-jq -s '.[0] as $deleted | .[1] | delpaths($deleted)' deleted.json config.json | sponge config.json
+jq -cR 'split(",")' scratch_files/deleted.csv | jq -s '.' > scratch_files/deleted.json
+jq -s '.[0] as $deleted | .[1] | delpaths($deleted)' scratch_files/deleted.json config.json | sponge config.json
 fi
 status_message process "Compiling final model list"
@@ -595,7 +605,7 @@ model_list=( $(jq -r '.[] | select(.generated == false) | .path' config.json) )
 # get our final texture list to be atlased
 # get a bash array of all texture files in our resource pack
 status_message process "Generating an array of all model PNG files to crosscheck with our atlas"
-jq -n '$ARGS.positional' --args $(find ./assets/**/textures -type f -name '*.png') | sponge all_textures.temp
+jq -n '$ARGS.positional' --args $(find ./assets/**/textures -type f -name '*.png') | sponge scratch_files/all_textures.temp
 # get bash array of all texture files listed in our models
 status_message process "Generating union atlas arrays for all model textures"
 jq -s '
@@ -603,7 +613,7 @@ def namespace:
if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; if contains(":") then sub("\\:(.+)"; "") else "minecraft" end;
[.[]| [.textures[]?] | unique] [.[]| [.textures[]?] | unique]
| map(map("./assets/" + (. | namespace) + "/textures/" + (. | sub("(.*?)\\:"; "")) + ".png")) | map(map("./assets/" + (. | namespace) + "/textures/" + (. | sub("(.*?)\\:"; "")) + ".png"))
' ${model_list[@]} | sponge union_atlas.temp ' ${model_list[@]} | sponge scratch_files/union_atlas.temp
jq ' jq '
def intersects(a;b): any(a[]; . as $x | any(b[]; . == $x)); def intersects(a;b): any(a[]; . as $x | any(b[]; . == $x));
@@ -614,19 +624,19 @@ def mapatlas(set):
[["./assets/minecraft/textures/0.png"]] + [["./assets/minecraft/textures/0.png"]] +
reduce .[] as $entry ([]; mapatlas($entry)) reduce .[] as $entry ([]; mapatlas($entry))
' union_atlas.temp | sponge union_atlas.temp ' scratch_files/union_atlas.temp | sponge scratch_files/union_atlas.temp
total_union_atlas=($(jq -r 'length - 1' union_atlas.temp)) total_union_atlas=($(jq -r 'length - 1' scratch_files/union_atlas.temp))
mkdir spritesheet mkdir -p scratch_files/spritesheet
status_message process "Generating $((1+${total_union_atlas})) sprite sheets..." status_message process "Generating $((1+${total_union_atlas})) sprite sheets..."
for i in $(seq 0 ${total_union_atlas}) for i in $(seq 0 ${total_union_atlas})
do do
generate_atlas () { generate_atlas () {
# find the union of all texture files listed in this atlas and all texture files in our resource pack # find the union of all texture files listed in this atlas and all texture files in our resource pack
local texture_list=( $(jq -s --arg index "${1}" -r '(.[1][($index | tonumber)] - .[0] | length > 0) as $fallback_needed | ((.[1][($index | tonumber)] - (.[1][($index | tonumber)] - .[0])) + (if $fallback_needed then ["./assets/minecraft/textures/0.png"] else [] end)) | .[]' all_textures.temp union_atlas.temp) ) local texture_list=( $(jq -s --arg index "${1}" -r '(.[1][($index | tonumber)] - .[0] | length > 0) as $fallback_needed | ((.[1][($index | tonumber)] - (.[1][($index | tonumber)] - .[0])) + (if $fallback_needed then ["./assets/minecraft/textures/0.png"] else [] end)) | .[]' scratch_files/all_textures.temp scratch_files/union_atlas.temp) )
status_message process "Generating sprite sheet ${1} of ${total_union_atlas}" status_message process "Generating sprite sheet ${1} of ${total_union_atlas}"
spritesheet-js -f json --name spritesheet/${1} --fullpath ${texture_list[@]} > /dev/null 2>&1 spritesheet-js -f json --name scratch_files/spritesheet/${1} --fullpath ${texture_list[@]} > /dev/null 2>&1
echo ${1} >> atlases.csv echo ${1} >> scratch_files/atlases.csv
} }
wait_for_jobs wait_for_jobs
generate_atlas "${i}" & generate_atlas "${i}" &
@@ -634,18 +644,18 @@ done
 wait # wait for all the jobs to finish
 # generate terrain texture atlas
-jq -cR 'split(",")' atlases.csv | jq -s 'map({("gmdl_atlas_" + .[0]): {"textures": ("textures/geyser/geyser_custom/" + .[0])}}) | add' > atlases.json
+jq -cR 'split(",")' scratch_files/atlases.csv | jq -s 'map({("gmdl_atlas_" + .[0]): {"textures": ("textures/geyser/geyser_custom/" + .[0])}}) | add' > scratch_files/atlases.json
 jq -s '
 .[0] as $atlases
 | .[1]
 | .texture_data += $atlases
-' atlases.json ./target/rp/textures/terrain_texture.json | sponge ./target/rp/textures/terrain_texture.json
+' scratch_files/atlases.json ./target/rp/textures/terrain_texture.json | sponge ./target/rp/textures/terrain_texture.json
 status_message completion "All sprite sheets generated"
-mv spritesheet/*.png ./target/rp/textures/geyser/geyser_custom
+mv scratch_files/spritesheet/*.png ./target/rp/textures/geyser/geyser_custom
 # begin conversion
-jq -r '.[] | [.path, .geyserID, .generated, .namespace, .model_path, .model_name, .path_hash] | @tsv | gsub("\\t";",")' config.json | sponge all.csv
+jq -r '.[] | [.path, .geyserID, .generated, .namespace, .model_path, .model_name, .path_hash] | @tsv | gsub("\\t";",")' config.json | sponge scratch_files/all.csv
 while IFS=, read -r file gid generated namespace model_path model_name path_hash
 do
@@ -661,14 +671,14 @@ do
 # find which texture atlas we will be using if not generated
 if [[ ${generated} = "false" ]]
 then
-local atlas_index=$(jq -r -s 'def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; def intersects(a;b): any(a[]; . as $x | any(b[]; . == $x)); (.[0] | [.textures[]] | map("./assets/" + (. | namespace) + "/textures/" + (. | sub("(.*?)\\:"; "")) + ".png")) as $inp | [(.[1] | (map(if intersects(.;$inp) then . else empty end)[])) as $entry | .[1] | to_entries[] | select(.value == $entry).key][0] // 0' ${file} union_atlas.temp)
+local atlas_index=$(jq -r -s 'def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end; def intersects(a;b): any(a[]; . as $x | any(b[]; . == $x)); (.[0] | [.textures[]] | map("./assets/" + (. | namespace) + "/textures/" + (. | sub("(.*?)\\:"; "")) + ".png")) as $inp | [(.[1] | (map(if intersects(.;$inp) then . else empty end)[])) as $entry | .[1] | to_entries[] | select(.value == $entry).key][0] // 0' ${file} scratch_files/union_atlas.temp)
 else
 local atlas_index=0
 fi
 status_message process "Starting conversion of model with GeyserID ${gid}"
 mkdir -p ./target/rp/models/blocks/geyser_custom/${namespace}/${model_path}
-jq --slurpfile atlas spritesheet/${atlas_index}.json --arg generated "${generated}" --arg binding "c.item_slot == 'head' ? 'head' : q.item_slot_to_bone_name(c.item_slot)" --arg path_hash "${path_hash}" -c '
+jq --slurpfile atlas scratch_files/spritesheet/${atlas_index}.json --arg generated "${generated}" --arg binding "c.item_slot == 'head' ? 'head' : q.item_slot_to_bone_name(c.item_slot)" --arg path_hash "${path_hash}" -c '
 .textures as $texture_list |
 def namespace: if contains(":") then sub("\\:(.+)"; "") else "minecraft" end;
 def tobool: if .=="true" then true elif .=="false" then false else null end;
@@ -977,15 +987,15 @@ do
 ' | sponge ./target/rp/attachables/geyser_custom/${namespace}/${model_path}/${model_name}.attachable.json
 # progress
-echo >> count.csv
-local tot_pos=$((cur_pos + $(wc -l < count.csv)))
+echo >> scratch_files/count.csv
+local tot_pos=$((cur_pos + $(wc -l < scratch_files/count.csv)))
 status_message completion "${gid} converted\n$(ProgressBar ${tot_pos} ${_end})"
 echo
 }
 wait_for_jobs
 convert_model ${file} ${gid} ${generated} ${namespace} ${model_path} ${model_name} ${path_hash} &
-done < all.csv
+done < scratch_files/all.csv
 wait # wait for all the jobs to finish
 # write lang file US
@@ -1082,7 +1092,7 @@ jq '
 ' config.json | sponge ./target/geyser_mappings.json
 # Add sprites if sprites.json exists in the root pack
-if [ -f sprites.json ]; then
+if [ -f scratch_files/sprites.json ]; then
 status_message process "Adding provided sprite paths from sprites.json"
 jq -r '
 to_entries
@@ -1091,19 +1101,19 @@ if [ -f sprites.json ]; then
 | [((.item | split(":")[-1]) + "_c" + (.custom_model_data | tostring) + "_d" + (.damage_predicate | tostring) + "_u" + (.unbreakable | tostring)), .sprite]
 | @tsv
 | gsub("\\t";",")
-' sprites.json > sprites.csv
+' scratch_files/sprites.json > scratch_files/sprites.csv
 while IFS=, read -r predicate icon
-do write_hash "${predicate}" "${icon}" "sprite_hashes.csv" &
-done < sprites.csv > /dev/null
-jq -cR 'split(",")' sprite_hashes.csv | jq -s 'map({("gmdl_" + .[1]): {"textures": .[0]}}) | add' > sprite_hashmap.json
+do write_hash "${predicate}" "${icon}" "scratch_files/sprite_hashes.csv" &
+done < scratch_files/sprites.csv > /dev/null
+jq -cR 'split(",")' scratch_files/sprite_hashes.csv | jq -s 'map({("gmdl_" + .[1]): {"textures": .[0]}}) | add' > scratch_files/sprite_hashmap.json
 jq -s '
 .[0] as $icon_sprites
 | .[1]
 | .texture_data += $icon_sprites
-' sprite_hashmap.json ./target/rp/textures/item_texture.json | sponge ./target/rp/textures/item_texture.json
+' scratch_files/sprite_hashmap.json ./target/rp/textures/item_texture.json | sponge ./target/rp/textures/item_texture.json
 jq -s '
 {
@@ -1118,14 +1128,21 @@ if [ -f sprites.json ]; then
 ))}
 ) | add)
 }
-' sprite_hashmap.json ./target/geyser_mappings.json | sponge ./target/geyser_mappings.json
-rm -f sprites.json && rm -f sprites.csv && rm -f sprite_hashes.csv && rm -f sprite_hashmap.json
+' scratch_files/sprite_hashmap.json ./target/geyser_mappings.json | sponge ./target/geyser_mappings.json
 fi
 # cleanup
-status_message critical "Deleting scratch files"
-rm -rf spritesheet && rm -rf assets && rm -f pack.mcmeta && rm -f pack.png && rm -f parents.json && rm -f all.csv && rm -f pa.csv && rm -f *.temp && rm -f item_mappings.json && rm -f item_texture.json && rm paths.csv && rm hashes.csv && rm hashmap.json && rm count.csv && rm -f atlases.csv && rm -f atlases.json && rm -f generated.csv && rm -f generated.json && rm -f icons.csv && rm -f icons.json && rm -f deleted.csv && rm -f deleted.json
+rm -rf assets && rm -f pack.mcmeta && rm -f pack.png
+if [[ ${save_scratch} != "true" ]]
+then
+rm -rf scratch_files
+status_message critical "Deleted scratch files"
+else
+cd ./scratch_files > /dev/null && zip -rq8 scratch_files.zip . -x "*/.*" && cd .. > /dev/null && mv ./scratch_files/scratch_files.zip ./target/scratch_files.zip
+status_message completion "Archived scratch files\n"
+fi
 status_message process "Compressing output packs"
 mkdir ./target/packaged
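With this cleanup change, passing -s as "true" leaves the intermediate files behind as a zip next to the converted packs rather than deleting them. A quick way to check what was kept after a run (the pack name is a placeholder, and zip/unzip are assumed to be installed):

    ./converter.sh my_pack.zip -s "true"
    unzip -l ./target/scratch_files.zip | head -n 20   # list the first archived entries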