
Glass server db updater & more!

prescientmoon 2025-04-08 23:40:48 +02:00
parent e78fc06a89
commit df292832b8
Signed by: prescientmoon
SSH key fingerprint: SHA256:UUF9JT2s8Xfyv76b8ZuVL7XrmimH4o49p4b+iexbVH4
9 changed files with 1634 additions and 1454 deletions

View file

@@ -61,11 +61,11 @@
"shimmeringvoid": {
"flake": false,
"locked": {
- "lastModified": 1744143898,
- "narHash": "sha256-JwcjV/uofAjyJs3xLJ57Uv7N7vkXDgovLjvwzKU5Qwo=",
+ "lastModified": 1744146722,
+ "narHash": "sha256-qvh6UxyLbbaSrrpAJwW3fzA1YqrwnTUT8qj17poi0aY=",
"ref": "refs/heads/main",
- "rev": "97fe5eded99a08f96e2d3f061199b0730331b60b",
- "revCount": 11,
+ "rev": "b6ab7c08daac587a1da5649cfdbc082ef2a01a20",
+ "revCount": 13,
"type": "git",
"url": "ssh://forgejo@ssh.git.moonythm.dev/prescientmoon/shimmeringvoid.git"
},

View file

@@ -25,7 +25,12 @@
in
{
packages = {
- inherit (pkgs) shimmeringmoon glass-bundler private-config;
+ inherit (pkgs)
+   shimmeringmoon
+   glass-bundler
+   private-config
+   glass-server-db-updater
+   ;
default = pkgs.shimmeringmoon;
};
@@ -55,7 +60,7 @@
SHIMMERING_FONT_DIR = pkgs.shimmering-fonts;
SHIMMERING_CC_DIR = pkgs.arcaea-ptt-data;
SHIMMERING_PRIVATE_CONFIG_DIR = pkgs.private-config;
- SHIMMERING_PRIVATE_COMPTIME_CONFIG_DIR = inputs.shimmeringdarkness;
+ SHIMMERING_COMPTIME_PRIVATE_CONFIG_DIR = inputs.shimmeringdarkness;
};
# }}}
}

View file

@@ -2,6 +2,6 @@
pkgs.fetchFromGitHub {
owner = "OllyDoge";
repo = "ASCPSongConstant";
- rev = "19e727711938239a42371d231d6fcd0d40a5f6b1";
- sha256 = "12jxfbbkra5x7vqar615qq9l55vr7rd1bnzc2csp6l7rzz8x4930";
+ rev = "f99de5d1b9b1e82f24a284d97e986d7bb4d0fd69";
+ sha256 = "0vvl0852la65xkfxh4amrnh81m08v9aknlp73ifh7llxd1k6qb9q";
}

View file

@@ -0,0 +1,42 @@
{
  arcaea-ptt-data,
  lib,
  makeWrapper,
  python3,
  stdenvNoCC,
  symlinkJoin,
}:
let
  # Prepend a python3 shebang to the plain script and install it as an executable.
  unpatched = stdenvNoCC.mkDerivation {
    name = "glass-server-db-updater";
    src = lib.fileset.toSource {
      root = ../scripts;
      fileset = lib.fileset.unions [ ../scripts/update-db-songs.py ];
    };

    nativeBuildInputs = [ makeWrapper ];

    buildPhase = ''
      runHook preBuild
      echo "#!${python3}/bin/python" > glass-server-db-updater
      cat $src/update-db-songs.py >> glass-server-db-updater
      chmod +x glass-server-db-updater
      runHook postBuild
    '';

    installPhase = ''
      runHook preInstall
      install -Dm755 glass-server-db-updater -t $out/bin/
      runHook postInstall
    '';
  };
in
symlinkJoin {
  inherit (unpatched) name meta;
  paths = [ unpatched ];
  nativeBuildInputs = [ makeWrapper ];
  # Wrap the installed script so SHIMMERING_CC_DIR always points at the arcaea PTT data.
  postBuild = ''
    wrapProgram $out/bin/glass-server-db-updater \
      --set SHIMMERING_CC_DIR "${arcaea-ptt-data}"
  '';
}

View file

@@ -14,4 +14,5 @@ final: prev: {
arcaea-ptt-data = final.callPackage ./cc-data.nix { };
shimmeringmoon = final.callPackage ./shimmeringmoon.nix { };
+ glass-server-db-updater = final.callPackage ./glass-server-db-updater.nix { };
}

View file

@@ -0,0 +1,75 @@
import sqlite3
import json
import sys
import os

# Check if the correct number of arguments are provided
if len(sys.argv) != 2:
    print("Usage: update-db-songs <db_file>")
    sys.exit(1)

# {{{ Collect data
json_file_path = f"{os.environ.get('SHIMMERING_CC_DIR')}/ptt.json"
db_file_path = sys.argv[1]

try:
    with open(json_file_path, "r") as json_file:
        json_data = json.load(json_file)
except Exception as e:
    print(f"Error reading JSON file: {e}")
    sys.exit(1)

try:
    conn = sqlite3.connect(db_file_path)
    cursor = conn.cursor()
except sqlite3.Error as e:
    print(f"Error connecting to SQLite database: {e}")
    sys.exit(1)

cursor.execute("SELECT song_id FROM chart")
current_entries = {row[0] for row in cursor.fetchall()}
# }}}

# {{{ Print diff & delete entries
json_entries = set(json_data.keys())

removed_entries = current_entries - json_entries
if removed_entries:
    print(f"Removed entries: {removed_entries}")
else:
    print("No entries were removed.")

added_entries = json_entries - current_entries
if added_entries:
    print(f"Added entries: {added_entries}")
else:
    print("No new entries were added.")

cursor.execute("DELETE FROM chart")
# }}}

# {{{ Add new entries
for song_id, ratings in json_data.items():
    cursor.execute(
        """
        INSERT INTO chart(song_id)
        VALUES (?)
        """,
        [song_id],
    )

    for rating_type, rating_value in ratings.items():
        rating_column = ["prs", "pst", "ftr", "byn", "etr"][int(rating_type)]
        rating_column = f"rating_{rating_column}"
        cursor.execute(
            f"""
            UPDATE chart
            SET {rating_column}=?
            WHERE song_id=?
            """,
            (rating_value, song_id),
        )

conn.commit()
conn.close()
# }}}

# Print final status
print("Database updated successfully.")

View file

@@ -188,20 +188,27 @@ pub fn import_songlist(
};
song_count += 1;

- transaction.execute(
-   "
+ transaction
+   .execute(
+     "
      INSERT INTO songs(id,title,shorthand,artist,side,bpm)
      VALUES (?,?,?,?,?,?)
    ",
-   (
-     song.id,
-     song.title.get(),
-     &song.shorthand,
-     &song.artist,
-     Side::SIDES[song.side as usize],
-     song.bpm,
-   ),
- )?;
+     (
+       song.id,
+       song.title.get(),
+       &song.shorthand,
+       &song.artist,
+       Side::SIDES[song.side as usize],
+       song.bpm,
+     ),
+   )
+   .with_context(|| {
+     anyhow!(
+       "Failed to create song \"{}\" from songlist file",
+       song.title.get(),
+     )
+   })?;

for chart in song.difficulties {
  if chart.rating == 0 {
@@ -251,24 +258,36 @@
    anyhow!("Cannot find PTT data for song '{}' [{}]", name, difficulty)
  })?;

- transaction.execute(
-   "
+ transaction
+   .execute(
+     "
      INSERT INTO charts(
        song_id, title, difficulty,
        level, note_count, chart_constant,
        note_design
      ) VALUES(?,?,?,?,?,?,?)
    ",
-   (
-     song.id,
-     chart.title.as_ref().map(|t| t.get()),
-     difficulty,
-     level,
-     notecount,
-     rating_as_fixed(cc),
-     chart.chart_designer,
-   ),
- )?;
+     (
+       song.id,
+       chart.title.as_ref().map(|t| t.get()),
+       difficulty,
+       level,
+       notecount,
+       rating_as_fixed(cc),
+       chart.chart_designer,
+     ),
+   )
+   .with_context(|| {
+     anyhow!(
+       "Failed to create chart \"{}\" [{}] from songlist file",
+       chart
+         .title
+         .as_ref()
+         .map(|t| t.get())
+         .unwrap_or(song.title.get()),
+       difficulty,
+     )
+   })?;
  }
}

View file

@@ -16,7 +16,7 @@ use crate::context::Error;
pub const SPLIT_FACTOR: u32 = 8;
pub const IMAGE_VEC_DIM: usize = (SPLIT_FACTOR * SPLIT_FACTOR * 3) as usize;
pub const BITMAP_IMAGE_SIZE: u32 = 174;
- pub const JACKET_RECOGNITITION_DIMENSIONS: usize = 20;
+ pub const JACKET_RECOGNITITION_DIMENSIONS: usize = 64;
// {{{ (Image => vector) encoding
#[allow(clippy::identity_op)]

File diff suppressed because it is too large.