add album downloading and rewrite cache

parent f2800f13c8, commit a3cefee49a
33 changed files with 2573 additions and 277 deletions
.editorconfig

@@ -1,6 +1,7 @@
root = true

[*]
charset = utf-8
end_of_line = lf
indent_style = space
indent_size = 4

@@ -9,3 +10,12 @@ trim_trailing_whitespace = true

[{*.nix,*.yml}]
indent_size = 2

# autogenerated files
[{drizzle/**, package-lock.json}]
charset = unset
end_of_line = unset
indent_style = unset
indent_size = unset
insert_final_newline = unset
trim_trailing_whitespace = unset
.env.example

@@ -2,5 +2,6 @@ MEDIA_USER_TOKEN=RE8gTk9UIFRSVVNUIFRIRU0uIFRIRVJFIElTIFNPTUVUSElORyBISURJTkcgT04
ITUA=US
WIDEVINE_CLIENT_ID=YTg1OGx2NmdpM3M1eWQ1YW0zaGtsN3FxOTM5Mzg3MjBrdjcxc3B4aXM1MnRscHViOGJkazl2ZGE2ZGN4dWFwYzJxMXo3ZzN6bWVsMjVuMnhhazc2cjdobHlxa2FkZjdibGYybXA4cWZkanZ6aGUydWI5bWF6ejcyajVkbmthbHA=
WIDEVINE_PRIVATE_KEY=aGFpaWlpaWlpaWlpaSBtZW93IDozMzMgd2Fzc3VwCg==
MIGRATIONS_DIR=./drizzle
VIEWS_DIR=./views
PUBLIC_DIR=./public
.gitignore (vendored, 3 lines changed)

@@ -1,3 +1,4 @@
# build stuff
/dist
/result
/node_modules

@@ -5,5 +6,5 @@
.env
config.toml

# the cache directory for songs
# database stuff
/cache
README.md (10 lines changed)

@@ -16,7 +16,7 @@ thank you to [gamdl](https://github.com/glomatico/gamdl) for inspiring this proj

`WIDEVINE_PRIVATE_KEY` is obtained through essentially the same process; you'll get it from the same guide!! i'm not sure how to easily find one of these on the web, but i'm sure you end users (user count: 0 (<img src="./docs/true.png" alt="robert downey jr. true image" height="13">)) can pull through. this is also in base64 (`cat private_key.pem | base64 -w 0`)

`PUBLIC_DIR` and `VIEWS_DIR` should typically not need to be set by the user if using this repository as the working directory. blank values will result in simply `views` and `public` being grabbed from the cwd, which also so happens to be the default in [`.env.example`](./.env.example). set this manually to your own value if you get full runtime errors when accessing pages relating to templates being missing, assets having unexpected 404 issues, etc. this value is also recommended for packagers, to prevent the users having to copy over views and public--see how the nix build works!
`MIGRATIONS_DIR`, `PUBLIC_DIR`, and `VIEWS_DIR` should typically not need to be set by the user if using this repository as the working directory. blank values will result in simply `drizzle`, `views`, and `public` being grabbed from the cwd, which also happens to be the default in [`.env.example`](./.env.example). set these manually to your own values if you get runtime errors about missing templates, unexpected 404s for assets, etc. setting these is also recommended for packagers, to prevent users from having to copy over views and public--see how the nix build works!
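for reference, a minimal sketch (not part of this commit) of how those directory overrides resolve, mirroring the zod env schema that `src/config.ts` gains later in this diff; the fallback values are the ones shown in `.env.example`:

```typescript
// a sketch of the directory overrides; mirrors the env schema added to
// src/config.ts in this commit (defaults fall back to the cwd copies)
import { z } from "zod";

const envSchema = z.object({
    MIGRATIONS_DIR: z.string().default("./drizzle"),
    VIEWS_DIR: z.string().default("./views"),
    PUBLIC_DIR: z.string().default("./public")
});

// packagers can point these at an installed location instead of the cwd,
// e.g. the $out/... values the nix wrapper sets further down in this diff
const env = envSchema.parse(process.env);
console.log(env.MIGRATIONS_DIR, env.VIEWS_DIR, env.PUBLIC_DIR);
```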

### config

@@ -34,6 +34,14 @@ a system module is provided for your convenience, and the main output is `nixosM

after importing this module, the option `services.amdl` will show up, which is documented reasonably well in [`flake.nix`](./flake.nix). everything under the `config` tree follows `config.toml`, as does everything under the `env` tree. defaults are provided for everything in the env section that isn't the ITUA. make sure to set those!!

#### nginx information

a decent number of nginx setups (and ones on nixos using `recommendedProxySettings`) have proxy buffering on; i recommend turning that off (if the whole file isn't downloaded before the read timeout, nginx will just drop the file)

```nginx
proxy_buffering off;
```

## limitations / the formats

currently you can only get basic widevine streams; everything related to the playready and fairplay encryption methods is not supported, sorry!! someday i will get this working, at least for playready. it's just that no one has written a library for it yet outside of python (yuck!!). lossless audio is unfortunately out of the question currently. it will be a while till someone breaks fairplay drm
@@ -8,7 +8,7 @@ port = 2000
# max 25, min 5
search_count = 5
# displayed codecs, recommended to use default
# see src/downloader/index.ts for a list of codecs
# see src/constants/codecs.ts for a list of codecs
displayed_codecs = ["aac_legacy", "aac_he_legacy"]

[downloader]

@@ -23,8 +23,10 @@ ytdlp_path = "yt-dlp"
# where to store downloaded files (music, lyrics, etc.)
# this directory will be created if it does not exist
directory = "cache"
# where to store the database file
database = "file:cache/cache.sqlite"
# how long to keep downloaded files (in seconds)
ttl = 3600 # (1 hour)
file_ttl = 3600 # (1 hour)

[downloader.api]
# two letter language code (ISO 639-1), followed by a dash (-) and a two letter country code (ISO 3166-1 alpha-2)
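as a minimal sketch (assuming the surrounding behaviour is the rewritten cache in `src/cache.ts` from this commit), `file_ttl` is read in seconds, converted to milliseconds, and used to schedule one deletion timer per cached file:

```typescript
// illustrative only: how the rewritten cache (src/cache.ts) applies file_ttl.
// file_ttl comes from config.toml in seconds; cache.ts works in milliseconds.
const fileTtlSeconds = 3600;              // file_ttl = 3600 (1 hour)
const fileTtlMs = fileTtlSeconds * 1000;

const timers = new Map<string, ReturnType<typeof setTimeout>>();

function scheduleDeletion(name: string, expiry: number): void {
    // re-adding a file clears the old timer, so its lifetime is extended
    const existing = timers.get(name);
    if (existing !== undefined) { clearTimeout(existing); }

    timers.set(name, setTimeout(() => {
        console.log(`cache entry ${name} expired, would delete the file now`);
        timers.delete(name);
    }, expiry - Date.now()));
}

scheduleDeletion("example.m4a", Date.now() + fileTtlMs);
```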
drizzle.config.ts (new file, 12 lines)

@@ -0,0 +1,12 @@
import { defineConfig } from "drizzle-kit";
import toml from "toml";
import fs from "fs";

export default defineConfig({
    out: "./drizzle", // TODO: unhardcode
    schema: "./src/database/schema.ts",
    dialect: "sqlite",
    dbCredentials: {
        url: toml.parse(fs.readFileSync("config.toml", "utf-8")).downloader.cache.database // TODO: unscuff
    }
});
drizzle/0000_init.sql (new file, 12 lines)

@@ -0,0 +1,12 @@
CREATE TABLE `file_cache` (
    `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
    `name` text NOT NULL,
    `expiry` integer NOT NULL
);
--> statement-breakpoint
CREATE TABLE `key_cache` (
    `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
    `songId` text NOT NULL,
    `codec` text NOT NULL,
    `decryptionKey` text NOT NULL
);
drizzle/meta/0000_snapshot.json (new file, 87 lines)
|
|
@ -0,0 +1,87 @@
|
|||
{
|
||||
"version": "6",
|
||||
"dialect": "sqlite",
|
||||
"id": "b88a9929-0bda-4344-b012-87c3335389ed",
|
||||
"prevId": "00000000-0000-0000-0000-000000000000",
|
||||
"tables": {
|
||||
"file_cache": {
|
||||
"name": "file_cache",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": true
|
||||
},
|
||||
"name": {
|
||||
"name": "name",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"expiry": {
|
||||
"name": "expiry",
|
||||
"type": "integer",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
},
|
||||
"key_cache": {
|
||||
"name": "key_cache",
|
||||
"columns": {
|
||||
"id": {
|
||||
"name": "id",
|
||||
"type": "integer",
|
||||
"primaryKey": true,
|
||||
"notNull": true,
|
||||
"autoincrement": true
|
||||
},
|
||||
"songId": {
|
||||
"name": "songId",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"codec": {
|
||||
"name": "codec",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
},
|
||||
"decryptionKey": {
|
||||
"name": "decryptionKey",
|
||||
"type": "text",
|
||||
"primaryKey": false,
|
||||
"notNull": true,
|
||||
"autoincrement": false
|
||||
}
|
||||
},
|
||||
"indexes": {},
|
||||
"foreignKeys": {},
|
||||
"compositePrimaryKeys": {},
|
||||
"uniqueConstraints": {},
|
||||
"checkConstraints": {}
|
||||
}
|
||||
},
|
||||
"views": {},
|
||||
"enums": {},
|
||||
"_meta": {
|
||||
"schemas": {},
|
||||
"tables": {},
|
||||
"columns": {}
|
||||
},
|
||||
"internal": {
|
||||
"indexes": {}
|
||||
}
|
||||
}
|
||||
drizzle/meta/_journal.json (new file, 13 lines)

@@ -0,0 +1,13 @@
{
  "version": "7",
  "dialect": "sqlite",
  "entries": [
    {
      "idx": 0,
      "version": "6",
      "when": 1756375000167,
      "tag": "0000_init",
      "breakpoints": true
    }
  ]
}
@@ -25,7 +25,7 @@
  # uncomment this and let the build fail, then get the current hash
  # very scuffed but endorsed!
  # npmDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  npmDepsHash = "sha256-lvueqcSBjtt9RSMwq2NWCAVT0NrZwDmhEYkjtdOs7js=";
  npmDepsHash = "sha256-11AayHpPu7ocBPRB5k4SU7b99Aqc/dufAy2Yg5oPvGE=";

  nativeBuildInputs = with pkgs; [ makeWrapper ];

@@ -35,12 +35,13 @@
    runHook preInstall

    mkdir -p $out
    mv node_modules dist views public $out/
    mv dist drizzle public views node_modules $out/
    makeWrapper ${pkgs.nodejs-slim}/bin/node $out/bin/amdl \
      --prefix PATH : ${makeBinPath buildInputs} \
      --add-flags "$out/dist/src/index.js" \
      --set VIEWS_DIR $out/views \
      --set MIGRATIONS_DIR $out/drizzle \
      --set PUBLIC_DIR $out/public \
      --set VIEWS_DIR $out/views \
      --set NODE_ENV production

    runHook postInstall
|
||||
|
|
|
|||
package-lock.json (generated, 1955 lines changed; diff suppressed because it is too large)
package.json (11 lines changed)
@@ -9,19 +9,26 @@
        "dev": "concurrently 'node --watch dist/src/index.js' 'tsc --watch'",
        "build": "npm run lint && tsc",
        "lint": "eslint .",
        "lint:fix": "eslint . --fix"
        "lint:fix": "eslint . --fix",
        "migrate": "drizzle-kit migrate",
        "migrate:gen": "drizzle-kit generate",
        "migrate:drop": "drizzle-kit drop"
    },
    "dependencies": {
        "@libsql/client": "^0.15.12",
        "archiver": "^7.0.1",
        "axios": "^1.11.0",
        "callsites": "^4.2.0",
        "chalk": "^5.4.1",
        "data-uri-to-buffer": "^6.0.2",
        "dotenv": "^17.2.1",
        "drizzle-orm": "^0.44.4",
        "express": "^5.1.0",
        "express-handlebars": "^8.0.3",
        "format-duration": "^3.0.2",
        "node-widevine": "^0.1.3",
        "parse-hls": "^1.0.7",
        "pretty-bytes": "^7.0.1",
        "pssh-tools": "^1.2.0",
        "source-map-support": "^0.5.21",
        "swagger-ui-express": "^5.0.1",

@@ -33,11 +40,13 @@
    },
    "devDependencies": {
        "@stylistic/eslint-plugin": "^3.1.0",
        "@types/archiver": "^6.0.3",
        "@types/express": "^5.0.3",
        "@types/source-map-support": "^0.5.10",
        "@types/swagger-ui-express": "^4.1.8",
        "@typescript-eslint/parser": "^7.12.0",
        "concurrently": "^9.2.0",
        "drizzle-kit": "^0.31.4",
        "eslint": "^8.57.1",
        "typescript": "^5.9.2",
        "typescript-eslint": "^8.39.1"
@@ -123,10 +123,13 @@ footer {
    width: 100%;
}
.result-info {
    display: flex;
    flex-direction: row;
    display: grid;
    grid-template-columns: auto 1fr auto;
    grid-template-rows: auto;
    grid-auto-flow: row;
    align-items: center;
    gap: 1em;
    gap: 0 1em;
    padding-right: 1em;
}
.result-info img {
    width: 4em;
src/appleMusicApi/index.ts

@@ -1,6 +1,6 @@
import axios, { type AxiosInstance } from "axios";
import { ampApiUrl, appleMusicHomepageUrl, licenseApiUrl, webplaybackApiUrl } from "../constants/urls.js";
import type { GetPlaylistResponse, GetSongResponse, SearchResponse } from "./types/responses.js";
import type { GetAlbumResponse, GetPlaylistResponse, GetSongResponse, SearchResponse } from "./types/responses.js";
import type { AlbumAttributesExtensionTypes, AnyAttributesExtensionTypes, SongAttributesExtensionTypes } from "./types/extensions.js";
import { getToken } from "./token.js";
import { config, env } from "../config.js";

@@ -41,8 +41,8 @@ export default class AppleMusicApi {
        id: string,
        extend: T = [] as unknown[] as T,
        relationships: U = ["tracks"] as U
    ): Promise<GetSongResponse<T, U>> {
        return (await this.http.get<GetSongResponse<T, U>>(`/v1/catalog/${this.storefront}/albums/${id}`, {
    ): Promise<GetAlbumResponse<T, U>> {
        return (await this.http.get<GetAlbumResponse<T, U>>(`/v1/catalog/${this.storefront}/albums/${id}`, {
            params: {
                extend: extend.join(","),
                include: relationships.join(",")
src/appleMusicApi/token.ts

@@ -27,7 +27,7 @@ export async function getToken(baseUrl: string): Promise<string> {
        throw new Error("could not find match for the api token in the index javascript file");
    }

    log.debug("got api token");
    log.info("got api token");

    return token;
}
src/cache.ts (192 lines changed)
|
|
@ -1,97 +1,131 @@
|
|||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import timeago from "timeago.js";
|
||||
import { config } from "./config.js";
|
||||
import { db } from "./database/index.js";
|
||||
import { fileCacheTable, keyCacheTable } from "./database/schema.js";
|
||||
import fsPromises from "fs/promises";
|
||||
import { and, eq } from "drizzle-orm";
|
||||
import * as log from "./log.js";
|
||||
import prettyBytes from "pretty-bytes";
|
||||
|
||||
// DO NOT READ FURTHER INTO THIS FILE
|
||||
// COGNITIVE DISSONANCE WARNING
|
||||
|
||||
// TODO: hourly cache reports
|
||||
// TODO: swap to sqlite
|
||||
// TODO: make async fs calls
|
||||
// TODO: rework EVERYTHING
|
||||
// TODO: refresh cache timer on download
|
||||
|
||||
interface CacheEntry {
|
||||
fileName: string;
|
||||
expiry: number; // milliseconds, not seconds
|
||||
// try creating cache if it doesn't exist
|
||||
// a bit scuffed but that ok
|
||||
try {
|
||||
log.debug(`ensuring cache directory "${config.downloader.cache.directory}" exists`);
|
||||
await fsPromises.mkdir(config.downloader.cache.directory, { recursive: true });
|
||||
} catch (err) {
|
||||
log.error("failed to create cache directory!");
|
||||
log.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const cacheTtl = config.downloader.cache.ttl * 1000;
|
||||
const cacheFile = path.join(config.downloader.cache.directory, "cache.json");
|
||||
const fileTtl = config.downloader.cache.file_ttl * 1000;
|
||||
const timers = new Map<string, NodeJS.Timeout>();
|
||||
|
||||
if (!fs.existsSync(config.downloader.cache.directory)) {
|
||||
log.debug("cache directory not found, creating it");
|
||||
fs.mkdirSync(config.downloader.cache.directory, { recursive: true });
|
||||
}
|
||||
if (!fs.existsSync(cacheFile)) {
|
||||
log.debug("cache file not found, creating it");
|
||||
fs.writeFileSync(cacheFile, JSON.stringify([]), { encoding: "utf-8" });
|
||||
try {
|
||||
let entriesCleared = 0;
|
||||
let entriesClearedBytes = 0;
|
||||
log.debug("cache cleanup and expiry timers starting");
|
||||
|
||||
await Promise.all((await db.select().from(fileCacheTable)).map(async ({ name, expiry }) => {
|
||||
if (expiry < Date.now()) {
|
||||
entriesCleared++;
|
||||
entriesClearedBytes += (await fsPromises.stat(path.join(config.downloader.cache.directory, name))).size;
|
||||
await dropFile(name);
|
||||
} else {
|
||||
await scheduleDeletion(name, expiry);
|
||||
}
|
||||
}));
|
||||
|
||||
log.debug("cache cleanup complete!");
|
||||
log.debug(`cleared ${entriesCleared} entr${entriesCleared === 1 ? "y" : "ies"}, freeing up ${prettyBytes(entriesClearedBytes)}!`);
|
||||
} catch (err) {
|
||||
log.error("failed to run cache cleanup!");
|
||||
log.error(err);
|
||||
}
|
||||
|
||||
let cache = JSON.parse(fs.readFileSync(cacheFile, { encoding: "utf-8" })) as CacheEntry[];
|
||||
|
||||
// TODO: change how this works
|
||||
// this is so uncomfy
|
||||
cache.push = function(...items: CacheEntry[]): number {
|
||||
for (const entry of items) {
|
||||
log.debug(`cache entry ${entry.fileName} added, expires ${timeago.format(entry.expiry)}`);
|
||||
setTimeout(() => {
|
||||
log.debug(`cache entry ${entry.fileName} expired, cleaning`);
|
||||
removeCacheEntry(entry.fileName);
|
||||
rewriteCache();
|
||||
}, entry.expiry - Date.now());
|
||||
async function scheduleDeletion(name: string, expiry: number): Promise<void> {
|
||||
if (timers.has(name)) {
|
||||
clearTimeout(timers.get(name) as NodeJS.Timeout);
|
||||
}
|
||||
|
||||
return Array.prototype.push.apply(this, items);
|
||||
};
|
||||
const timeout = setTimeout(async () => {
|
||||
await dropFile(name);
|
||||
timers.delete(name);
|
||||
}, expiry - Date.now());
|
||||
|
||||
function rewriteCache(): void {
|
||||
// cache is in fact []. i checked
|
||||
fs.writeFileSync(cacheFile, JSON.stringify(cache), { encoding: "utf-8" });
|
||||
timers.set(name, timeout);
|
||||
}
|
||||
|
||||
function removeCacheEntry(fileName: string): void {
|
||||
cache = cache.filter((entry) => { return entry.fileName !== fileName; });
|
||||
try {
|
||||
fs.unlinkSync(path.join(config.downloader.cache.directory, fileName));
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
log.debug(`file for cache entry ${fileName} missing, dropping`);
|
||||
} else {
|
||||
log.error(`could not remove cache entry ${fileName}!`);
|
||||
async function dropFile(name: string): Promise<void> {
|
||||
const size = (await fsPromises.stat(path.join(config.downloader.cache.directory, name))).size;
|
||||
await fsPromises.unlink(path.join(config.downloader.cache.directory, name)).catch((err) => {
|
||||
if (err.code !== "ENOENT") {
|
||||
log.error(`failed to delete cached file ${name} for whatever reason!`);
|
||||
log.error("manual removal may be necessary!");
|
||||
log.error(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// clear cache entries that are expired
|
||||
// this is for when the program is killed when cache entries are present
|
||||
// those could expire while the program is not running, therefore not being cleaned
|
||||
let expiryLogPrinted = false;
|
||||
for (const entry of cache) {
|
||||
if (entry.expiry < Date.now()) {
|
||||
if (!expiryLogPrinted) { log.info("old expired cache entries are present, cleaning them"); }
|
||||
expiryLogPrinted = true;
|
||||
log.debug(`cache entry ${entry.fileName} expired ${timeago.format(entry.expiry)}; cleaning`);
|
||||
removeCacheEntry(entry.fileName);
|
||||
rewriteCache();
|
||||
}
|
||||
}
|
||||
|
||||
export function isCached(fileName: string): boolean {
|
||||
const entry = cache.find((e) => { return e.fileName === fileName; });
|
||||
const cached = entry !== undefined && entry.expiry > Date.now();
|
||||
if (cached) { log.debug(`cache HIT for ${fileName}`); }
|
||||
else { log.debug(`cache MISS for ${fileName}`); }
|
||||
return cached;
|
||||
}
|
||||
|
||||
export function addToCache(fileName: string): void {
|
||||
cache.push({
|
||||
fileName: fileName,
|
||||
expiry: Date.now() + cacheTtl
|
||||
});
|
||||
rewriteCache();
|
||||
|
||||
log.debug(`deleted file ${name} from cache, freeing up ${prettyBytes(size)}`);
|
||||
|
||||
await db.delete(fileCacheTable).where(eq(fileCacheTable.name, name));
|
||||
}
|
||||
|
||||
export async function addFileToCache(fileName: string): Promise<void> {
|
||||
const expiry = Date.now() + fileTtl;
|
||||
const existing = await db.select().from(fileCacheTable).where(eq(fileCacheTable.name, fileName)).get();
|
||||
|
||||
if (existing) {
|
||||
await db.update(fileCacheTable).set({ expiry: expiry }).where(eq(fileCacheTable.name, fileName));
|
||||
await scheduleDeletion(fileName, expiry);
|
||||
} else {
|
||||
await db.insert(fileCacheTable).values({name: fileName, expiry: expiry });
|
||||
await scheduleDeletion(fileName, expiry);
|
||||
}
|
||||
}
|
||||
|
||||
export async function isFileCached(fileName: string): Promise<boolean> {
|
||||
const existing = await db.select().from(fileCacheTable).where(eq(fileCacheTable.name, fileName)).get();
|
||||
|
||||
if (existing !== undefined) {
|
||||
await db.update(fileCacheTable).set({ expiry: Date.now() + fileTtl }).where(eq(fileCacheTable.name, fileName));
|
||||
await scheduleDeletion(fileName, existing.expiry);
|
||||
|
||||
log.debug(`cache HIT for file ${fileName}, extending expiry`);
|
||||
return true;
|
||||
} else {
|
||||
log.debug(`cache MISS for file ${fileName}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: add a key ttl? its probably not necessary but would be a nice to have
|
||||
// its pretty small anyway
|
||||
export async function addKeyToCache(songId: string, codec: string, decryptionKey: string): Promise<void> {
|
||||
const existing = await db.select().from(keyCacheTable).where(and(
|
||||
eq(keyCacheTable.songId, songId),
|
||||
eq(keyCacheTable.codec, codec),
|
||||
eq(keyCacheTable.decryptionKey, decryptionKey)
|
||||
)).get();
|
||||
|
||||
if (existing) {
|
||||
return;
|
||||
} else {
|
||||
await db.insert(keyCacheTable).values({ songId: songId, codec: codec, decryptionKey: decryptionKey });
|
||||
}
|
||||
}
|
||||
|
||||
export async function getKeyFromCache(songId: string, codec: string): Promise<string | undefined> {
|
||||
const existing = await db.select().from(keyCacheTable).where(and(
|
||||
eq(keyCacheTable.songId, songId),
|
||||
eq(keyCacheTable.codec, codec)
|
||||
)).get();
|
||||
|
||||
if (existing !== undefined) {
|
||||
log.debug(`cache HIT for key of ${songId} (${codec})`);
|
||||
return existing.decryptionKey;
|
||||
} else {
|
||||
log.debug(`cache MISS for key of ${songId} (${codec})`);
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
src/config.ts

@@ -20,7 +20,8 @@ const configSchema = z.object({
    ytdlp_path: z.string(),
    cache: z.object({
        directory: z.string(),
        ttl: z.number().int().min(0)
        database: z.string(),
        file_ttl: z.number().int().min(0)
    }),
    api: z.object({
        language: z.string()

@@ -33,6 +34,7 @@ const envSchema = z.object({
    ITUA: z.string(),
    WIDEVINE_CLIENT_ID: z.string(),
    WIDEVINE_PRIVATE_KEY: z.string(),
    MIGRATIONS_DIR: z.string().default("./drizzle"),
    VIEWS_DIR: z.string().default("./views"),
    PUBLIC_DIR: z.string().default("./public")
});
src/database/index.ts (new file, 26 lines)

@@ -0,0 +1,26 @@
import { createClient } from "@libsql/client";
import { config, env } from "../config.js";
import { drizzle } from "drizzle-orm/libsql";
import { migrate } from "drizzle-orm/libsql/migrator";
import fsPromises from "fs/promises";
import * as log from "../log.js";

try {
    if (config.downloader.cache.database.startsWith("file:")) {
        const databaseDir = config.downloader.cache.database.split("file:")[1].split("/").slice(0, -1).join("/");
        log.debug(`ensuring database directory "${databaseDir}" exists`);
        await fsPromises.mkdir(databaseDir, { recursive: true });
    }
} catch (err) {
    log.error("failed to create database directory!");
    log.error(err);
    process.exit(1);
}

// TODO: nice looking errors
export const client = createClient({ url: config.downloader.cache.database });
client.execute("PRAGMA foreign_keys = ON;");
client.execute("PRAGMA journal_mode = WAL;");
export const db = drizzle(config.downloader.cache.database);

await migrate(db, { migrationsFolder: env.MIGRATIONS_DIR });
src/database/schema.ts (new file, 14 lines)

@@ -0,0 +1,14 @@
import { int, sqliteTable, text } from "drizzle-orm/sqlite-core";

export const fileCacheTable = sqliteTable("file_cache", {
    id: int().primaryKey({ autoIncrement: true }),
    name: text().notNull(),
    expiry: int().notNull()
});

export const keyCacheTable = sqliteTable("key_cache", {
    id: int().primaryKey({ autoIncrement: true }),
    songId: text().notNull(),
    codec: text().notNull(),
    decryptionKey: text().notNull()
});
@ -1,19 +1,19 @@
|
|||
import { createWriteStream } from "node:fs";
|
||||
import type { GetSongResponse } from "appleMusicApi/types/responses.js";
|
||||
import path from "node:path";
|
||||
import { config } from "../config.js";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import { addToCache, isCached } from "../cache.js";
|
||||
import type { GetSongResponse } from "../appleMusicApi/types/responses.js";
|
||||
import { stripAlbumGarbage } from "./format.js";
|
||||
import { downloadAlbumCover } from "./index.js";
|
||||
import type { AlbumAttributes, SongAttributes } from "../appleMusicApi/types/attributes.js";
|
||||
|
||||
// TODO: simply add more fields. ha!
|
||||
// TODO: add lyrics (what format??)
|
||||
// TODO: where it does file name formatting to hit caches, i think we should normalize this throughout files in a function
|
||||
export class FileMetadata {
|
||||
private readonly trackAttributes: SongAttributes<[]>;
|
||||
private readonly albumAttributes: AlbumAttributes<[]>;
|
||||
public readonly artist: string;
|
||||
public readonly title: string;
|
||||
public readonly album: string;
|
||||
public readonly albumArtist: string;
|
||||
public readonly isPartOfCompilation: boolean;
|
||||
public readonly artwork: string;
|
||||
public readonly track?: number;
|
||||
public readonly disc?: number;
|
||||
public readonly date?: string;
|
||||
|
|
@ -21,13 +21,14 @@ export class FileMetadata {
|
|||
public readonly isrc?: string;
|
||||
public readonly composer?: string;
|
||||
|
||||
constructor(
|
||||
private constructor(
|
||||
trackAttributes: SongAttributes<[]>,
|
||||
albumAttributes: AlbumAttributes<[]>,
|
||||
artist: string,
|
||||
title: string,
|
||||
album: string,
|
||||
albumArtist: string,
|
||||
isPartOfCompilation: boolean,
|
||||
artwork: string,
|
||||
track?: number,
|
||||
disc?: number,
|
||||
date?: string,
|
||||
|
|
@ -35,12 +36,13 @@ export class FileMetadata {
|
|||
isrc?: string,
|
||||
composer?: string
|
||||
) {
|
||||
this.trackAttributes = trackAttributes;
|
||||
this.albumAttributes = albumAttributes;
|
||||
this.artist = artist;
|
||||
this.title = title;
|
||||
this.album = album.replace(/- (EP|Single)$/, "").trim();
|
||||
this.album = stripAlbumGarbage(album);
|
||||
this.albumArtist = albumArtist;
|
||||
this.isPartOfCompilation = isPartOfCompilation;
|
||||
this.artwork = artwork;
|
||||
this.track = track;
|
||||
this.disc = disc;
|
||||
this.date = date;
|
||||
|
|
@ -53,17 +55,14 @@ export class FileMetadata {
|
|||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const albumAttributes = trackMetadata.data[0].relationships.albums.data[0].attributes;
|
||||
|
||||
const artworkUrl = trackAttributes.artwork.url
|
||||
.replace("{w}", trackAttributes.artwork.width.toString())
|
||||
.replace("{h}", trackAttributes.artwork.height.toString());
|
||||
|
||||
return new FileMetadata(
|
||||
trackAttributes,
|
||||
albumAttributes,
|
||||
trackAttributes.artistName,
|
||||
trackAttributes.name,
|
||||
albumAttributes.name,
|
||||
albumAttributes.artistName,
|
||||
albumAttributes.isCompilation,
|
||||
artworkUrl,
|
||||
trackAttributes.trackNumber,
|
||||
trackAttributes.discNumber,
|
||||
trackAttributes.releaseDate,
|
||||
|
|
@ -73,32 +72,12 @@ export class FileMetadata {
|
|||
);
|
||||
}
|
||||
|
||||
public async setupFfmpegInputs(encryptedPath: string): Promise<string[]> {
|
||||
// url is in a weird format
|
||||
// only things we care about is the uuid and file extension i think?
|
||||
// i dont wanna use the original file name because what if. what if theres a collision
|
||||
const extension = this.artwork.slice(this.artwork.lastIndexOf(".") + 1);
|
||||
const uuid = this.artwork.split("/").at(-3);
|
||||
|
||||
if (uuid === undefined) { throw new Error("could not get uuid from artwork url!"); }
|
||||
|
||||
const imageFileName = `${uuid}.${extension}`;
|
||||
const imagePath = path.join(config.downloader.cache.directory, imageFileName);
|
||||
|
||||
if (!isCached(imageFileName)) {
|
||||
const response = await fetch(this.artwork);
|
||||
|
||||
if (!response.ok) { throw new Error(`failed to fetch artwork: ${response.status}`); }
|
||||
if (!response.body) { throw new Error("no response body for artwork!"); }
|
||||
|
||||
await pipeline(response.body as ReadableStream, createWriteStream(imagePath));
|
||||
|
||||
addToCache(imageFileName);
|
||||
}
|
||||
public async setupFfmpegInputs(audioInput: string): Promise<string[]> {
|
||||
const albumCover = await downloadAlbumCover(this.albumAttributes);
|
||||
|
||||
return [
|
||||
"-i", encryptedPath,
|
||||
"-i", imagePath,
|
||||
"-i", audioInput,
|
||||
"-i", albumCover,
|
||||
"-map", "0",
|
||||
"-map", "1",
|
||||
"-disposition:v", "attached_pic",
|
||||
|
|
|
|||
|
|
src/downloader/format.ts

@@ -1,4 +1,8 @@
import type { SongAttributes } from "../appleMusicApi/types/attributes.js";
import type { AlbumAttributes, SongAttributes } from "../appleMusicApi/types/attributes.js";

// TODO: make these configurable, too opinionated right now
// eventually i'll make an account system? maybe you could do through there
// or i'll just make it config on the server

const illegalCharReplacements: Record<string, string> = {
    "?": "?",

@@ -13,10 +17,11 @@ const illegalCharReplacements: Record<string, string> = {
    "|": "|"
};

// TODO: make these configurable, too opinionated right now
// eventually i'll make an account system? maybe you could do through there
// or i'll just make it config on the server
export function formatSong(trackAttributes: SongAttributes<[]>): string {
export function stripAlbumGarbage(input: string): string {
    return input.replace(/- (EP|Single)$/, "").trim();
}

export function formatSongForFs(trackAttributes: SongAttributes<[]>): string {
    const title = trackAttributes.name.replace(/[?!*\/\\:"<>|]/g, (match) => illegalCharReplacements[match] || match);
    const disc = trackAttributes.discNumber;
    const track = trackAttributes.trackNumber;

@@ -26,3 +31,10 @@ export function formatSong(trackAttributes: SongAttributes<[]>): string {

    return `${disc}-${track.toString().padStart(2, "0")} - ${title}`;
}

export function formatAlbumForFs(albumAttributes: AlbumAttributes<[]>): string {
    const artist = albumAttributes.artistName.replace(/[?!*\/\\:"<>|]/g, (match) => illegalCharReplacements[match] || match);
    const album = stripAlbumGarbage(albumAttributes.name).replace(/[?!*\/\\:"<>|]/g, (match) => illegalCharReplacements[match] || match);

    return `${artist} - ${album}`;
}
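a small usage sketch for the new filesystem-name helpers above; the attribute objects are hypothetical literals carrying only the fields these helpers read, and the import paths assume running from the repository root:

```typescript
// usage sketch; the attribute objects are hypothetical literals, real ones come
// from the apple music api responses
import { formatAlbumForFs, formatSongForFs, stripAlbumGarbage } from "./src/downloader/format.js";
import type { AlbumAttributes, SongAttributes } from "./src/appleMusicApi/types/attributes.js";

const album = { artistName: "some artist", name: "cool songs - EP" } as unknown as AlbumAttributes<[]>;
const track = { name: "a song?!", discNumber: 1, trackNumber: 3 } as unknown as SongAttributes<[]>;

console.log(stripAlbumGarbage(album.name)); // "cool songs"
console.log(formatAlbumForFs(album));       // "some artist - cool songs"
console.log(formatSongForFs(track));        // "1-03 - a song?!" with illegal characters swapped for their replacements
```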
|||
|
|
@ -1,46 +1,35 @@
|
|||
import { config } from "../config.js";
|
||||
import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { addToCache, isCached } from "../cache.js";
|
||||
import { addFileToCache, isFileCached } from "../cache.js";
|
||||
import type { RegularCodecType, WebplaybackCodecType } from "./codecType.js";
|
||||
import type { GetSongResponse } from "../appleMusicApi/types/responses.js";
|
||||
import { FileMetadata } from "./fileMetadata.js";
|
||||
import { createDecipheriv } from "node:crypto";
|
||||
import * as log from "../log.js";
|
||||
import type { AlbumAttributes } from "../appleMusicApi/types/attributes.js";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import { createWriteStream } from "node:fs";
|
||||
|
||||
export async function downloadSongFile(streamUrl: string, decryptionKey: string, songCodec: RegularCodecType | WebplaybackCodecType, songResponse: GetSongResponse<[], ["albums"]>): Promise<string> {
|
||||
log.debug("downloading song file and hopefully decrypting it");
|
||||
log.debug({ streamUrl: streamUrl, songCodec: songCodec });
|
||||
|
||||
let baseOutputName = streamUrl.match(/(?:.*\/)\s*(\S*?)[.?]/)?.[1];
|
||||
if (!baseOutputName) { throw new Error("could not get base output name from stream url!"); }
|
||||
baseOutputName += `_${songCodec}`;
|
||||
const encryptedName = baseOutputName + "_enc.mp4";
|
||||
const encryptedPath = path.join(config.downloader.cache.directory, encryptedName);
|
||||
const decryptedName = baseOutputName + ".m4a";
|
||||
const decryptedPath = path.join(config.downloader.cache.directory, decryptedName);
|
||||
|
||||
if ( // TODO: remove check for encrypted file/cache for encrypted?
|
||||
isCached(encryptedName) &&
|
||||
isCached(decryptedName)
|
||||
) { return decryptedPath; }
|
||||
if (await isFileCached(decryptedName)) { return decryptedPath; }
|
||||
|
||||
await new Promise<void>((res, rej) => {
|
||||
const child = spawn(config.downloader.ytdlp_path, [
|
||||
"--quiet",
|
||||
"--no-warnings",
|
||||
"--allow-unplayable-formats",
|
||||
"--fixup", "never",
|
||||
"--paths", config.downloader.cache.directory,
|
||||
"--output", encryptedName,
|
||||
streamUrl
|
||||
]);
|
||||
child.on("error", (err) => { rej(err); });
|
||||
child.stderr.on("data", (data) => { rej(new Error(data.toString().trim())); });
|
||||
child.on("exit", () => { res(); });
|
||||
});
|
||||
|
||||
addToCache(encryptedName);
|
||||
const ytdlp = spawn(config.downloader.ytdlp_path, [
|
||||
"--quiet",
|
||||
"--no-warnings",
|
||||
"--allow-unplayable-formats",
|
||||
"--fixup", "never",
|
||||
"--paths", config.downloader.cache.directory,
|
||||
"--output", "-",
|
||||
streamUrl
|
||||
], { stdio: ["ignore", "pipe", "pipe"] });
|
||||
ytdlp.on("error", (err) => { throw err; });
|
||||
ytdlp.stderr.on("data", (data) => { throw new Error(data.toString().trim()); });
|
||||
|
||||
const fileMetadata = FileMetadata.fromSongResponse(songResponse);
|
||||
|
||||
|
|
@ -49,17 +38,18 @@ export async function downloadSongFile(streamUrl: string, decryptionKey: string,
|
|||
"-loglevel", "error",
|
||||
"-y",
|
||||
"-decryption_key", decryptionKey,
|
||||
...await fileMetadata.setupFfmpegInputs(encryptedPath),
|
||||
...await fileMetadata.setupFfmpegInputs("pipe:0"),
|
||||
...await fileMetadata.toFfmpegArgs(),
|
||||
"-movflags", "+faststart",
|
||||
decryptedPath
|
||||
]);
|
||||
], { stdio: ["pipe", "pipe", "pipe"] });
|
||||
ytdlp.stdout.pipe(child.stdin);
|
||||
child.on("error", (err) => { rej(err); });
|
||||
child.stderr.on("data", (data) => { rej(new Error(data.toString().trim())); });
|
||||
child.on("exit", () => { res(); } );
|
||||
});
|
||||
|
||||
addToCache(decryptedName);
|
||||
await addFileToCache(decryptedName);
|
||||
|
||||
return decryptedPath;
|
||||
}
|
||||
|
|
@ -69,11 +59,7 @@ export async function downloadSongFile(streamUrl: string, decryptionKey: string,
|
|||
// TODO: less mem alloc/access
|
||||
// TODO: use actual atom scanning. what if the magic bytes appear in a sample
|
||||
export async function fetchAndDecryptStreamSegment(segmentUrl: string, decryptionKey: string, fetchLength: number, offset: number): Promise<Uint8Array> {
|
||||
log.debug("downloading and hopefully decrypting stream segment");
|
||||
log.debug({ segmentUrl: segmentUrl, offset: offset, fetchLength: fetchLength });
|
||||
|
||||
const response = await fetch(segmentUrl, { headers: { "range": `bytes=${offset}-${offset + fetchLength - 1}` }});
|
||||
|
||||
const file = new Uint8Array(await response.arrayBuffer());
|
||||
|
||||
// this translates to "moof"
|
||||
|
|
@ -122,6 +108,31 @@ export async function fetchAndDecryptStreamSegment(segmentUrl: string, decryptio
|
|||
return file;
|
||||
}
|
||||
|
||||
export async function downloadAlbumCover(albumAttributes: AlbumAttributes<[]>): Promise<string> {
|
||||
const url = albumAttributes.artwork.url
|
||||
.replace("{w}", albumAttributes.artwork.width.toString())
|
||||
.replace("{h}", albumAttributes.artwork.height.toString());
|
||||
const name = albumAttributes.playParams?.id;
|
||||
const extension = url.slice(url.lastIndexOf(".") + 1);
|
||||
|
||||
if (!name) { throw new Error("no artwork name found! this may indicate the album isn't accessible w/ your subscription!"); }
|
||||
|
||||
const imageFileName = `${name}.${extension}`;
|
||||
const imagePath = path.join(config.downloader.cache.directory, imageFileName);
|
||||
|
||||
if (await isFileCached(imageFileName) === false) {
|
||||
const response = await fetch(url);
|
||||
|
||||
if (!response.ok) { throw new Error(`failed to fetch artwork: ${response.status}`); }
|
||||
if (!response.body) { throw new Error("no response body for artwork!"); }
|
||||
|
||||
await pipeline(response.body as ReadableStream, createWriteStream(imagePath));
|
||||
await addFileToCache(imageFileName);
|
||||
}
|
||||
|
||||
return imagePath;
|
||||
}
|
||||
|
||||
interface IvValue {
|
||||
value: Buffer;
|
||||
subsamples: Subsample[];
|
||||
|
|
|
|||
|
|
@ -4,11 +4,8 @@ import z from "zod";
|
|||
import { CodecType, regularCodecTypeSchema, webplaybackCodecTypeSchema, type RegularCodecType, type WebplaybackCodecType } from "../../../downloader/codecType.js";
|
||||
import { appleMusicApi } from "../../../appleMusicApi/index.js";
|
||||
import StreamInfo from "../../../downloader/streamInfo.js";
|
||||
import hls from "parse-hls";
|
||||
import { paths } from "../../openApi.js";
|
||||
|
||||
type M3u8 = ReturnType<typeof hls.default.parse>;
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
const path = "/convertPlaylist";
|
||||
|
|
@ -33,35 +30,17 @@ paths[path] = {
|
|||
router.get(path, async (req, res, next) => {
|
||||
try {
|
||||
const { id, codec } = (await validate(req, schema)).query;
|
||||
|
||||
const codecType = new CodecType(codec);
|
||||
|
||||
let m3u8Parsed: M3u8;
|
||||
let streamUrl: string;
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await (codecType.regularOrWebplayback === "regular"
|
||||
? StreamInfo.fromTrackMetadata(trackAttributes, codecType.codecType as RegularCodecType)
|
||||
: StreamInfo.fromWebplayback(await appleMusicApi.getWebplayback(id), codecType.codecType as WebplaybackCodecType)
|
||||
);
|
||||
|
||||
if (codecType.regularOrWebplayback === "regular") {
|
||||
const regularCodec = codecType.codecType as RegularCodecType; // safe cast, zod
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await StreamInfo.fromTrackMetadata(trackAttributes, regularCodec);
|
||||
|
||||
m3u8Parsed = streamInfo.streamParsed;
|
||||
streamUrl = streamInfo.streamUrl;
|
||||
} else if (codecType.regularOrWebplayback === "webplayback") {
|
||||
const webplaybackCodec = codecType.codecType as WebplaybackCodecType; // safe cast, zod
|
||||
const webplaybackResponse = await appleMusicApi.getWebplayback(id);
|
||||
const streamInfo = await StreamInfo.fromWebplayback(webplaybackResponse, webplaybackCodec);
|
||||
|
||||
m3u8Parsed = streamInfo.streamParsed;
|
||||
streamUrl = streamInfo.streamUrl;
|
||||
} else {
|
||||
// TODO: this is unreachable
|
||||
// typescript doesn't think so
|
||||
// i think its because of the "let"s why we need this
|
||||
// fucks up our planned de-dupe on every use of `regularOrWebplayback`
|
||||
// damn !
|
||||
throw new Error("invalid codec type!");
|
||||
}
|
||||
const m3u8Parsed = streamInfo.streamParsed;
|
||||
const streamUrl = streamInfo.streamUrl;
|
||||
|
||||
const ogMp4Name = m3u8Parsed.segments[0].uri;
|
||||
const ogMp4Url = streamUrl.substring(0, streamUrl.lastIndexOf("/")) + "/" + ogMp4Name;
|
||||
|
|
|
|||
|
|
@ -7,7 +7,8 @@ import { z } from "zod";
|
|||
import { validate } from "../../validate.js";
|
||||
import { CodecType, regularCodecTypeSchema, webplaybackCodecTypeSchema, type RegularCodecType, type WebplaybackCodecType } from "../../../downloader/codecType.js";
|
||||
import { paths } from "../../openApi.js";
|
||||
import { formatSong } from "../../../downloader/format.js";
|
||||
import { formatSongForFs } from "../../../downloader/format.js";
|
||||
import { addKeyToCache, getKeyFromCache } from "../../../cache.js";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
|
|
@ -39,44 +40,29 @@ router.get(path, async (req, res, next) => {
|
|||
|
||||
const codecType = new CodecType(codec);
|
||||
|
||||
if (codecType.regularOrWebplayback === "regular") {
|
||||
const regularCodec = codecType.codecType as RegularCodecType; // safe cast, zod
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await StreamInfo.fromTrackMetadata(trackAttributes, regularCodec);
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await (codecType.regularOrWebplayback === "regular"
|
||||
? StreamInfo.fromTrackMetadata(trackAttributes, codecType.codecType as RegularCodecType)
|
||||
: StreamInfo.fromWebplayback(await appleMusicApi.getWebplayback(id), codecType.codecType as WebplaybackCodecType)
|
||||
);
|
||||
|
||||
if (streamInfo.widevinePssh !== undefined) {
|
||||
const decryptionKey = await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
|
||||
const filePath = await downloadSongFile(streamInfo.streamUrl, decryptionKey, regularCodec, trackMetadata);
|
||||
const fileExt = "." + filePath.split(".").at(-1) as string; // safe cast, filePath is always a valid path
|
||||
const fileName = formatSong(trackAttributes) + fileExt;
|
||||
|
||||
res.attachment(fileName);
|
||||
res.sendFile(filePath, { root: "." });
|
||||
} else {
|
||||
throw new Error("no decryption key found for regular codec! this is typical. don't fret!");
|
||||
}
|
||||
} else if (codecType.regularOrWebplayback === "webplayback") {
|
||||
const webplaybackCodec = codecType.codecType as WebplaybackCodecType; // safe cast, zod
|
||||
const webplaybackResponse = await appleMusicApi.getWebplayback(id);
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await StreamInfo.fromWebplayback(webplaybackResponse, webplaybackCodec);
|
||||
|
||||
if (streamInfo.widevinePssh !== undefined) {
|
||||
const decryptionKey = await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
|
||||
const filePath = await downloadSongFile(streamInfo.streamUrl, decryptionKey, webplaybackCodec, trackMetadata);
|
||||
const fileExt = "." + filePath.split(".").at(-1) as string; // safe cast, filePath is always a valid path
|
||||
const fileName = formatSong(trackAttributes) + fileExt;
|
||||
|
||||
res.attachment(fileName);
|
||||
res.sendFile(filePath, { root: "." });
|
||||
} else {
|
||||
throw new Error("no decryption key found for web playback! this should not happen..");
|
||||
}
|
||||
if (streamInfo.widevinePssh === undefined) {
|
||||
if (codecType.regularOrWebplayback === "regular") { throw new Error("failed to get widevine pssh, this is typical"); }
|
||||
else { throw new Error("failed to get widevine pssh for web playback, this should not happen.."); }
|
||||
}
|
||||
|
||||
const decryptionKey =
|
||||
await getKeyFromCache(id, codecType.codecType) ||
|
||||
await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
await addKeyToCache(id, codecType.codecType, decryptionKey);
|
||||
|
||||
const filePath = await downloadSongFile(streamInfo.streamUrl, decryptionKey, codecType.codecType, trackMetadata);
|
||||
const fileExt = "." + filePath.split(".").at(-1) as string; // safe cast, filePath is always a valid path
|
||||
const fileName = formatSongForFs(trackAttributes) + fileExt;
|
||||
|
||||
res.attachment(fileName);
|
||||
res.sendFile(filePath, { root: "." });
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
|
|
|
|||
src/web/endpoints/back/downloadAlbum.ts (new file, 105 lines)

@@ -0,0 +1,105 @@
import express from "express";
import { paths } from "../../openApi.js";
import { CodecType, regularCodecTypeSchema, webplaybackCodecTypeSchema, type RegularCodecType, type WebplaybackCodecType } from "../../../downloader/codecType.js";
import z from "zod";
import { validate } from "../../validate.js";
import StreamInfo from "../../../downloader/streamInfo.js";
import { appleMusicApi } from "../../../appleMusicApi/index.js";
import { getWidevineDecryptionKey } from "../../../downloader/keygen.js";
import { downloadAlbumCover, downloadSongFile } from "../../../downloader/index.js";
import { formatAlbumForFs, formatSongForFs } from "../../../downloader/format.js";
import archiver from "archiver";
import { addKeyToCache, getKeyFromCache } from "../../../cache.js";

const router = express.Router();

const path = "/downloadAlbum";
const schema = z.object({
    query: z.object({
        id: z.string(),
        codec: z.enum([...regularCodecTypeSchema.options, ...webplaybackCodecTypeSchema.options])
    })
});

paths[path] = {
    get: {
        requestParams: { query: schema.shape.query },
        responses: {
            200: { description: "returns an album in a zip" },
            400: { description: "bad request, invalid query parameters. sent as a zod error with details" },
            default: { description: "upstream api error, or some other error" }
        }
    }
};

interface AlbumEntry {
    path: string;
    name: string;
}

// TODO: include album art?
router.get(path, async (req, res, next) => {
    try {
        const { id, codec } = (await validate(req, schema)).query;

        const files: AlbumEntry[] = [];

        const albumMetadata = await appleMusicApi.getAlbum(id);
        const albumAttributes = albumMetadata.data[0].attributes;
        const tracks = albumMetadata.data[0].relationships.tracks.data;

        for (const track of tracks) {
            const trackId = track.attributes.playParams?.id;
            if (trackId === undefined) { throw new Error("track id gone, this may indicate your song isn't accessible w/ your subscription!"); }

            const codecType = new CodecType(codec);

            const trackMetadata = await appleMusicApi.getSong(trackId);
            const trackAttributes = trackMetadata.data[0].attributes;
            const streamInfo = await (codecType.regularOrWebplayback === "regular"
                ? StreamInfo.fromTrackMetadata(trackAttributes, codecType.codecType as RegularCodecType)
                : StreamInfo.fromWebplayback(await appleMusicApi.getWebplayback(trackId), codecType.codecType as WebplaybackCodecType)
            );

            if (streamInfo.widevinePssh === undefined) {
                if (codecType.regularOrWebplayback === "regular") { throw new Error("failed to get widevine pssh, this is typical"); }
                else { throw new Error("failed to get widevine pssh for web playback, this should not happen.."); }
            }

            const decryptionKey =
                await getKeyFromCache(trackId, codecType.codecType) ||
                await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
            await addKeyToCache(trackId, codecType.codecType, decryptionKey);

            const filePath = await downloadSongFile(streamInfo.streamUrl, decryptionKey, codecType.codecType, trackMetadata);
            const fileExt = "." + filePath.split(".").at(-1) as string; // safe cast, filePath is always a valid path
            const fileName = formatSongForFs(trackAttributes) + fileExt;

            files.push({
                path: filePath,
                name: fileName
            });
        }

        const fileName = formatAlbumForFs(albumAttributes) + ".zip";
        const zipArchiver = archiver("zip");

        zipArchiver.on("error", (err) => { throw err; });
        zipArchiver.pipe(res);

        for (const file of files) {
            zipArchiver.file(file.path, { name: file.name });
        }

        const albumCover = await downloadAlbumCover(albumAttributes);
        const albumCoverExt = albumCover.slice(albumCover.lastIndexOf(".") + 1);
        zipArchiver.file(await downloadAlbumCover(albumAttributes), { name: `cover.${albumCoverExt}` });
        zipArchiver.finalize();

        res.attachment(fileName);
    } catch (err) {
        next(err);
    }
});

export default router;
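for reference, a hedged sketch of calling the new endpoint from a client; the base url, album id, and output path are placeholders, while the route, query parameters, and codec name come from the form and config shown elsewhere in this commit:

```typescript
// hypothetical client-side use of the new endpoint; base url, album id and
// output path are placeholders. port 2000 and aac_legacy are the defaults from
// config.toml, and /api/downloadAlbum is the route the downloadAlbum.handlebars
// form submits to.
import { createWriteStream } from "node:fs";
import { pipeline } from "node:stream/promises";

const url = new URL("http://localhost:2000/api/downloadAlbum");
url.searchParams.set("id", "123456789");     // placeholder apple music album id
url.searchParams.set("codec", "aac_legacy");

const response = await fetch(url);
if (!response.ok) { throw new Error(`album download failed: ${response.status}`); }
if (!response.body) { throw new Error("no response body for the album zip!"); }

// the endpoint streams back a zip of the album's tracks plus cover art
await pipeline(response.body as ReadableStream, createWriteStream("album.zip"));
```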
@ -7,6 +7,7 @@ import StreamInfo from "../../../downloader/streamInfo.js";
|
|||
import { appleMusicApi } from "../../../appleMusicApi/index.js";
|
||||
import { getWidevineDecryptionKey } from "../../../downloader/keygen.js";
|
||||
import { fetchAndDecryptStreamSegment } from "../../../downloader/index.js";
|
||||
import { addKeyToCache, getKeyFromCache } from "../../../cache.js";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
|
|
@ -54,40 +55,28 @@ router.get(path, async (req, res, next) => {
|
|||
|
||||
const codecType = new CodecType(codec);
|
||||
|
||||
if (codecType.regularOrWebplayback === "regular") {
|
||||
const regularCodec = codecType.codecType as RegularCodecType; // safe cast, zod
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await StreamInfo.fromTrackMetadata(trackAttributes, regularCodec);
|
||||
const trackMetadata = await appleMusicApi.getSong(id);
|
||||
const trackAttributes = trackMetadata.data[0].attributes;
|
||||
const streamInfo = await (codecType.regularOrWebplayback === "regular"
|
||||
? StreamInfo.fromTrackMetadata(trackAttributes, codecType.codecType as RegularCodecType)
|
||||
: StreamInfo.fromWebplayback(await appleMusicApi.getWebplayback(id), codecType.codecType as WebplaybackCodecType)
|
||||
);
|
||||
|
||||
if (streamInfo.widevinePssh !== undefined) {
|
||||
const decryptionKey = await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
const file = await fetchAndDecryptStreamSegment(originalMp4, decryptionKey, end - start + 1, start);
|
||||
|
||||
res.setHeader("Content-Type", "application/mp4");
|
||||
res.setHeader("Content-Range", `bytes ${start}-${end}/*`);
|
||||
res.setHeader("Accept-Ranges", "bytes");
|
||||
res.status(206).send(file);
|
||||
} else {
|
||||
throw new Error("no decryption key found for regular codec! this is typical. don't fret!");
|
||||
}
|
||||
} else if (codecType.regularOrWebplayback === "webplayback") {
|
||||
const webplaybackCodec = codecType.codecType as WebplaybackCodecType; // safe cast, zod
|
||||
const webplaybackResponse = await appleMusicApi.getWebplayback(id);
|
||||
const streamInfo = await StreamInfo.fromWebplayback(webplaybackResponse, webplaybackCodec);
|
||||
|
||||
if (streamInfo.widevinePssh !== undefined) {
|
||||
const decryptionKey = await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
const file = await fetchAndDecryptStreamSegment(originalMp4, decryptionKey, end - start + 1, start);
|
||||
|
||||
res.setHeader("Content-Type", "application/mp4");
|
||||
res.setHeader("Content-Range", `bytes ${start}-${end}/*`);
|
||||
res.setHeader("Accept-Ranges", "bytes");
|
||||
res.status(206).send(file);
|
||||
} else {
|
||||
throw new Error("no decryption key found for web playback! this should not happen..");
|
||||
}
|
||||
if (streamInfo.widevinePssh === undefined) {
|
||||
if (codecType.regularOrWebplayback === "regular") { throw new Error("failed to get widevine pssh, this is typical"); }
|
||||
else { throw new Error("failed to get widevine pssh for web playback, this should not happen.."); }
|
||||
}
|
||||
|
||||
const decryptionKey =
|
||||
await getKeyFromCache(id, codecType.codecType) ||
|
||||
await getWidevineDecryptionKey(streamInfo.widevinePssh, streamInfo.trackId);
|
||||
await addKeyToCache(id, codecType.codecType, decryptionKey);
|
||||
|
||||
const file = await fetchAndDecryptStreamSegment(originalMp4, decryptionKey, end - start + 1, start);
|
||||
res.setHeader("Content-Type", "application/mp4");
|
||||
res.setHeader("Content-Range", `bytes ${start}-${end}/*`);
|
||||
res.setHeader("Accept-Ranges", "bytes");
|
||||
res.status(206).send(file);
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
|
|
|
|||
|
|
src/web/endpoints/front/download.ts

@@ -16,7 +16,7 @@ router.get("/download", async (req, res, next) => {
        const { id } = (await validate(req, schema)).query;

        res.render("download", {
            title: "download",
            title: "download track",
            codecs: config.server.frontend.displayed_codecs,
            id: id
        });
src/web/endpoints/front/downloadAlbum.ts (new file, 28 lines)

@@ -0,0 +1,28 @@
import express from "express";
import { validate } from "../../validate.js";
import { z } from "zod";
import { config } from "../../../config.js";

const router = express.Router();

const schema = z.object({
    query: z.object({
        id: z.string()
    })
});

router.get("/downloadAlbum", async (req, res, next) => {
    try {
        const { id } = (await validate(req, schema)).query;

        res.render("downloadAlbum", {
            title: "download album",
            codecs: config.server.frontend.displayed_codecs,
            id: id
        });
    } catch (err) {
        next(err);
    }
});

export default router;
src/web/endpoints/front/search.ts

@@ -42,6 +42,8 @@ router.get("/", async (req, res, next) => {
            name: name,
            artists: [artistName],
            cover: cover,
            id: result.attributes.playParams?.id,
            isAlbum: true,
            tracks: tracks.map((track) => {
                const { artistName, name, durationInMillis, discNumber, trackNumber } = track.attributes;

@@ -52,7 +54,8 @@
                artists: [artistName],
                duration: durationInMillis,
                cover: cover,
                id: track.attributes.playParams?.id
                id: track.attributes.playParams?.id,
                isAlbum: false
            };
        })
    };
@@ -1,15 +1,18 @@
import documentation from "./front/documentation.js";
import frontDownload from "./front/download.js";
import frontDownloadAlbum from "./front/downloadAlbum.js";
import search from "./front/search.js";
export const front = [
    documentation,
    frontDownload,
    frontDownloadAlbum,
    search
];

import backDownload from "./back/download.js";
import convertPlaylist from "./back/convertPlaylist.js";
import downloadSegment from "./back/downloadSegment.js";
import downloadAlbum from "./back/downloadAlbum.js";
import getAlbumMetadata from "./back/getAlbumMetadata.js";
import getPlaylistMetadata from "./back/getPlaylistMetadata.js";
import getTrackMetadata from "./back/getTrackMetadata.js";

@@ -17,6 +20,7 @@ export const back = [
    backDownload,
    convertPlaylist,
    downloadSegment,
    downloadAlbum,
    getAlbumMetadata,
    getPlaylistMetadata,
    getTrackMetadata
views/downloadAlbum.handlebars (new file, 9 lines)

@@ -0,0 +1,9 @@
<form class="download-form" action="/api/downloadAlbum" method="get">
    <select name="codec">
        {{#each codecs as |codec|}}
            <option value="{{codec}}">{{codec}}</option>
        {{/each}}
    </select>
    <input type="hidden" name="id" value="{{id}}">
    <input type="submit" value="download!">
</form>
@@ -1,5 +1,9 @@
{{#if id}}
    <a href="/download?id={{id}}">dl</a>
    {{#if isAlbum}}
        <a href="/downloadAlbum?id={{id}}">dl</a>
    {{else}}
        <a href="/download?id={{id}}">dl</a>
    {{/if}}
{{else}}
    <span class="light">dl</span>
{{/if}}
@@ -5,6 +5,7 @@
        <h2>{{name}}</h2>
        <span class="light">{{arrayJoin artists ", "}}</span>
    </div>
    {{> download}}
</div>
<hr>
<ol class="result-tracklist">