Added concurrency control for downloads using p-limit (max 6 simultaneous). Removed cli-progress and the per-download progress bars, since they don't work well with concurrent output. Simplified the axios requests in downloadJar (dropped the custom headers, which were only needed for cli-progress). Refactored Jenkins handling to use the API JSON endpoint instead of HTML scraping, as I had odd failures with a few Jenkins servers and this seems to fix them completely. Removed the cli-progress dependency; added p-limit.
const axios = require("axios");
const fs = require("fs");
const path = require("path");
const { pipeline } = require("stream/promises");
const cheerio = require("cheerio");
const SftpClient = require("ssh2-sftp-client");
const crypto = require("crypto");
const pLimit = require("p-limit").default;

// --- Config & Secrets ---
const CONFIG_PATH = "config.json";
const SECRET_PATH = "secret.key";

// Load or create encryption key
let secret;
if (!fs.existsSync(SECRET_PATH)) {
  secret = crypto.randomBytes(32);
  fs.writeFileSync(SECRET_PATH, secret.toString("hex"));
  console.log("🔐 Generated and saved a new secret key.");
} else {
  secret = Buffer.from(fs.readFileSync(SECRET_PATH, "utf-8"), "hex");
}

function encryptPassword(password) {
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv("aes-256-cbc", secret, iv);
  const encrypted = Buffer.concat([cipher.update(password, "utf8"), cipher.final()]);
  return {
    iv: iv.toString("hex"),
    password: encrypted.toString("hex"),
  };
}

function decryptPassword(encrypted) {
  const decipher = crypto.createDecipheriv(
    "aes-256-cbc",
    secret,
    Buffer.from(encrypted.iv, "hex")
  );
  const decrypted = Buffer.concat([
    decipher.update(Buffer.from(encrypted.password, "hex")),
    decipher.final(),
  ]);
  return decrypted.toString("utf8");
}

// Load config and auto-convert plaintext SFTP password
let config = JSON.parse(fs.readFileSync(CONFIG_PATH, "utf-8"));
const sftpConfig = config.sftp || { enabled: false };

if (sftpConfig.password && !sftpConfig.encryptedPassword) {
  sftpConfig.encryptedPassword = encryptPassword(sftpConfig.password);
  delete sftpConfig.password;
  config.sftp = sftpConfig;
  fs.writeFileSync(CONFIG_PATH, JSON.stringify(config, null, 2));
  console.log("🔐 Encrypted SFTP password saved to config.json.");
}
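
// For reference, a minimal config.json shape this script reads (field names taken
// from how `config` is used below; the values here are placeholders, not from the repo):
//
//   {
//     "global": { "downloadPath": "downloads" },
//     "sftp": {
//       "enabled": true,
//       "host": "example.com",
//       "port": 22,
//       "username": "minecraft",
//       "password": "plaintext-once",   // encrypted and replaced on first run
//       "privateKeyPath": "",           // optional; used instead of a password
//       "remotePath": "/"
//     },
//     "urls": ["https://modrinth.com/plugin/example"]
//   }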

// --- Utilities ---
const ensureDir = (dir) => {
  // recursive: true so a nested downloadPath (e.g. "data/downloads") can be created in one call
  if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true });
};
const DOWNLOAD_PATH = config.global.downloadPath || "downloads";
const PLUGIN_PATH = path.join(DOWNLOAD_PATH, "Plugins");
const SERVEREXEC_PATH = path.join(DOWNLOAD_PATH, "Serverexec");

[DOWNLOAD_PATH, PLUGIN_PATH, SERVEREXEC_PATH].forEach(ensureDir);

const downloadJar = async (url, name) => {
  const isServerJar = name === "server.jar";
  const destDir = isServerJar ? SERVEREXEC_PATH : PLUGIN_PATH;
  const filePath = path.join(destDir, name);

  if (fs.existsSync(filePath)) {
    console.log(`🟡 Skipped (already exists): ${name}`);
    return;
  }

  const response = await axios.get(url, { responseType: "stream" });

  await pipeline(
    response.data,
    fs.createWriteStream(filePath)
  );

  console.log(`✔️ Saved: ${name}`);
};

// --- Handle Jenkins ---
const handleJenkins = async (url) => {
  const apiURL = url.endsWith("/")
    ? url + "api/json?tree=artifacts[fileName,relativePath]"
    : url + "/api/json?tree=artifacts[fileName,relativePath]";
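
  // The tree filter above should make Jenkins return roughly:
  //   { "artifacts": [{ "fileName": "Example.jar", "relativePath": "build/libs/Example.jar" }] }
  // (only the fields consumed below; the example values are made up)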

  const data = await axios.get(apiURL).then((res) => res.data);

  const links = data.artifacts
    .filter((a) => a.fileName.endsWith(".jar"))
    .map((a) => a.relativePath);

  if (!links.length) throw new Error("No .jar artifacts found for Jenkins job");

  const base = new URL(url + (url.endsWith("/") ? "" : "/") + "artifact/");

  const preferred = ["paper", "spigot", "bukkit"];
  const skip = ["javadoc", "sources", "cli", "bootstrap", "mojangapi", "nashorn", "remapper", "fabric", "neoforge"];
  const essentialsOK = ["EssentialsX", "EssentialsXChat", "EssentialsXSpawn", "EssentialsXGeoIP"];

  const valid = links
    .map((href) => {
      const fileName = path.basename(href);
      const lower = fileName.toLowerCase();
      if (skip.some((term) => lower.includes(term))) return null;
      if (fileName.startsWith("EssentialsX")) {
        const baseName = fileName.split("-")[0];
        if (!essentialsOK.includes(baseName)) return null;
      }
      return { href, fileName: lower };
    })
    .filter(Boolean);

  for (const tag of preferred) {
    const pick = valid.find((f) => f.fileName.includes(tag));
    if (pick) {
      const finalURL = new URL(pick.href, base).href;
      await downloadJar(finalURL, path.basename(pick.href));
      return;
    }
  }

  // Fallback: all valid jars
  for (const f of valid) {
    await downloadJar(new URL(f.href, base).href, path.basename(f.href));
  }
};

// --- Handle GitHub ---
const handleGitHub = async (url) => {
  const match = url.match(/github\.com\/([^/]+\/[^/]+)/);
  if (!match) throw new Error("Invalid GitHub URL format");
  const repo = match[1];
  const apiURL = `https://api.github.com/repos/${repo}/releases/latest`;
  const { data } = await axios.get(apiURL);

  const preferred = data.assets.find((a) => a.name === "GeyserSkinManager-Spigot.jar");
  const fallback = data.assets.find((a) => a.name.endsWith(".jar"));

  const chosen = preferred || fallback;
  if (!chosen) throw new Error("No .jar assets found");

  await downloadJar(chosen.browser_download_url, chosen.name);
};

// --- Handle Modrinth ---
const handleModrinth = async (url) => {
  const match = url.match(/modrinth\.com\/plugin\/([^/]+)/);
  if (!match) throw new Error("Invalid Modrinth URL format");
  const project = match[1];

  const versions = await axios
    .get(`https://api.modrinth.com/v2/project/${project}/version`)
    .then((res) => res.data);
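
  // Each entry in `versions` is expected to look roughly like
  //   { "loaders": ["paper", "spigot"], "files": [{ "url": "...", "filename": "Example.jar" }] }
  // (only the fields used below; example values are illustrative)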

  // Pick the first (typically newest) version compatible with Spigot/Paper/etc
  const compatible = versions.find((v) =>
    (v.loaders || []).some((loader) =>
      ["spigot", "paper", "bukkit", "purpur", "folia"].includes(loader.toLowerCase())
    )
  );

  if (!compatible) throw new Error("No compatible Spigot/Paper version found");

  const file = compatible.files.find((f) => f.filename.endsWith(".jar"));
  if (!file) throw new Error("No .jar file in compatible version");

  await downloadJar(file.url, file.filename);
};

// --- Handle Direct (Mainly for Floodgate) ---
const handleDirect = async (url) => {
  let name = path.basename(url.split("?")[0]);

  if (url.includes("download.geysermc.org")) {
    name = "floodgate-spigot.jar";
  } else {
    try {
      const head = await axios.head(url);
      const disp = head.headers["content-disposition"];
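      // e.g. matches: filename="Example.jar", filename=Example.jar, filename*=UTF-8''Example.jar
      // (example header values for illustration only)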
      const match = disp?.match(/filename[^=]*=(?:UTF-8'')?["']?([^"';]+)/i);
      if (match) name = decodeURIComponent(match[1]);
    } catch {}
  }

  name = name.replace(/[<>:"/\\|?*\x00-\x1F]/g, "");
  await downloadJar(url, name);
};

// --- Handle PaperMC ---
const handlePaperMC = async (url) => {
  if (url.includes("hangar.papermc.io")) {
    const { data: html } = await axios.get(url);
    const $ = cheerio.load(html);

    const jarLink = $('a[href$=".jar"]').first().attr("href");
    if (!jarLink) throw new Error("❌ No .jar link found on Hangar page");

    const downloadURL = jarLink.startsWith("http")
      ? jarLink
      : `https://hangar.papermc.io${jarLink}`;

    const fileName = path.basename(downloadURL);
    await downloadJar(downloadURL, fileName);
    return;
  }
};

// --- Handle dev.bukkit.org ---
const handleBukkit = async (url) => {
  if (!url.includes("dev.bukkit.org")) {
    throw new Error("Not a dev.bukkit.org URL");
  }

  const html = await axios.get(url).then((res) => res.data);
  const $ = cheerio.load(html);

  let projectName = $("span.overflow-tip").text().trim() || "unknown-project";

  projectName = projectName
    .replace(/[<>:"/\\|?*\x00-\x1F()™©®]/g, "")
    .replace(/[^\w.-]/g, "_");

  const downloadLink = $("a.button.alt.fa-icon-download[href*='/files/latest']")
    .attr("href");

  if (!downloadLink) {
    throw new Error("No 'Download Latest File' link found on dev.bukkit.org page");
  }

  const fullDownloadLink = `https://dev.bukkit.org${downloadLink}`;
  console.log(`🔗 Found download link: ${fullDownloadLink}`);

  let filename = `${projectName}.jar`;

  try {
    const head = await axios.head(fullDownloadLink);
    const disp = head.headers["content-disposition"];
    const match = disp?.match(/filename[^=]*=(?:UTF-8'')?["']?([^"';]+)/i);
    if (match) filename = decodeURIComponent(match[1]);
  } catch (err) {
    console.log("Could not retrieve filename from headers, using project name.");
  }

  await downloadJar(fullDownloadLink, filename);
};

// --- Handle PaperMC Server DL ---
const downloadLatestPaperMC = async () => {
  console.log("📥 Downloading latest PaperMC server jar...");

  const projectURL = "https://api.papermc.io/v2/projects/paper";
  const { data: versionsData } = await axios.get(projectURL);
  const latestVersion = versionsData.versions[versionsData.versions.length - 1];

  const buildsURL = `${projectURL}/versions/${latestVersion}`;
  const { data: buildsData } = await axios.get(buildsURL);
  const latestBuild = buildsData.builds[buildsData.builds.length - 1];
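
  // Assumption: the v2 API lists versions and builds oldest-to-newest, so the
  // last array entry is treated as the latest release/build.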

  const jarInfoURL = `${projectURL}/versions/${latestVersion}/builds/${latestBuild}`;
  const { data: jarInfo } = await axios.get(jarInfoURL);

  const jarPath = jarInfo.downloads.application.name;
  const downloadURL = `https://api.papermc.io/v2/projects/paper/versions/${latestVersion}/builds/${latestBuild}/downloads/${jarPath}`;

  await downloadJar(downloadURL, "server.jar");
};

// --- Upload to SFTP ---
const uploadToSFTP = async () => {
  if (!sftpConfig.enabled) {
    console.log("📦 SFTP is disabled in config.");
    return;
  }

  const sftp = new SftpClient();
  const remote = sftpConfig.remotePath || "/";
  const connectOptions = {
    host: sftpConfig.host,
    port: sftpConfig.port || 22,
    username: sftpConfig.username,
  };

  if (sftpConfig.privateKeyPath && fs.existsSync(sftpConfig.privateKeyPath)) {
    connectOptions.privateKey = fs.readFileSync(sftpConfig.privateKeyPath);
  } else if (sftpConfig.encryptedPassword) {
    connectOptions.password = decryptPassword(sftpConfig.encryptedPassword);
  } else {
    throw new Error("Missing SFTP password or private key.");
  }

  const extractBaseName = (filename) => {
    return filename.replace(/[-_.](v?\d.*)?\.jar$/, "").trim();
  };
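
  // Example (hypothetical name): extractBaseName("EssentialsX-2.20.1.jar") === "EssentialsX",
  // so older remote builds of the same plugin are deleted before the new jar is uploaded.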

  try {
    await sftp.connect(connectOptions);

    const uploadFolder = async (localDir, remoteDir) => {
      const files = fs.readdirSync(localDir).filter(f => f.endsWith(".jar"));
      const remoteFiles = await sftp.list(remoteDir);
      const remoteJars = remoteFiles.filter(f => f.name.endsWith(".jar"));

      for (const file of files) {
        const baseName = extractBaseName(file);
        const toDelete = remoteJars.filter(r => extractBaseName(r.name) === baseName);
        for (const del of toDelete) {
          const fullPath = path.posix.join(remoteDir, del.name);
          await sftp.delete(fullPath);
          console.log(`🗑️ Deleted remote: ${fullPath}`);
        }

        const localPath = path.join(localDir, file);
        const remotePath = path.posix.join(remoteDir, file);
        await sftp.fastPut(localPath, remotePath);
        console.log(`⬆️ Uploaded to ${remoteDir}: ${file}`);
      }
    };

    await uploadFolder(PLUGIN_PATH, path.posix.join(remote, "plugins"));
    await uploadFolder(SERVEREXEC_PATH, remote);

  } catch (err) {
    console.error("❌ SFTP Error:", err.message);
  } finally {
    const timeout = new Promise((_, reject) =>
      setTimeout(() => reject(new Error("Timeout closing SFTP connection")), 5000)
    );
    try {
      await Promise.race([sftp.end(), timeout]);
      console.log("🔌 SFTP connection closed.");
      // Clean up local files after upload
      const deleteFilesInDir = (dir) => {
        fs.readdirSync(dir).forEach(file => {
          const filePath = path.join(dir, file);
          if (fs.lstatSync(filePath).isFile()) {
            fs.unlinkSync(filePath);
            console.log(`🧹 Deleted: ${filePath}`);
          }
        });
      };

      deleteFilesInDir(PLUGIN_PATH);
      deleteFilesInDir(SERVEREXEC_PATH);
    } catch (e) {
      console.warn("⚠️ Error or timeout closing SFTP:", e.message);
    }
  }
};

// --- Main ---
(async () => {
  ensureDir(DOWNLOAD_PATH);
  const existingFiles = fs.readdirSync(DOWNLOAD_PATH).filter(f => f.endsWith(".jar"));
  for (const file of existingFiles) {
    const filePath = path.join(DOWNLOAD_PATH, file);
    fs.unlinkSync(filePath);
    console.log(`🗑️ Deleted local file: ${file}`);
  }

  await downloadLatestPaperMC();

  console.log("\n🔍 Starting plugin downloads from configured URLs...");

  const limit = pLimit(6);

  await Promise.all(
    config.urls.map(url =>
      limit(async () => {
        try {
          // return await (not bare return) so a rejected handler is caught here
          // instead of rejecting the whole Promise.all
          if (url.includes("github.com")) return await handleGitHub(url);
          if (url.includes("modrinth.com")) return await handleModrinth(url);
          if (url.includes("papermc.io")) return await handlePaperMC(url);
          if (url.includes("dev.bukkit.org")) return await handleBukkit(url);
          if (url.includes("/job/")) return await handleJenkins(url);
          if (url.endsWith(".jar") || url.includes("download.geysermc.org")) return await handleDirect(url);
          console.warn(`⚠️ No handler matched: ${url}`);
        } catch (err) {
          console.error(`❌ Failed: ${url} (${err.message})`);
        }
      })
    )
  );

  await uploadToSFTP();
})();