The Big Three! (V1.1.1)
Added concurrency control for downloads using p-limit (max 6 simultaneous). Removed cli-progress and the download progress bars, since they shit the bed under concurrency. Simplified the axios requests in downloadJar (dropped the custom headers, which were only there for cli-progress). Refactored Jenkins handling to use the API JSON endpoint instead of HTML scraping, as I had weird failures with a few Jenkins servers and this seemed to fix them completely. Removed the cli-progress dependency; added the p-limit dependency.
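For anyone skimming, the concurrency change is the usual p-limit idiom: wrap each async task in limit() and await them all with Promise.all. A minimal, self-contained sketch of that pattern follows; the URL list and fakeDownload are stand-ins for illustration, not code from this repo.

const pLimit = require("p-limit").default; // same import style as index.js; p-limit 7 is ESM-only, so this relies on a Node version whose require() can load ESM

// Cap concurrency at 6; remaining tasks queue inside p-limit until a slot frees up.
const limit = pLimit(6);

// Stand-in for the real per-URL work (downloadJar / the handler dispatch).
const fakeDownload = async (url) => {
  await new Promise((resolve) => setTimeout(resolve, 100));
  console.log(`done: ${url}`);
};

(async () => {
  const urls = Array.from({ length: 20 }, (_, i) => `https://example.com/plugin-${i}.jar`);

  await Promise.all(
    urls.map((url) =>
      limit(async () => {
        try {
          await fakeDownload(url);
        } catch (err) {
          // Errors are caught per task, so one bad URL can't reject the whole Promise.all.
          console.error(`❌ Failed: ${err.message}`);
        }
      })
    )
  );
})();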
index.js (78 changed lines)
@@ -2,10 +2,10 @@ const axios = require("axios");
 const fs = require("fs");
 const path = require("path");
 const { pipeline } = require("stream/promises");
-const cliProgress = require("cli-progress");
 const cheerio = require("cheerio");
 const SftpClient = require("ssh2-sftp-client");
 const crypto = require("crypto");
+const pLimit = require("p-limit").default;
 
 // --- Config & Secrets ---
 const CONFIG_PATH = "config.json";
@@ -76,35 +76,34 @@ const downloadJar = async (url, name) => {
     console.log(`🟡 Skipped (already exists): ${name}`);
     return;
   }
-  const USER_AGENT = config.global?.userAgent || "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36";
-  const response = await axios.get(url, {
-    responseType: "stream",
-    headers: {
-      "User-Agent":
-        USER_AGENT,
-    },
-  });
-
-  const total = parseInt(response.headers["content-length"] || "0", 10);
-  const bar = new cliProgress.SingleBar({}, cliProgress.Presets.shades_classic);
-  bar.start(total, 0);
+  const response = await axios.get(url, { responseType: "stream" });
 
-  response.data.on("data", (chunk) => bar.increment(chunk.length));
-  response.data.on("end", () => bar.stop());
-  await pipeline(
-    response.data,
-    fs.createWriteStream(filePath)
-  );
+  await pipeline(response.data, fs.createWriteStream(filePath));
 
-  console.log(`✔️ Saved: ${filePath}`);
+  console.log(`✔️ Saved: ${name}`);
 };
 
 
 // --- Handle Jenkins ---
 const handleJenkins = async (url) => {
-  const html = await axios.get(url).then((res) => res.data);
-  const $ = cheerio.load(html);
-  const links = $("a[href$='.jar']").map((_, el) => $(el).attr("href")).get();
+  const apiURL = url.endsWith("/")
+    ? url + "api/json?tree=artifacts[fileName,relativePath]"
+    : url + "/api/json?tree=artifacts[fileName,relativePath]";
+
+  const data = await axios.get(apiURL).then((res) => res.data);
+
+  const links = data.artifacts
+    .filter((a) => a.fileName.endsWith(".jar"))
+    .map((a) => a.relativePath);
+
   if (!links.length) throw new Error("No .jar files found on page");
 
-  const base = new URL(url);
+  const base = new URL(url + (url.endsWith("/") ? "" : "/") + "artifact/");
 
   const preferred = ["paper", "spigot", "bukkit"];
   const skip = ["javadoc", "sources", "cli", "bootstrap", "mojangapi", "nashorn", "remapper", "fabric", "neoforge"];
   const essentialsOK = ["EssentialsX", "EssentialsXChat", "EssentialsXSpawn", "EssentialsXGeoIP"];
@@ -115,8 +114,8 @@ const handleJenkins = async (url) => {
       const lower = fileName.toLowerCase();
       if (skip.some((term) => lower.includes(term))) return null;
       if (fileName.startsWith("EssentialsX")) {
-        const base = fileName.split("-")[0];
-        if (!essentialsOK.includes(base)) return null;
+        const baseName = fileName.split("-")[0];
+        if (!essentialsOK.includes(baseName)) return null;
       }
       return { href, fileName: lower };
     })
@@ -373,29 +372,26 @@ const uploadToSFTP = async () => {
 
   await downloadLatestPaperMC();
 
-  console.log("\n🔍 Starting plugin downloads from configured URLs...");
-  for (const url of config.urls) {
-    console.log(`\n📥 ${url}`);
+  console.log("\n🔍 Starting plugin downloads from configured URLs...");
+
+  const limit = pLimit(6);
+
+  await Promise.all(
+    config.urls.map(url =>
+      limit(async () => {
         try {
-      if (url.includes("github.com")) {
-        await handleGitHub(url);
-      } else if (url.includes("modrinth.com")) {
-        await handleModrinth(url);
-      } else if (url.includes("papermc.io")) {
-        await handlePaperMC(url);
-      } else if (url.includes("dev.bukkit.org")) {
-        await handleBukkit(url);
-      } else if (url.includes("/job/")) {
-        await handleJenkins(url);
-      } else if (url.endsWith(".jar") || url.includes("download.geysermc.org")) {
-        await handleDirect(url);
-      } else {
-        console.warn("⚠️ Skipping unknown URL format.");
-      }
+          if (url.includes("github.com")) return handleGitHub(url);
+          if (url.includes("modrinth.com")) return handleModrinth(url);
+          if (url.includes("papermc.io")) return handlePaperMC(url);
+          if (url.includes("dev.bukkit.org")) return handleBukkit(url);
+          if (url.includes("/job/")) return handleJenkins(url);
+          if (url.endsWith(".jar") || url.includes("download.geysermc.org")) return handleDirect(url);
         } catch (err) {
           console.error(`❌ Failed: ${err.message}`);
         }
-  }
+      })
+    )
+  );
 
   await uploadToSFTP();
 })();
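Side note on the Jenkins change above: artifact names now come from the job's api/json endpoint (with tree=artifacts[fileName,relativePath]), and download URLs are resolved under the build's artifact/ path. A minimal standalone sketch of that request pattern, with a placeholder job URL and no error handling:

const axios = require("axios");

// Placeholder Jenkins job URL; "lastSuccessfulBuild" is a common build selector.
const jobURL = "https://ci.example.com/job/SomePlugin/lastSuccessfulBuild/";

(async () => {
  // The tree parameter limits the JSON response to the artifact list.
  const { data } = await axios.get(jobURL + "api/json?tree=artifacts[fileName,relativePath]");

  // Each artifact's relativePath resolves against <build>/artifact/ to form a download URL.
  const jars = data.artifacts
    .filter((a) => a.fileName.endsWith(".jar"))
    .map((a) => new URL("artifact/" + a.relativePath, jobURL).href);

  console.log(jars);
})();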
package.json
@@ -1,6 +1,6 @@
 {
   "name": "download-plugs",
-  "version": "1.1.0",
+  "version": "1.1.1",
   "main": "index.js",
   "scripts": {
     "run": "node index.js"
@@ -12,7 +12,7 @@
   "dependencies": {
     "axios": "^1.9.0",
     "cheerio": "^1.0.0",
-    "cli-progress": "^3.12.0",
+    "p-limit": "^7.2.0",
     "ssh2-sftp-client": "^12.0.0"
   }
 }