Mirror of https://github.com/stashapp/CommunityScrapers.git (synced 2025-12-10 00:41:16 -06:00)

[site_generator] update dependencies, prettier

parent: bf30f85c26
commit: 2097e95873
site_generator/package-lock.json (generated, 3272 changes)
File diff suppressed because it is too large.
site_generator/package.json:
@@ -3,10 +3,10 @@
   "private": true,
   "dependencies": {
     "fuse.js": "^7.1.0",
-    "glob": "^11.0.2",
-    "simple-git": "^3.28.0",
-    "yaml": "^2.8.0",
-    "zod": "^3.25.62"
+    "glob": "^12.0.0",
+    "simple-git": "^3.30.0",
+    "yaml": "^2.8.1",
+    "zod": "^4.1.12"
   },
   "scripts": {
     "dev": "serve site/",
@@ -17,11 +17,11 @@
     "ci-build": "npm run generate && npm run fuse-index"
   },
   "devDependencies": {
-    "@types/node": "^24.0.1",
-    "prettier": "^3.5.3",
-    "serve": "^14.2.4",
+    "@types/node": "^24.10.1",
+    "prettier": "^3.6.2",
+    "serve": "^14.2.5",
     "ts-node": "^10.9.2",
-    "ts-to-zod": "^3.15.0",
-    "typescript": "^5.8.3"
+    "ts-to-zod": "^5.0.1",
+    "typescript": "^5.9.3"
   }
 }
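The zod bump from ^3.25.62 to ^4.1.12 is a major-version jump, and it drives the schema edits further down in this commit: Zod 4 drops the single-argument z.record() overload, so every record must name its key schema explicitly. A minimal standalone sketch of that breaking change (illustrative, not code from this repo):

import * as z from "zod";

// Zod 3 allowed z.record(z.array(z.string())); string keys were implied.
// Zod 4 requires the key schema to be passed explicitly:
const tagMap = z.record(z.string(), z.array(z.string()));

tagMap.parse({ performers: ["a", "b"] }); // ok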
(site stylesheet)
@@ -48,7 +48,7 @@ details {
   white-space: pre;
 }
 mark {
-  color: #000
+  color: #000;
 }
 #top-btn {
   position: fixed;
(site search script)
@@ -159,7 +159,7 @@ async function search(searchValue) {
   console.debug(searchValue, results);
   const filterTable = results.map((result) => result.item);
   setTable(filterTable, searchValue);
-  window.location.hash = searchValue
+  window.location.hash = searchValue;
 }

 // parse scrapers.json
@@ -172,9 +172,11 @@ const fuseIndex = await fetch("assets/fuse-index.json")
   .then((data) => Fuse.parseIndex(data));
 fuse = new Fuse(rawScraperList, fuseConfig, fuseIndex);
 // if query in URL, jump automatically
-const query = window.location.hash.slice(1)
+const query = window.location.hash.slice(1);
 if (query) {
   searchInput.value = query;
   search(query);
 }
-searchInput.addEventListener("input", event => debounce(search(event.target.value), 300));
+searchInput.addEventListener("input", (event) =>
+  debounce(search(event.target.value), 300),
+);
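One readability note survives the reformat: the listener still calls search(event.target.value) immediately and hands its result to debounce, rather than debouncing the call itself. The conventional pattern wraps the handler once, as in this hedged sketch (the repo's own debounce helper is defined elsewhere and may differ; search and searchInput come from the surrounding file):

// Hypothetical debounce helper: delays fn until `wait` ms after the last call.
function debounce<A extends unknown[]>(fn: (...args: A) => void, wait: number) {
  let timer: ReturnType<typeof setTimeout> | undefined;
  return (...args: A) => {
    clearTimeout(timer);
    timer = setTimeout(() => fn(...args), wait);
  };
}

// Usage: register the wrapped function, so typing only triggers one search.
searchInput.addEventListener(
  "input",
  debounce((event: Event) => search((event.target as HTMLInputElement).value), 300),
);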
(validation module)
@@ -2,7 +2,7 @@ import { parse } from "yaml";
 import { readFileSync, writeFileSync } from "fs";
 import { ymlScraper } from "./types";
 import { glob } from "glob";
-import { z } from "zod";
+import * as z from "zod";
 import { ymlScraperSchema } from "./zodType";
 import { git } from "./git";
 import { exportScraper, scraperExport } from "./scraper";
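Zod 4 still ships the named z export alongside the module namespace, so the import swap above is stylistic rather than forced; both forms expose the same builders (quick sketch):

import * as z from "zod";
// Equivalent under Zod 4: import { z } from "zod";

const name = z.string().min(1);
console.log(name.parse("example")); // "example"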
@@ -45,12 +45,19 @@ async function parseRepository(
   return parseScrapers(allYmlFiles);
 }

-const mergeScraperArr = (oldScrapers: scraperExport[], newScrapers: scraperExport[]) => {
+const mergeScraperArr = (
+  oldScrapers: scraperExport[],
+  newScrapers: scraperExport[],
+) => {
   // iterate through newScrapers and delete from old if exists
-  const cleanOldScrapers = oldScrapers.filter((oldScraper) =>
-    !newScrapers.some((newScraper) => newScraper.filename === oldScraper.filename));
+  const cleanOldScrapers = oldScrapers.filter(
+    (oldScraper) =>
+      !newScrapers.some(
+        (newScraper) => newScraper.filename === oldScraper.filename,
+      ),
+  );
   return [...cleanOldScrapers, ...newScrapers];
-}
+};

 function validate(scraper: ymlScraper) {
   ymlScraperSchema.parse(scraper);
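The reformatted mergeScraperArr keeps its original semantics: entries from newScrapers replace old entries with the same filename, and everything else passes through. A self-contained sketch with a stand-in type (the real scraperExport has more fields):

type ScraperLite = { filename: string; name: string }; // stand-in for scraperExport

const mergeByFilename = (oldArr: ScraperLite[], newArr: ScraperLite[]) => [
  ...oldArr.filter((o) => !newArr.some((n) => n.filename === o.filename)),
  ...newArr,
];

const merged = mergeByFilename(
  [{ filename: "a.yml", name: "A" }, { filename: "b.yml", name: "B" }],
  [{ filename: "b.yml", name: "B (updated)" }],
);
console.log(merged.map((s) => s.name)); // ["A", "B (updated)"]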
@@ -77,12 +84,15 @@ export async function validateNewScrapers(): Promise<void> {
   try {
     readFileSync("scrapers-debug.json", "utf8");
   } catch {
-    console.log("no scrapers-debug.json found, cowardly refusing to do partial updates")
+    console.log(
+      "no scrapers-debug.json found, cowardly refusing to do partial updates",
+    );
     // run full validation
     return validateAllScrapers();
   }
   // get modified files
-  const newScrapers = await git.diff(["--name-only", "HEAD^1", "HEAD"])
+  const newScrapers = await git
+    .diff(["--name-only", "HEAD^1", "HEAD"])
     // skip empty lines
     .then((files) => files.split("\n").filter((file) => file.length))
     // skip files not in scrapers
@@ -92,27 +102,33 @@ export async function validateNewScrapers(): Promise<void> {
   // check if only yml files
   const nonYml = newScrapers.some((file) => !file.endsWith(".yml"));
   if (nonYml) {
-    console.log("non-yml files detected, cowardly refusing to do partial updates")
+    console.log(
+      "non-yml files detected, cowardly refusing to do partial updates",
+    );
     // run full validation
     return validateAllScrapers();
   }
   if (!newScrapers.length) {
-    console.log("no new scrapers detected, recycling old mdscrapers")
-    const oldScrapers = JSON.parse(readFileSync("scrapers-debug.json", "utf8")) as scraperExport[];
+    console.log("no new scrapers detected, recycling old mdscrapers");
+    const oldScrapers = JSON.parse(
+      readFileSync("scrapers-debug.json", "utf8"),
+    ) as scraperExport[];
     writeFileSync("site/assets/scrapers.json", JSON.stringify(oldScrapers));
     return;
   }
-  console.log("only validating new scrapers")
+  console.log("only validating new scrapers");
   const newValidScrapers = await parseScrapers(newScrapers)
     .then((undefScrapers) => undefScrapers.map(validate))
-    .then((scrapers) => scrapers.map(exportScraper));
+    .then((scrapers) => scrapers.map((s) => exportScraper(s as ymlScraper)));
   let newMdScrapers: scraperExport[] = await Promise.all(newValidScrapers);
   // merge with old scrapers
-  const oldScrapers = JSON.parse(readFileSync("scrapers-debug.json", "utf8")) as scraperExport[];
+  const oldScrapers = JSON.parse(
+    readFileSync("scrapers-debug.json", "utf8"),
+  ) as scraperExport[];
   let newScraperArr = mergeScraperArr(oldScrapers, newMdScrapers);
   newScraperArr = newScraperArr.sort((a, b) => (a.name > b.name ? 1 : -1));
   // export to files
   writeFileSync("scrapers-debug.json", JSON.stringify(newScraperArr, null, 2));
   writeFileSync("site/assets/scrapers.json", JSON.stringify(newScraperArr));
   console.log("PARTIAL VALIDATED");
 }
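The partial-update path relies on simple-git returning raw diff output as a string; the chained restructure above does not change that. A minimal standalone sketch of the same call (ref names as in the code above):

import { simpleGit } from "simple-git";

const git = simpleGit();
// List files changed in the most recent commit, dropping empty lines.
const changed = await git
  .diff(["--name-only", "HEAD^1", "HEAD"])
  .then((files) => files.split("\n").filter((file) => file.length));
console.log(changed.filter((file) => file.endsWith(".yml")));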
(entry point)
@@ -6,4 +6,4 @@ async function main() {
     await validateNewScrapers();
   } else await validateAllScrapers();
 }
-main()
+main();
(scraper metadata module)
@@ -139,7 +139,7 @@ async function getLastUpdate(scraper: ymlScraper): Promise<Date | false> {
     .catch((err) => false);
   const chosenPath = isFolder ? folder : filename;
   const latestUpdate = await git
-    .log({ file: "../scrapers/"+chosenPath, maxCount: 1 })
+    .log({ file: "../scrapers/" + chosenPath, maxCount: 1 })
     .then((gitLog) => gitLog?.latest);
   return latestUpdate ? new Date(latestUpdate.date) : false;
 }
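git.log here is simple-git's structured log; with maxCount: 1, the latest field holds the newest commit touching the given path. A hedged sketch of the same lookup (the path is illustrative):

import { simpleGit } from "simple-git";

const git = simpleGit();
const latestUpdate = await git
  .log({ file: "../scrapers/ExampleScraper.yml", maxCount: 1 })
  .then((gitLog) => gitLog?.latest);
console.log(latestUpdate ? new Date(latestUpdate.date) : false);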
(zodType.ts)
@@ -41,7 +41,7 @@ const replaceRegexSchema = z.object({
 const baseUrlScraperSchema = z.object({
   action: urlScrapeActionsSchema,
   scraper: z.string(),
-  queryURLReplace: z.record(z.array(replaceRegexSchema)).optional(),
+  queryURLReplace: z.record(z.string(), z.array(replaceRegexSchema)).optional(),
 });

 const byFragmentScraperSchema = baseUrlScraperSchema.extend({
@@ -61,6 +61,7 @@ export const byUrlScraperSchema = baseUrlScraperSchema.extend({
 });

 const xPathScraperSchema = z.record(
+  z.string(),
   z.object({
     fixed: z.string().optional(),
     selector: z.string().optional(),
@@ -127,7 +128,7 @@ export const anyScraperSchema = z.union([
   scriptScraperSchema,
 ]);

-export const ymlScraperSchema = z.record(z.any()).and(
+export const ymlScraperSchema = z.record(z.string(), z.any()).and(
   z.object({
     filename: z.string(),
     name: z.string(),
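These three hunks are the Zod 4 migration proper: each z.record now passes z.string() as the key schema, which is exactly what the single-argument form implied in Zod 3. A standalone check that the updated ymlScraperSchema shape still admits arbitrary extra keys (stand-in data, schema trimmed to the lines shown above):

import * as z from "zod";

const ymlScraperSchema = z.record(z.string(), z.any()).and(
  z.object({
    filename: z.string(),
    name: z.string(),
  }),
);

// The record half admits arbitrary keys; the object half pins the required ones.
ymlScraperSchema.parse({ filename: "x.yml", name: "X", sceneByURL: [] }); // ok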
(tsconfig.json)
@@ -2,7 +2,6 @@
   "compilerOptions": {
     "strict": true,
     "strictNullChecks": true,
-    "target": "ES2020",
-    "moduleResolution": "node"
+    "target": "ES2020"
   }
 }