fix: correct wrong filter

This commit is contained in:
Jan-Henrik 2026-03-10 09:57:21 +01:00
parent 96f60cf1b7
commit c8788e328b

View file

@@ -81,7 +81,7 @@ function rawMarkerPath(url: string): string { return `${rawDir(url)}/.source`; }
* different output from the same source + bbox. Forces a re-filter on the
* existing raw data without re-downloading.
*/
const FILTER_VERSION = 2;
const FILTER_VERSION = 3;
/** Marker file contents for a downloaded raw feed; records the source URL it came from. */
interface RawMarker { source: string }
/**
 * Marker for a per-city filtered extract: the source URL, the bounding box it
 * was filtered to, and the FILTER_VERSION that produced it (a mismatch forces
 * a re-filter without re-downloading).
 */
interface CityMarker { source: string; bbox: [number, number, number, number]; filterVersion: number }
@@ -369,8 +369,30 @@ async function filterGtfsForCity(
}
}
// ── Step 5: filter routes, calendar, calendar_dates ────────────────────────
// ── Step 5: filter routes, calendar, calendar_dates; copy agency/feed_info ──
// Collect agency IDs from the filtered routes so we can filter agency.txt.
const validAgencyIds = new Set<string>();
{
const src = path.join(rawDir, "routes.txt");
if (existsSync(src)) {
const lines = readFileSync(src, "utf8").split(/\r?\n/).filter((l) => l.trim());
if (lines.length >= 2) {
const idx = colIndex(lines[0]);
const routeIdCol = idx.get("route_id") ?? -1;
const agencyIdCol = idx.get("agency_id") ?? -1;
for (let i = 1; i < lines.length; i++) {
const fields = splitCsv(lines[i]);
if (validRouteIds.has(fields[routeIdCol] ?? "")) {
const aid = fields[agencyIdCol] ?? "";
if (aid) validAgencyIds.add(aid);
}
}
}
}
}
for (const [file, idCol, validIds] of [
["agency.txt", "agency_id", validAgencyIds],
["routes.txt", "route_id", validRouteIds],
["calendar.txt", "service_id", validServiceIds],
["calendar_dates.txt", "service_id", validServiceIds],
@@ -390,6 +412,12 @@ async function filterGtfsForCity(
writeFileSync(dest, out.join("\n") + "\n");
}
// ── Step 5b: copy feed_info.txt verbatim (not filterable, Valhalla may need it) ──
{
const src = path.join(rawDir, "feed_info.txt");
if (existsSync(src)) copyFileSync(src, path.join(destDir, "feed_info.txt"));
}
// ── Step 6: shapes.txt (large — stream-filter) ─────────────────────────────
if (validShapeIds.size > 0) {
await filterLargeCsv(