// nlw-api/index.js

import express from 'express';
import { fetchAllLevels as fetchNLWLevels } from './nlw.js';
import { fetchAllLevels as fetchIDSLevels } from './ids.js';
import fs from 'fs/promises';
import path from 'path';
import { request } from 'undici';
import PQueue from 'p-queue';
const cacheFolder = process.env.CACHE_DIR || './cache/';
let levels = {
  nlw: {
    regular: [],
    pending: [],
    platformer: [],
  },
  ids: {
    regular: [],
    platformer: [],
  },
  metadata: [],
};
function getAllLevels() {
  return [...levels.nlw.regular, ...levels.nlw.platformer, ...levels.nlw.pending, ...levels.ids.regular, ...levels.ids.platformer];
}
async function exists(f) {
  try {
    await fs.stat(f);
    return true;
  } catch {
    return false;
  }
}
// a bit awful but oh well
async function loadCache() {
  // make sure the cache folder exists so saveCache() doesn't fail on first run
  await fs.mkdir(cacheFolder, { recursive: true });
  if (await exists(path.join(cacheFolder, 'nlw-regular.json'))) levels.nlw.regular = JSON.parse(await fs.readFile(path.join(cacheFolder, 'nlw-regular.json'), 'utf8'));
  if (await exists(path.join(cacheFolder, 'nlw-pending.json'))) levels.nlw.pending = JSON.parse(await fs.readFile(path.join(cacheFolder, 'nlw-pending.json'), 'utf8'));
  if (await exists(path.join(cacheFolder, 'nlw-platformer.json'))) levels.nlw.platformer = JSON.parse(await fs.readFile(path.join(cacheFolder, 'nlw-platformer.json'), 'utf8'));
  if (await exists(path.join(cacheFolder, 'ids-regular.json'))) levels.ids.regular = JSON.parse(await fs.readFile(path.join(cacheFolder, 'ids-regular.json'), 'utf8'));
  if (await exists(path.join(cacheFolder, 'ids-platformer.json'))) levels.ids.platformer = JSON.parse(await fs.readFile(path.join(cacheFolder, 'ids-platformer.json'), 'utf8'));
  if (await exists(path.join(cacheFolder, 'metadata.json'))) levels.metadata = JSON.parse(await fs.readFile(path.join(cacheFolder, 'metadata.json'), 'utf8'));
}
async function saveCache() {
  await fs.writeFile(path.join(cacheFolder, 'nlw-regular.json'), JSON.stringify(levels.nlw.regular));
  await fs.writeFile(path.join(cacheFolder, 'nlw-pending.json'), JSON.stringify(levels.nlw.pending));
  await fs.writeFile(path.join(cacheFolder, 'nlw-platformer.json'), JSON.stringify(levels.nlw.platformer));
  await fs.writeFile(path.join(cacheFolder, 'ids-regular.json'), JSON.stringify(levels.ids.regular));
  await fs.writeFile(path.join(cacheFolder, 'ids-platformer.json'), JSON.stringify(levels.ids.platformer));
  await fs.writeFile(path.join(cacheFolder, 'metadata.json'), JSON.stringify(levels.metadata));
}
async function fetchSheets() {
  if (!process.env.API_KEY) {
    console.warn('! API_KEY not set, going to rely on cache');
  } else {
    const nlw = await fetchNLWLevels();
    const ids = await fetchIDSLevels();
    levels = { nlw, ids, metadata: levels.metadata };
    await saveCache();
  }
  await loadupMetadataQueue();
}
async function fetchLevelData(name, creator, loose = false) {
  console.log('looking up metadata for', name, 'by', creator);
  // note: `name` isn't escaped here, so names containing a single quote may break the filter
  const params = new URLSearchParams(loose ? {
    'query': name,
    'filter': 'cache_demon=true',
  } : {
    'filter': `cache_demon=true AND cache_level_name='${name}'`,
  });
  const { body } = await request(`https://history.geometrydash.eu/api/v1/search/level/advanced/?${params.toString()}`);
  const data = await body.json();
  if (!data.hits) {
    console.warn('got a malformed response from gdhistory:', data);
    return;
  }
  if (data.hits.length === 0 && !loose) {
    // no exact name match; retry once with a loose full-text query
    return await fetchLevelData(name, creator, true);
  } else if (data.hits.length === 0) {
    return undefined;
  }
  if (data.hits.length === 1) return data.hits[0];
  const exact = data.hits.filter(h => h.cache_level_name.toLowerCase() === name.toLowerCase());
  if (exact.length === 1) return exact[0];
  const creatorHits = data.hits.filter(h => creator.toLowerCase().includes(h.cache_username.toLowerCase()) || h.cache_username.toLowerCase().includes(creator.toLowerCase()));
  if (creatorHits.length === 1) return creatorHits[0];
  return data.hits.sort((a, b) => b.cache_demon_type - a.cache_demon_type)[0]; // pick highest demon diff. somehow effective? very stupid
}
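// rough walkthrough with made-up data: fetchLevelData('SomeDemon', 'someone')
// first queries gdhistory with cache_level_name='SomeDemon'; if several demons
// share that name, an exact (case-insensitive) name match is tried, then a
// creator substring match in either direction, and only then the
// highest-difficulty fallback above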
// at most 10 lookups in flight, and at most 2 new lookups per 500ms window
const metadataFetchQueue = new PQueue({ concurrency: 10, interval: 500, intervalCap: 2 });
// 'idle' fires once the queue is empty AND every running task has settled,
// so the sanity check doesn't race against in-flight fetches
metadataFetchQueue.on('idle', async () => {
  console.log('metadata fetch queue idle');
  await metadataSanityCheck();
  await metadataGarbageCollect();
});
// hopefully will prevent cross-sheet duplicate fighting
// eg. the same level in multiple sheets being marked as duplicate metadata
function normalizeCreatorName(name) {
  // global regex so every '&' gets replaced, not just the first one
  return name.replace(/&/g, 'and').trim().toLowerCase();
}
function getMetadata(level) {
  return levels.metadata.find(m =>
    level.name === m.name &&
    // normalized on the righthand side to prevent completely invalidating old caches
    normalizeCreatorName(level.creator) === normalizeCreatorName(m.creator)
  );
}
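// e.g. (hypothetical names) normalizeCreatorName(' Bianox & more ') === 'bianox and more',
// so the same level credited slightly differently across sheets still resolves
// to a single metadata entry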
async function loadupMetadataQueue() {
  const list = getAllLevels();
  const noMetadata = list.filter(l => getMetadata(l) === undefined);
  if (noMetadata.length === 0) {
    console.log('no metadata to fetch!');
    return;
  }
  console.log(noMetadata.length, 'levels with no metadata, starting fetch');
  metadataFetchQueue.addAll(
    noMetadata.map(level => (async () => {
      // to prevent race conditions, let's check if the metadata exists already and cancel if so
      if (getMetadata(level)) {
        console.log(`metadata for ${level.name} by ${level.creator} already found, lol`);
        return;
      }
      const data = await fetchLevelData(level.name, level.creator);
      if (!data) {
        console.error(`failed to find metadata for ${level.name} by ${level.creator}!`);
        return;
      }
      console.log('id', data.online_id);
      levels.metadata.push({
        name: level.name,
        creator: normalizeCreatorName(level.creator),
        id: data.online_id,
      });
      await saveCache();
    }))
  );
}
async function metadataSanityCheck() {
  const duplicates = levels.metadata.filter((l1, index) => levels.metadata.findIndex(l2 => l1.id === l2.id) !== index);
  if (duplicates.length > 0) {
    console.log('WARNING - duplicate IDs found in metadata table!:');
    const ids = new Set(duplicates.map(l => l.id));
    const badLevels = [...ids].flatMap(id => levels.metadata.filter(l => l.id === id));
    for (const level of badLevels) {
      console.log(`${level.id} ${level.name} by ${level.creator}`);
    }
    console.log('clearing out, will be refreshed next metadata fetch cycle');
    levels.metadata = levels.metadata.filter(l => !ids.has(l.id));
    await saveCache();
    //await loadupMetadataQueue();
  }
}
async function metadataGarbageCollect() {
  const metadata = [];
  for (const level of getAllLevels()) {
    const data = getMetadata(level);
    if (data && metadata.findIndex(m => m.name === data.name && m.creator === data.creator) === -1) metadata.push(data);
  }
  if (metadata.length < levels.metadata.length) {
    console.log(`garbage collecting metadata (${levels.metadata.length - metadata.length} levels)`);
    levels.metadata = levels.metadata.filter(m => metadata.indexOf(m) !== -1);
    await saveCache();
  }
}
await loadCache();
//await loadupMetadataQueue();
const app = express();
app.get('/', (req, res) => {
  res.redirect('https://git.oat.zone/oat/nlw-api');
});
app.get('/list', (req, res) => {
  const type = req.query.type;
  let list;
  if (!type || type === 'regular') {
    list = levels.nlw.regular;
  } else if (type === 'platformer') {
    list = levels.nlw.platformer;
  } else if (type === 'pending') {
    list = levels.nlw.pending;
  } else if (type === 'all') {
    list = [
      ...levels.nlw.regular.map(l => ({ type: 'regular', ...l })),
      ...levels.nlw.platformer.map(l => ({ type: 'platformer', ...l })),
      ...levels.nlw.pending.map(l => ({ type: 'pending', ...l })),
    ];
  } else {
    // sendStatus actually ends the response; res.status(400) alone leaves the request hanging
    return res.sendStatus(400);
  }
  res.json(list.map(l => ({ ...(getMetadata(l) || {}), ...l })));
});
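// hypothetical response shape (the exact row fields come from the sheets in nlw.js):
//   GET /list?type=all
//   -> [{ type: 'regular', id: 123456, name: '...', creator: '...', ... }, ...]
// sheet fields win over metadata fields because the level spread comes last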
app.get('/ids', (req, res) => {
  const type = req.query.type;
  let list;
  if (!type || type === 'regular') {
    list = levels.ids.regular;
  } else if (type === 'platformer') {
    list = levels.ids.platformer;
  } else if (type === 'all') {
    list = [
      ...levels.ids.regular.map(l => ({ type: 'regular', ...l })),
      ...levels.ids.platformer.map(l => ({ type: 'platformer', ...l })),
    ];
  } else {
    return res.sendStatus(400);
  }
  res.json(list.map(l => ({ ...(getMetadata(l) || {}), ...l })));
});
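// /ids mirrors /list but reads from the IDS sheets, e.g. GET /ids?type=platformer.
// levels whose gdhistory lookup failed simply come back without an `id` field,
// since getMetadata() returns undefined and spreading {} adds nothing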
const port = process.env.PORT || 8080;
app.listen(port);
console.log(`listening on port ${port}`);
await fetchSheets();
// re-fetch the sheets (and queue up any missing metadata) once an hour
setInterval(fetchSheets, 1000 * 60 * 60);
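// to run locally, something like this should work (assumes "type": "module" in
// package.json, since this file uses ESM imports and top-level await; API_KEY is
// presumably the key consumed by the sheet fetchers in nlw.js/ids.js):
//   API_KEY=<sheets key> CACHE_DIR=./cache PORT=8080 node index.js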