mirror of
https://github.com/Lissy93/web-check.git
synced 2026-05-13 16:01:23 -04:00
- Sitemap endpoint now recursively expands sitemap-index files - Fixes #165 - Strips :port from target URLs in get-ip, dns, dns-server, ports, mail-config - Fixes #203 - Configurable trust proxy (TRUST_PROXY env) so app works behind Traefik/nginx - Fixes #157 - Tranco rank now correctly says "top 1 million" (was "100 million") - Fixes #257 - Adds engines.node ">=20" so Vercel picks a supported runtime - Re #212 - Raises Vercel maxDuration from 10s to 60s, cutting most 504 timeouts - Re #251 - Re #287 - Bumps axios 1.4.8 to 1.16, closing 4 high-severity SSRF/DoS CVEs - Re #289 - Fixes mail-config crash where dns module was awaited as if promise-based - Adds reusable structured logging util for the API - Bumps a whole bunch of deps, and resolves lots of open npm CVEs
91 lines · 3.0 KiB · JavaScript
import puppeteer from 'puppeteer-core';
|
|
import chromium from 'chrome-aws-lambda';
|
|
import { randomUUID } from 'crypto';
|
|
import { execFile } from 'child_process';
|
|
import { promises as fs } from 'fs';
|
|
import path from 'path';
|
|
import middleware from './_common/middleware.js';
|
|
import { createLogger } from './_common/logger.js';
|
|
|
|
const log = createLogger('screenshot');
|
|
|
|
// Fast path: shell out to the system Chromium binary directly, which has a
// much quicker cold start than launching a browser through puppeteer.
// Resolves with the screenshot as a base64-encoded PNG string.
const directChromiumScreenshot = async (url) => {
  const outputFile = path.join('/tmp', `screenshot-${randomUUID()}.png`);
  log.debug(`direct method, saving to ${outputFile}`);

  const binary = process.env.CHROME_PATH || '/usr/bin/chromium';
  const chromeArgs = [
    '--headless',
    '--disable-gpu',
    '--no-sandbox',
    `--screenshot=${outputFile}`,
    url,
  ];

  return new Promise((resolve, reject) => {
    execFile(binary, chromeArgs, (execError) => {
      if (execError) {
        reject(execError);
        return;
      }
      fs.readFile(outputFile)
        .then((data) =>
          // Best-effort cleanup of the temp file; a failed unlink is only
          // logged and must not fail the request.
          fs
            .unlink(outputFile)
            .catch((err) => log.warn(`temp cleanup failed: ${err.message}`))
            .then(() => resolve(data.toString('base64')))
        )
        .catch(reject);
    });
  });
};
|
|
|
|
// Fallback path: drive Chromium through puppeteer. Intended to use the
// chrome-aws-lambda bundled binary when no system Chromium is available.
// Resolves with the screenshot as a base64-encoded PNG string.
const puppeteerScreenshot = async (targetUrl) => {
  let browser = null;
  try {
    browser = await puppeteer.launch({
      args: [...chromium.args, '--no-sandbox'],
      defaultViewport: { width: 800, height: 600 },
      // Prefer an explicitly configured binary, then the chrome-aws-lambda
      // bundled executable (an async getter that resolves to null outside
      // Lambda), then the conventional system path. Previously this never
      // consulted chromium.executablePath, so the "bundled binary" fallback
      // could not work on Lambda where /usr/bin/chromium does not exist.
      executablePath:
        process.env.CHROME_PATH ||
        (await chromium.executablePath) ||
        '/usr/bin/chromium',
      headless: true,
      ignoreHTTPSErrors: true,
      ignoreDefaultArgs: ['--disable-extensions'],
    });

    const page = await browser.newPage();
    await page.emulateMediaFeatures([
      { name: 'prefers-color-scheme', value: 'dark' },
    ]);
    page.setDefaultNavigationTimeout(8000);
    await page.goto(targetUrl, { waitUntil: 'domcontentloaded' });

    // Fail fast if the navigation produced a document with no body
    // (e.g. a download or an empty response).
    await page.evaluate(() => {
      if (!document.querySelector('body')) {
        throw new Error('No body element found on the page');
      }
    });

    const buffer = await page.screenshot();
    return buffer.toString('base64');
  } finally {
    // Always release the browser; swallow close errors so the original
    // result or error still propagates to the caller.
    if (browser) await browser.close().catch(() => {});
  }
};
|
|
|
|
// Entry point: validate the target URL, then attempt the fast direct-Chromium
// capture, falling back to puppeteer when that fails. Returns { image } where
// image is a base64-encoded PNG; throws if both capture strategies fail.
const screenshotHandler = async (targetUrl) => {
  if (!targetUrl) {
    throw new Error('URL is missing from queryStringParameters');
  }
  try {
    new URL(targetUrl);
  } catch {
    throw new Error('URL provided is invalid');
  }

  log.debug(`request received: ${targetUrl}`);

  let directError;
  try {
    const image = await directChromiumScreenshot(targetUrl);
    return { image };
  } catch (err) {
    directError = err;
  }
  log.warn(`direct chromium failed, falling back to puppeteer: ${directError.message}`);

  try {
    const image = await puppeteerScreenshot(targetUrl);
    return { image };
  } catch (error) {
    log.error(`puppeteer screenshot failed: ${error.message}`);
    throw error;
  }
};
|
|
|
|
// Wrap the raw handler in the shared API middleware; both the named and the
// default export point at the wrapped version.
export const handler = middleware(screenshotHandler);
export default handler;
|