
Convert Any Website to CSV or JSON: A Practical Guide
Clients always want data in CSV or JSON. Here's how to scrape a website and output clean, structured files.

Step 1: Scrape the Data

const cheerio = require('cheerio');

async function scrapeTable(url) {
  const res = await fetch(url, { headers: { 'User-Agent': 'DataBot/1.0' } });
  const $ = cheerio.load(await res.text());
  const headers = $('table th').map((i, el) => $(el).text().trim()).get();
  const rows = [];
  $('table tr').each((i, row) => {
    if (i === 0) return; // skip header
    const cells = $(row).find('td').map((j, cell) => $(cell).text().trim()).get();
    if (cells.length) rows.push(cells);
  });
  return { headers, rows };
}

Step 2: Export as JSON

const fs = require('fs');

function toJSON(headers, rows) {
  const data = rows.map(row => {
    const obj = {};
    headers.forEach((h, i) => obj[h] = row[i] || '');
    return obj;
  });
  fs.writeFileSync('output.json', JSON.stringify(data
Continue reading on Dev.to Webdev
Opens in a new tab



