I’m trying to make a rudimentary scraper for a subreddit I like. Here’s what I have so far:
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <title>Document</title>
</head>
<body>
  <h1>Place Holder</h1>
  <p id="name"></p>

  <script>
    // Fetch the subreddit's top posts as JSON
    fetch('https://www.reddit.com/r/meme/top.json')
      .then(res => res.json())
      .then(d => {
        console.log(d);
        // for testing purposes, use the following:
        // console.log(d.data.children[0].data.id);
        for (let i = 0; i < d.data.children.length; i++) {
          const x = d.data.children[i].data.url;
          // this is the line I'm asking about
          document.getElementById("name").append("<br><img src='" + x + "'>");
          console.log(x);
        }
      });
  </script>
</body>
</html>
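In case it matters, this is roughly the shape of the response my loop assumes, trimmed down to just the fields I actually read (the real payload has a lot more in it):

{
  "data": {
    "children": [
      { "data": { "id": "...", "url": "https://..." } }
    ]
  }
}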
If my code is:
document.getElementById("name").innerHTML = x
Then it will grab the latest post/image and place it perfectly into my page. HOWEVER, if I keep the .append, it just lists all the img URLs as plain text instead of properly embedding the images. What am I doing wrong?
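For reference, here's a stripped-down comparison of the two variants I'm describing, using a placeholder URL instead of the real post data (any direct image link should behave the same):

const x = 'https://example.com/some-image.jpg'; // placeholder image URL, not a real post
// variant 1: assigning the markup to innerHTML renders the image
document.getElementById("name").innerHTML = "<img src='" + x + "'>";
// variant 2: passing the same markup string to .append() shows it as literal text
document.getElementById("name").append("<br><img src='" + x + "'>");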