I have a node.js app running locally in netbeans. Its entry point is an app.js file:
"use strict";
// Standard TypeScript emit helper (tslib `__awaiter`): drives the generator
// produced for an `async` function and exposes the result as a Promise.
// NOTE(review): at ESM top level `this` is undefined, so the `(this && ...)`
// guard always falls through to the local definition — harmless here.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
// Wrap a yielded value in the Promise implementation P unless it already is one.
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
// Resume the generator with the awaited value, or throw into it on rejection.
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
// Settle the outer Promise when the generator finishes; otherwise await the
// yielded value and keep stepping.
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
// Standard TypeScript emit helper (tslib `__generator`): a state machine that
// emulates generator semantics when `async/await` is down-leveled. `body` is
// the compiled function body; `_` holds the current label (resume point),
// try-region stack (`trys`) and pending-finally ops (`ops`).
// Opcode legend for `op[0]`: 0=next, 1=throw, 2=return, 4=yield, 5=yield*,
// 6=catch, 7=endfinally (per the tslib source).
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
// Expose an iterator-shaped object so the result can be driven by __awaiter.
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
// `f` guards against re-entrant stepping while the body is running.
if (f) throw new TypeError("Generator is already executing.");
while (g && (g = 0, op[0] && (_ = 0)), _) try {
// Delegate to an inner iterator (`y`) while a `yield*` is active.
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false }; // plain yield
case 5: _.label++; y = op[1]; op = [0]; continue; // begin yield*
case 7: op = _.ops.pop(); _.trys.pop(); continue; // end of finally
default:
// Consult the innermost try-region to decide where control resumes.
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
// Run the compiled body from the current label; it returns the next op.
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
// op[0] of 5 or 1 means an uncaught throw escaped the state machine.
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
// CommonJS interop marker from the TS compiler output; `exports` does not
// exist in an ES module ("type": "module" in package.json), so guard it to
// avoid a ReferenceError at startup.
if (typeof exports !== "undefined") {
    Object.defineProperty(exports, "__esModule", { value: true });
}
/* eslint-disable @typescript-eslint/no-var-requires */
import * as express_1 from "express";
import * as cors_1 from "cors";
import * as logger_1 from "./config/logger.js";
import * as database_1 from "./database/index.js";
import * as auth_routes_1 from "./routes/auth.routes.js";
import * as auth_routes_2 from "./routes/items.routes.js";
// import cron from 'node-cron'
import * as index_1 from "./helper/scrapping/index.js";
import * as morgan_1 from "morgan";
import dotenv from 'dotenv';
// BUG FIX: load .env BEFORE anything reads process.env — the DB connection
// and the PORT lookup below both ran before dotenv.config() in the original.
dotenv.config();
var scrap = new index_1.Scrapping();
var app = (0, express_1.default)();
(0, database_1.default)();
var port = process.env.PORT || 8080;
app.use((0, cors_1.default)());
app.use(express_1.default.json({
    limit: "50mb",
    type: [
        "application/json",
        "text/plain", // AWS sends this content-type for its messages/notifications
    ],
}));
app.use(express_1.default.urlencoded({ extended: true }));
app.use((0, morgan_1.default)(':date *** :method :: :url ** :response-time'));
// BUG FIX: the original mounted `routes_1.default`, but `routes_1` was never
// imported (a ReferenceError at runtime). Mount the two routers that ARE
// imported instead. TODO confirm against the original TS source which router
// module was intended here.
app.use(auth_routes_1.default);
app.use(auth_routes_2.default);
// Run the full scrape-and-clean cycle once every 24 hours (86400000 ms).
// Native async/await replaces the down-leveled __awaiter/__generator form;
// the await sequence and log output are identical.
setInterval(async () => {
    console.log('running jobs');
    await scrap.saveDailyJobSimilarItem();
    await scrap.saveDailyJobSameItem();
    console.log('cleaning');
    await scrap.cleanDailyJobsCategories();
    await scrap.cleanDailyJobsItems();
}, 86400000);
// setInterval(async () => {
// await scrap.saveDailyJobSameItem()
// console.log('running same job')
// }, 92500000)
// setTimeout(async () => {
// console.log('it begins')
// await scrap.saveDailyJobSimilarItem()
// console.log('running similar job')
// }, 1000)
// setTimeout(async () => {
// console.log('it begins 2.0')
// await scrap.saveDailyJobSameItem()
// console.log('running same job')
// }, 3600000)
// Manual trigger: kick off the "similar items" daily scrape on demand.
app.get('/test-simi', async (req, res) => {
    // const x = await scrap.saveDailyJobSameItem()
    await scrap.saveDailyJobSimilarItem();
    res.json('running');
});
// Manual trigger: kick off the "same items" daily scrape on demand.
app.get('/test-same', async (req, res) => {
    await scrap.saveDailyJobSameItem();
    // const x = await scrap.saveDailyJobSimilarItem()
    res.json('running');
});
// Start the HTTP server and log the bound address.
app.listen(port, () => {
    logger_1.logger.info(`server listening on http://localhost:${port}`);
});
That app.js invokes an index.js file shown here:
// /* eslint-disable @typescript-eslint/no-empty-function */
import puppeteer from "puppeteer";
import Model from "../../schema/scrapped-items.model";
import _ from "underscore";
import dailyJob from "../../schema/daily-job.model";
import axios, { AxiosResponse } from "axios";
import CategoryModel from "../../schema/category.model";
import itemModel from "../../schema/item.model";
import { similarity } from "../utils/similarity";
import dailyJobModel from "../../schema/daily-job.model";
import categoryModel from "../../schema/category.model";
import { logger } from '../../config/logger';
class Scrapping {
/**
 * Calls the external scrap-item API for a single item.
 * Returns the response body, or `undefined` when the request fails
 * (errors are logged, not rethrown — preserving the original soft-failure
 * behaviour for callers).
 */
public async scrapCall(item) {
try {
// BUG FIX: encode the item so spaces/special characters survive the query
// string (Node's http client rejects unescaped characters in the path).
const url = `https://xxx-yyy.zzz.com/api/v1/scrap-item?item=${encodeURIComponent(item)}`;
const response = await axios.get(url);
return response.data;
} catch (error) {
// Use the shared winston logger instead of console.log for consistency
// with the rest of the app.
logger.error(error);
}
}
/**
 * Scrapes the first 17 eBay search results for `item` with a headless browser.
 * Returns a list of {price, title[, link, baseCurrency, date]} objects; on a
 * Puppeteer timeout returns the timeout message string, and on any other error
 * returns undefined (soft failure preserved from the original).
 */
public async ebayScrapping(item) {
let browser;
try {
browser = await puppeteer.launch({ args: ["--no-sandbox"] });
const page = await browser.newPage();
await page.goto("https://www.ebay.com/");
await page.waitForSelector("#gh-ac");
await page.type("#gh-ac", `${item}`);
await page.click('input[value="Search"]');
await page.waitForSelector("div.s-item__wrapper");
// Pull parallel arrays of image srcs, titles and prices off the results page.
const link = await page.$$eval("img.s-item__image-img", (items) => {
return items.map((item: any) => {
return item.src;
});
});
const title = await page.$$eval("h3.s-item__title", (items) => {
return items.map((item: any) => {
return item.innerText;
});
});
const price = await page.$$eval("span.s-item__price", (items) => {
return items.map((item: any) => {
return item.innerText;
});
});
const invs = [];
// First 17 results only; the link array may be shorter than the others.
for (let i = 0, length = 17; i < length; i++) {
const inv: any = {
price: this.priceToStr(price[i]),
title: title[i],
};
if (i < link.length) {
inv.link = link[i];
inv.baseCurrency = "$";
inv.date = new Date();
}
invs.push(inv);
}
return invs;
} catch (error) {
if (error instanceof puppeteer.errors.TimeoutError) {
return error.message;
}
} finally {
// BUG FIX: the browser was never closed, leaking a Chromium process on
// every call (and every error).
if (browser) await browser.close();
}
}
/**
 * Daily-job variant of ebayScrapping: scrapes the first 17 eBay results for
 * `item` with a headless browser and rethrows on failure.
 * NOTE(review): itemId, user and id are accepted but unused — TODO confirm
 * whether callers still pass them.
 */
public async ebayScrappingDaily(
itemId?: any,
user?: any,
id?: any,
item?: any
) {
let browser;
try {
browser = await puppeteer.launch({ args: ["--no-sandbox"] });
const page = await browser.newPage();
await page.goto("https://www.ebay.com/");
await page.waitForSelector("#gh-ac");
await page.type("#gh-ac", `${item}`);
await page.click('input[value="Search"]');
await page.waitForSelector("div.s-item__wrapper");
// Parallel arrays of image srcs, titles and prices from the results page.
const link = await page.$$eval("img.s-item__image-img", (items) => {
return items.map((item: any) => {
return item.src;
});
});
const title = await page.$$eval("h3.s-item__title", (items) => {
return items.map((item: any) => {
return item.innerText;
});
});
const price = await page.$$eval("span.s-item__price", (items) => {
return items.map((item: any) => {
return item.innerText;
});
});
const invs = [];
// First 17 results only; the link array may be shorter than the others.
for (let i = 0, length = 17; i < length; i++) {
const inv: any = {
price: this.priceToStr(price[i]),
title: title[i],
};
if (i < link.length) {
inv.link = link[i];
inv.baseCurrency = "$";
inv.date = new Date();
}
invs.push(inv);
}
return invs;
} catch (error) {
console.log(error);
throw new Error(error.message);
} finally {
// BUG FIX: close the browser on every path; it previously leaked a
// Chromium process per call.
if (browser) await browser.close();
}
}
/**
 * Scrapes price data for one item, splits results into "same" (title
 * similarity >= 0.8) and "similar" buckets, computes price statistics, and
 * persists a scrapped-items document.
 * Returns true when the document was created; otherwise undefined.
 * NOTE(review): `category` is accepted but never used — confirm with callers.
 */
public async saveScrapItem(category, item_title, itemId, user) {
const same_data = await this.scrappingBee(item_title, itemId);
// Score every result against the requested title (string similarity 0..1).
same_data.forEach(x => x.similarity = similarity(x.title,item_title))
let good_items = []
for(const aitem of same_data){
if(aitem.similarity >= 0.8) good_items.push(aitem)
}
// Fallback: if nothing clears the threshold, take the first 30 hits as "good".
if(good_items.length == 0){
good_items = same_data.slice(0,30)
}
// NOTE(review): this slices by COUNT, not by membership — when good_items came
// from the similarity filter, the skipped prefix is not necessarily the same
// set as good_items, so "good" items can reappear in similar_data. Confirm
// this is intended.
const similar_data = same_data.slice(good_items.length,same_data.length)
const average = await this.getAveragePrice(good_items);
const median = await this.getMedianPrice(good_items);
// High/low are computed over ALL results, not just the good ones.
const {high, low} = await this.getHighLowPrice(same_data)
const createNewScrapItem = await Model.create({
_itemId: itemId,
_userId: user,
similar_data: similar_data,
same_data: same_data,
average,
median,
highest_price: high,
lowest_price: low,
});
if (createNewScrapItem) return true;
}
/**
 * Daily job: for every stored category, scrape current listings and persist a
 * daily-job document with average/median prices. Scrapes are staggered 25s
 * apart via setTimeout to avoid hammering the scraping API.
 * Note: this method returns before the staggered work completes.
 */
public async saveDailyJobSimilarItem() {
const categories = await CategoryModel.find({});
// All category names live on the first (only) category document.
const all_categories = categories[0].category
// Wrap each category in a one-element array (original batching shape kept).
const items = [];
for (let i = 0; i < all_categories.length; i += 1) {
items.push(all_categories.slice(i, i + 1));
}
let offset = 0;
_(items).each((batch) => {
setTimeout(() => {
batch.forEach(async (category) => {
if (!category) return;
// BUG FIX: the async callback had no error handling, so any scrape/DB
// failure became an unhandled promise rejection (fatal on modern Node).
try {
const data = await this.scrappingBeeDaily(category);
const average = await this.getAveragePrice(data);
const median = await this.getMedianPrice(data);
await dailyJob.create({
median: median,
average: average,
category: category,
similar_data: data,
});
} catch (error) {
logger.error(`saveDailyJobSimilarItem failed for category "${category}": ${error.message}`);
}
});
}, 25000 + offset);
offset += 25000;
});
}
/**
 * Daily job: for every catalog item, build a search title from the item title
 * plus its keywords, scrape eBay listings, keep the close matches
 * (similarity >= 0.8, or the first 30 as a fallback) and persist price stats.
 * Scrapes are staggered 25s apart; the method returns before they finish.
 */
public async saveDailyJobSameItem() {
const items_data = await itemModel.find({});
const same_data = items_data.map((item) => {
// Drop keywords that merely repeat the title, then append the rest.
let item_keywords = item.item_keywords.filter(x=>x.toLowerCase() != item.item_title.toLowerCase())
let title = item.item_title
item_keywords.forEach(x=>title+=" "+x)
return {
title: title,
item: item._id,
}
})
// Wrap each record in a one-element array (original batching shape kept).
const items = [];
for (let i = 0; i < same_data.length; i += 1) {
items.push(same_data.slice(i, i + 1));
}
let offset = 0;
_(items).each((batch) => {
setTimeout(() => {
// Renamed from the original's shadowed `item`/`item` pair for clarity.
batch.forEach(async (record) => {
if (record.title === 'Shop on eBay') return;
// BUG FIX: no error handling around the awaits meant any failure was an
// unhandled promise rejection (fatal on modern Node).
try {
const data = await this.scrappingBeeDaily(record.title, record.item);
data.forEach(el => el.similarity = similarity(el.title,record.title))
let good_items = []
for(const aitem of data){
if(aitem.similarity >= 0.8) good_items.push(aitem)
}
if(good_items.length == 0){
good_items = data.slice(0,30)
}
const average = await this.getAveragePrice(good_items);
const median = await this.getMedianPrice(good_items);
const {high, low} = await this.getHighLowPrice(good_items)
await dailyJob.create({
_scrapId: record.item,
median: median,
average: average,
same_data: data,
lowest_price: low,
highest_price: high,
});
} catch (error) {
logger.error(`saveDailyJobSameItem failed for "${record.title}": ${error.message}`);
}
});
}, 25000 + offset);
offset += 25000;
});
}
},
},
},
},
});
const response = data.data;
const invs = [];
await response.map(async (item) => {
if (item) {
const inv: any = {
price: await this.priceToStr(item.price),
title: item.title,
};
if (item.link) {
inv.link = item.link;
inv.baseCurrency = "$";
inv.date = new Date();
inv.url = item.url
inv.item = id
}
invs.push(inv);
}
});
return invs.slice(0,100);
} catch (error) {
throw new Error("ScrapingBee Error: " + error.message);
}
}
/**
 * Scrapes eBay search results for `items` (a space-separated search phrase)
 * through the ScrapingBee extraction API.
 * @param items search phrase; also stored on each result as `category`.
 * @param id    optional item id attached to each result.
 * @returns up to 100 {price, title[, link, url, baseCurrency, date, category, item]} objects.
 * @throws Error wrapping the underlying axios/API failure.
 */
public async scrappingBeeDaily(items, id = null) {
const url = items.split(" ").join("+")
try {
const { data } = await axios.get("https://app.scrapingbee.com/api/v1", {
params: {
// SECURITY: a live API key was hard-coded in source. Prefer the env var;
// the old literal remains only as a backward-compatible fallback — rotate
// this key and remove the fallback.
api_key:
process.env.SCRAPINGBEE_API_KEY ||
"DCXO8PT2BDINHZNQDJUMHLK9FYAKG3MDW9U4T1A4G7KNZ4IN7WNYA796GELUFA1KW9VQ7R9ZXSXN28IH",
url: `https://www.ebay.com/sch/i.html?_from=R40&_trksid=p2380057.m570.l1313&_nkw=${url}&_sacat=0`,
// Wait for there to be at least one non-empty .s-item element.
wait_for: ".s-item",
extract_rules: {
data: {
// Build a list of objects extracted from each .s-item element.
selector: ".s-item",
type: "list",
output: {
price: ".s-item__price",
title: ".s-item__title",
link: {
selector: ".s-item__image-wrapper img",
output: "@src",
},
url: {
selector: ".s-item__link",
output: "@href"
}
},
},
},
},
});
const response = data.data;
const invs = [];
// BUG FIX: the original `await response.map(async ...)` awaited the ARRAY of
// promises (a no-op), so `invs` could be returned before the callbacks had
// finished pushing into it. A sequential loop guarantees completeness and a
// deterministic order.
for (const item of response) {
if (!item) continue;
const inv: any = {
price: await this.priceToStr(item.price),
title: item.title,
};
if (item.link) {
inv.link = item.link;
inv.baseCurrency = "$";
inv.date = new Date();
inv.url = item.url
inv.category = items
inv.item = id
}
invs.push(inv);
}
return invs.slice(0,100);
} catch (error) {
console.log(error)
throw new Error("ScrapingBee Error: " + error.message);
}
}
/**
 * Debug helper: runs the same-item scrape pipeline for a single item id and
 * persists one daily-job document. Littered with temporary console.log
 * tracing — not intended for production use.
 */
public async testing(id) {
console.log('testing...')
let items_data = await itemModel.find({});
console.log('asdfasdf')
console.log(items_data.length)
// NOTE(review): loose == is deliberate here — `_id` is presumably a mongoose
// ObjectId while `id` is a string, so == triggers string coercion; === would
// never match. Confirm before "fixing".
items_data = items_data.filter(x=>x._id==id)
// Bail unless exactly one item matched.
if(items_data.length > 1 || items_data.length ==0){
console.log('fail asdfss')
console.log(items_data.length)
return
}
console.log('whhhhaaat')
// console.log(items_data)
// console.log(items_data[0])
const same_data = items_data.map((item) => {
// Drop keywords that merely repeat the title, then append the rest.
let item_keywords = item.item_keywords.filter(x=>x.toLowerCase() != item.item_title.toLocaleLowerCase())
let title = item.item_title
item_keywords.forEach(x=>title+=" "+x)
return {
title: title,
item: item._id,
}
})
const items = [];
for (let i = 0; i < same_data.length; i += 1) {
items.push(same_data.slice(i, i + 1));
}
let response;
let offset = 0;
// Unlike the daily jobs, the setTimeout stagger is commented out here, so all
// scrapes fire immediately.
_(items).each((item) => {
// setTimeout(() => {
item.forEach(async (item) => {
if (item.title !== 'Shop on eBay') {
// console.log(item.title)
const data = await this.scrappingBeeDaily(item.title, item.item);
// console.log(data)
data.forEach(el => el.similarity = similarity(el.title,item.title))
const average = await this.getAveragePrice(data);
const median = await this.getMedianPrice(data);
const {high, low} = await this.getHighLowPrice(data)
console.log('creating')
return await dailyJob.create({
_scrapId: item.item,
median: median,
average: average,
same_data: data,
lowest_price: low,
highest_price: high,
});
}
return response;
});
});
// offset += 25000;
// });
}
}
export { Scrapping };
When executed, the console log states:
"C:\Program Files\nodejs\node.exe" "C:\Users\linco\Documents\NetBeansProjects\pricer_collectibles\app.js" "run" "build" "&&" "node" "./bin/www"
file:///C:/Users/linco/Documents/NetBeansProjects/pricer_collectibles/helper/scrapping/index.js:15
public async scrapCall(item) {
^^^^^
SyntaxError: Unexpected token 'async'
at ModuleLoader.moduleStrategy (node:internal/modules/esm/translators:168:18)
at callTranslator (node:internal/modules/esm/loader:279:14)
at ModuleLoader.moduleProvider (node:internal/modules/esm/loader:285:30)
Node.js v21.7.1
Done.
tsconfig.json file is:
{
"compilerOptions": {
/* Basic Options */
"target": "ESNEXT", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
"module": "ESNext", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"lib": ["dom","ESNext"], /* Specify library files to be included in the compilation. */
"allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
"jsx": "react-native", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
// "outDir": "./", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "removeComments": true, /* Do not emit comments to output. */
"noEmit": true, /* Do not emit outputs. */
// "incremental": true, /* Enable incremental compilation */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
"isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
"types": [ "node" ],
"typeRoots": [ "../node_modules/@types" ],
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
"moduleResolution": "Node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
"allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
"skipLibCheck": true, /* Skip type checking of declaration files. */
"resolveJsonModule": true /* Allows importing modules with a ‘.json’ extension, which is a common practice in node projects. */
/* Source Map Options */
// "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
},
"exclude": ["node_modules", "babel.config.js", "metro.config.js", "jest.config.js"],
"ts-node": {
"esm": true,
"experimentalSpecifierResolution": "node"
}
}
package.json file is:
{
"name": "pricer-collectibles",
"version": "0.0.4",
"main": "app.js",
"type": "module",
"dependencies": {
"cookie-parser": "~1.4.4",
"cors": "2.8.5",
"debug": "~2.6.9",
"dotenv": "16.4.5",
"express": "~4.16.1",
"http-errors": "~1.6.3",
"jade": "~1.11.0",
"less-middleware": "~2.2.1",
"mongoose": "8.2.2",
"morgan": "~1.9.1",
"window": "4.2.7",
"winston": "3.12.0"
},
"scripts": {
"start": "node app.js run build && node ./bin/www"
}
}
Why does Node.js report a SyntaxError ("Unexpected token 'async'") at the line where I declare these asynchronous methods, and how do I fix my setup so the app runs?