added a timer based on ms

commit 63a9a11b79
parent c869723a95
@@ -1,6 +1,6 @@
 {
   "parserOptions": {
-    "ecmaVersion": 2017,
+    "ecmaVersion": 2018,
     "sourceType": "module",
     "ecmaFeatures": {
       "jsx": false
@@ -11,14 +11,29 @@ async function run() {
 
   if (typeof(process.env.CD_CRON) === 'string' ) {
     console.log(`${cdScraper.id} cron set for ${process.env.CD_CRON}`);
-    new CronJob(process.env.CD_CRON, async function() {
-      await cdScraper.run();
+    new CronJob(process.env.CD_CRON, async () => {
+      console.log('go');
+      await cdScraper.run();
+      console.log('ready to go again..');
     }, null, true);
   }
 
-  if (process.env.SCRAPE_START === cdScraper.id)
-  {
+  if (typeof(process.env.CD_EVERY) === 'string' ) {
+    console.log(`${cdScraper.id}_EVERY set for ${process.env.CD_EVERY} seconds`);
+
+    console.log('++');
+    await cdScraper.run();
+    console.log('--');
+    setInterval(async () => {
+      console.log('go');
+      await cdScraper.run();
+      console.log('ready to go again..');
+      // }, parseInt(process.env.CD_EVERY, 10));
+    }, process.env.CD_EVERY);
+  }
+
+  if (process.env.SCRAPE_START === cdScraper.id) {
     console.log('go');
     await cdScraper.run();
   }
 
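
Note on the interval above: setInterval() takes its delay in milliseconds, and the new code passes process.env.CD_EVERY straight through (the parseInt call is left commented out), so the string is coerced to a number and treated as milliseconds even though the log line still says "seconds". If CD_EVERY were meant to hold seconds, an explicit conversion would look like this sketch (everySeconds/everyMs are illustrative names, not part of the commit):

// Sketch only: convert a seconds-based CD_EVERY into the milliseconds
// that setInterval() expects, instead of relying on string coercion.
const everySeconds = Number.parseInt(process.env.CD_EVERY, 10);
const everyMs = everySeconds * 1000;

setInterval(async () => {
  await cdScraper.run();
}, everyMs);
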
@@ -10,13 +10,12 @@ const jsonfile = require('jsonfile');
 
 const Diff = require('text-diff');
 
-const time = require("time-since");
+const time = require('time-since');
 const pug = require('pug');
 
 const PNG = require('pngjs').PNG;
 const pixelmatch = require('pixelmatch');
-const compareImages = require("resemblejs/compareImages");
 
+const compareImages = require('resemblejs/compareImages');
 const email = require('smtp-email-sender')({
   'host': 'mail.caliban.io',
@@ -38,9 +37,13 @@ class ChangeDetection extends Scraper {
 
     this.setID('CD');
 
-    this.run = this._debounce(async () => {
+    /* this.run = _.debounce(async () => {
       await this.__run();
-    }, 5000);
+    }, 5000);*/
+
+    this.run = async () => {
+      await this.__run();
+    };
   }
 
   pugTest(data, newpath) {
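
Note: the constructor change above comments out the debounced wrapper around run() and replaces it with a plain async delegate. Neither the class's _debounce helper nor lodash's _.debounce appears in this diff; a trailing-edge debounce of that general shape looks roughly like the following sketch (illustrative only, not the project's implementation):

// Illustrative sketch of a trailing-edge debounce: repeated calls within
// `wait` ms collapse into a single delayed invocation of fn.
function debounce(fn, wait) {
  let timer = null;

  return function(...args) {
    clearTimeout(timer);
    timer = setTimeout(() => fn.apply(this, args), wait);
  };
}

// e.g. this.run = debounce(async () => { await this.__run(); }, 5000);
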
@@ -52,7 +55,7 @@ class ChangeDetection extends Scraper {
 
     const attachments = [
       {
-        path:data.diffPNG
+        'path':data.diffPNG
       }
     ];
 
@@ -62,28 +65,29 @@ class ChangeDetection extends Scraper {
       'to': 'Martin <martind2000@gmail.com>',
       'subject': `ChangeDetection: ${data.name}`,
       'html': html,
-      attachments: attachments
+      'attachments': attachments
     });
   }
 
   async generateDiffScreenshotV2(previous, today) {
-    let {dir, root, ext, name} = path.parse(today);
+    const { dir, root, ext } = path.parse(today);
+    let { name } = path.parse(today);
 
     const options = {
-      output: {
-        errorColor: {
-          red: 255,
-          green: 0,
-          blue: 255
+      'output': {
+        'errorColor': {
+          'red': 255,
+          'green': 0,
+          'blue': 255
         },
-        errorType: "movement",
-        transparency: 0.3,
-        largeImageThreshold: 1200,
-        useCrossOrigin: false,
-        outputDiff: true
+        'errorType': 'movement',
+        'transparency': 0.3,
+        'largeImageThreshold': 1200,
+        'useCrossOrigin': false,
+        'outputDiff': true
       },
-      scaleToSameSize: false,
-      ignore: "colors"
+      'scaleToSameSize': false,
+      'ignore': 'colors'
     };
 
     // const img1 = PNG.sync.read(fs.readFileSync(previous));
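
Note: the options object above is handed to resemblejs' compareImages, whose result exposes a mismatch score and the rendered diff image via getBuffer(). A minimal standalone sketch with placeholder file names (a.png and b.png are not files from this repo):

// Minimal sketch, not project code: compare two local PNGs with resemblejs.
const fs = require('fs');
const compareImages = require('resemblejs/compareImages');

async function quickCompare() {
  const result = await compareImages(
    fs.readFileSync('a.png'),
    fs.readFileSync('b.png'),
    { 'ignore': 'colors' }
  );

  // misMatchPercentage is the difference score; getBuffer() is the diff image.
  console.log('mismatch %:', result.misMatchPercentage);
  fs.writeFileSync('diff.png', result.getBuffer());
}

quickCompare().catch(console.error);
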
@@ -92,56 +96,43 @@ class ChangeDetection extends Scraper {
     const img1 = fs.readFileSync(previous);
     const img2 = fs.readFileSync(today);
 
-
     const data = await compareImages(img1, img2,
       options
     );
 
-
     name = name.concat('_diff');
 
-
-    const endFilename = path.format({dir, root, ext, name});
-
+    const endFilename = path.format({ dir, root, ext, name });
 
     logger.debug('diffFilename', endFilename);
 
     fs.writeFileSync(endFilename, data.getBuffer());
 
     return endFilename;
-
   }
 
   async generateDiffScreenshot(previous, today) {
-
-    let {dir, root, ext, name} = path.parse(today);
+    const { dir, root, ext } = path.parse(today);
+    let { name } = path.parse(today);
 
     const img1 = PNG.sync.read(fs.readFileSync(previous));
     const img2 = PNG.sync.read(fs.readFileSync(today));
-    const {width, height} = img1;
-    const diff = new PNG({width, height});
-
-    pixelmatch(img1.data, img2.data, diff.data, width, height, {threshold: 0.1});
+    const { width, height } = img1;
+    const diff = new PNG({ width, height });
+
+    pixelmatch(img1.data, img2.data, diff.data, width, height, { 'threshold': 0.1 });
 
     name = name.concat('_diff');
 
-
-    const endFilename = path.format({dir, root, ext, name});
-
+    const endFilename = path.format({ dir, root, ext, name });
 
     logger.debug('diffFilename', endFilename);
 
     fs.writeFileSync(endFilename, PNG.sync.write(diff));
 
     return endFilename;
-
-
   }
 
   async processItem(item) {
     logger.debug(`Processing ${item.name}...`);
 
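
Note: pixelmatch writes into the pre-allocated diff buffer and expects both inputs and the output to share the same dimensions, while the code above sizes everything from the previous screenshot only. A guard along these lines would avoid a throw on mismatched screenshots (the early return is illustrative, not part of the commit):

// Sketch only: pixelmatch throws if the two screenshots differ in size,
// so a guard like this would skip the pixel diff instead of crashing.
if (img1.width !== img2.width || img1.height !== img2.height) {
  logger.warn('Screenshot sizes differ, skipping pixel diff');
  return null; // hypothetical fallback value
}
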
@@ -178,9 +169,11 @@ class ChangeDetection extends Scraper {
     const cleanedDiff = diff.cleanupSemantic(textDiff);
     const levenshtein = diff.levenshtein(textDiff);
 
     // logger.debug('textDiff:', textDiff);
 
-    logger.debug('cleanedDiff:',cleanedDiff );
+    logger.debug('levenshtein:', levenshtein);
+
+    logger.debug('cleanedDiff:', cleanedDiff );
 
     if (levenshtein !== 0) {
       logger.info('Changed...');
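
Note: the change check above compares innerText snapshots with the text-diff package and treats any non-zero Levenshtein distance as a change. The same pattern in isolation, with placeholder strings:

// Minimal sketch with placeholder inputs: a Levenshtein distance of 0 means
// the two texts are identical, anything else counts as a change.
const Diff = require('text-diff');

const diff = new Diff();
const textDiff = diff.main('old page text', 'new page text');

diff.cleanupSemantic(textDiff);

if (diff.levenshtein(textDiff) !== 0) {
  console.log('Changed...');
}
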
@@ -203,13 +196,13 @@ class ChangeDetection extends Scraper {
       fs.writeFileSync(oldFile, innerText.body, 'utf-8');
       this.stats.set(filename, stats);
 
-      const pugData = {...stats, ...item};
+      const pugData = { ...stats, ...item}; // eslint-disable-line
 
       // console.log(pugData);
       await this.sendSMTP(pugData, './');
-    } else {
-      logger.debug('No change...');
     }
+    else
+      logger.debug('No change...');
   }
 }
 
@@ -230,15 +223,12 @@ class ChangeDetection extends Scraper {
 
     const oldFile = `${this.path}/previous.html`;
 
-
     const innerText = await this.page.evaluate(() => {
       return {
         'body': document.body.innerText
       };
     });
 
-
-
     if (!fs.existsSync(oldFile))
       fs.writeFileSync(oldFile, body.body, 'utf-8');
     else {
@@ -331,7 +321,8 @@ class ChangeDetection extends Scraper {
     await this.saveSettings();
 
     await this._done();
-    /*}
+
+    /* }
     catch(e) {
       throw new Error(e);
     }*/