Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

rewrote the package, added download feature too #114

Merged
merged 2 commits into from
Oct 10, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 3 additions & 3 deletions src/example.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
'use strict';

var Scraper = require('./google/scraper');
const Scraper = require('./google');
Copy link
Owner

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nice, you can see that the first commit is 10 years old ;)


let google = new Scraper();
const google = new Scraper();

(async () => {
const results = await google.scrape('banana', 10); // Or ['banana', 'strawberry'] for multi-queries
const results = await google.downloadImages('cat', 10);
console.log('results', results);
})();
175 changes: 175 additions & 0 deletions src/google/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
'use strict';

const puppeteer = require('puppeteer');
const fs = require("fs");
const axios = require("axios");
const path = require('path');
const logger = require('../logger');

/**
 * Scrapes Google Images search results with Puppeteer and optionally
 * downloads the matched images to disk.
 *
 * @param {string | string[]} userAgent - a user-agent string, or a list from
 *   which one is picked at random per instance
 * @param {number} scrollDelay - milliseconds to wait between page scrolls
 *   while lazy-loaded results are fetched
 * @param {object} puppeteer - options forwarded to `puppeteer.launch`
 * @param {object} tbs - extra key/value pairs for the `tbs` request parameter
 * @param {boolean} safe - when true, appends `safe=active` (SafeSearch)
 */
class GoogleScraper {
  constructor({
    userAgent = [
      'Mozilla/5.0 (X11; Linux i686; rv:64.0) Gecko/20100101 Firefox/64.0',
      'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
      'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.0 Safari/605.1.15',
      'Mozilla/5.0 (iPhone; CPU iPhone OS 15_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.0 Mobile/15E148 Safari/604.1',
      'Mozilla/5.0 (Linux; Android 10; SM-G970F) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Mobile Safari/537.36',
    ],
    scrollDelay = 500,
    puppeteer = { headless: true },
    tbs = {},
    safe = false,
  } = {}) {
    // Pick one user agent at random when a list is supplied.
    this.userAgent = Array.isArray(userAgent)
      ? userAgent[Math.floor(Math.random() * userAgent.length)]
      : userAgent;
    this.scrollDelay = scrollDelay;
    this.puppeteerOptions = puppeteer;
    this.tbs = this._parseRequestParameters(tbs);
    this.safe = this._isQuerySafe(safe);
    this.browser = null;
  }

  /**
   * Downloads images for the given queries into `directory` (one
   * sub-folder per query key) and returns the scraped URL map.
   *
   * @param {string | string[]} queries - search term(s)
   * @param {number} limit - maximum images per query
   * @param {string} directory - target folder, relative to the CWD
   * @returns {Promise<object|undefined>} map of query key -> [{ query, url }],
   *   or undefined when scraping failed
   */
  async downloadImages(queries, limit = 5, directory = 'downloads') {
    const downloadFolder = path.join(process.cwd(), directory);
    // recursive:true is a no-op when the folder exists and also supports
    // nested `directory` values such as 'out/images'.
    fs.mkdirSync(downloadFolder, { recursive: true });

    const imageUrls = await this.getImageUrl(queries, limit);

    // getImageUrl resolves to undefined on failure; for...in then simply
    // performs zero iterations, preserving the original best-effort contract.
    for (const queryKey in imageUrls) {
      // Create the per-query folder once, not once per image.
      const queryDownloadPath = path.join(downloadFolder, queryKey);
      fs.mkdirSync(queryDownloadPath, { recursive: true });

      const imageUrlList = imageUrls[queryKey];
      for (let i = 0; i < imageUrlList.length; i++) {
        const { url } = imageUrlList[i];

        // Derive the file extension from the Content-Type header of a HEAD
        // request, falling back to .jpg when the request fails.
        let extension = '.jpg';
        try {
          const response = await axios.head(url);
          const contentType = response.headers['content-type'];
          if (contentType) {
            if (contentType.includes('image/jpeg')) extension = '.jpg';
            else if (contentType.includes('image/png')) extension = '.png';
            else if (contentType.includes('image/gif')) extension = '.gif';
            else if (contentType.includes('image/webp')) extension = '.webp';
          }
        } catch (error) {
          logger.info(`Error fetching headers for ${url}: ${error.message}`);
        }

        const fileName = `${queryKey}_${i + 1}${extension}`;
        const filePath = path.join(queryDownloadPath, fileName);

        try {
          const imageResponse = await axios.get(url, { responseType: 'arraybuffer' });
          fs.writeFileSync(filePath, imageResponse.data);
          logger.info(`Downloaded ${fileName}`);
        } catch (error) {
          // Best effort: one failed download must not abort the whole batch.
          logger.error(`Error downloading image from ${url}: ${error.message}`);
        }
      }
      logger.info(`Saved files at ${downloadFolder}`);
    }

    return imageUrls;
  }

  /**
   * Scrapes Google Images and collects result URLs per query.
   *
   * @param {string | string[]} queries - search term(s)
   * @param {number} limit - maximum URLs to keep per query
   * @returns {Promise<object|undefined>} map of query key (whitespace
   *   stripped) -> [{ query, url }], or undefined when scraping failed
   */
  async getImageUrl(queries, limit = 5) {
    let browser = null;
    try {
      browser = await puppeteer.launch({ ...this.puppeteerOptions });
      const page = await browser.newPage();
      await page.setBypassCSP(true);
      await page.setUserAgent(this.userAgent);
      const imageUrlObject = {};

      // Shared per-query scraper (DRY between the string and array cases).
      const getUrls = async (query) => {
        const pageUrl = `https://www.google.com/search?${this.safe}&source=lnms&tbs=${this.tbs}&tbm=isch&q=${this._parseRequestQueries(query)}`;
        logger.debug(pageUrl);
        await page.goto(pageUrl);

        // Scroll to trigger lazy-loading of more thumbnails.
        // BUG FIX: `this` is not serialized into the browser context, so
        // `this.scrollDelay` evaluated to undefined there (0 ms delay);
        // the delay must be passed as an evaluate argument instead.
        await page.evaluate(async (scrollDelay) => {
          for (let i = 0; i < 10; i++) {
            window.scrollBy(0, window.innerHeight);
            await new Promise((resolve) => setTimeout(resolve, scrollDelay));
          }
        }, this.scrollDelay);

        await page.waitForSelector('img');

        // Keep only external image URLs (drops data: URIs and Google assets).
        const images = await page.evaluate(() => {
          const imageElements = document.querySelectorAll('img');
          return Array.from(imageElements)
            .map((img) => img.src)
            .filter((url) => url.startsWith('http') && !url.includes('google'));
        });

        const queryKey = query.replace(/\s/g, '');
        imageUrlObject[queryKey] = images.slice(0, limit).map((url) => ({ query, url }));
      };

      if (Array.isArray(queries)) {
        for (const query of queries) {
          await getUrls(query);
        }
      } else {
        await getUrls(queries);
      }

      return imageUrlObject;
    } catch (err) {
      // Preserve the original contract: log the failure, resolve undefined.
      logger.error('An error occurred:', err);
    } finally {
      // BUG FIX: the browser process was leaked whenever an error occurred
      // before the close call at the end of the try block; always close it.
      if (browser) {
        await browser.close();
      }
    }
  }

  /**
   * Encodes the `tbs` option object into Google's `key:value,...` format.
   * Entries with falsy values are dropped.
   * @param {object} tbs
   * @returns {string} URL-encoded tbs value ('' when tbs is falsy)
   */
  _parseRequestParameters(tbs) {
    if (!tbs) {
      return '';
    }

    return encodeURIComponent(
      Object.entries(tbs)
        .filter(([, value]) => value)
        .map(([key, value]) => `${key}:${value}`)
        .join(',')
    );
  }

  /**
   * URL-encodes a search query ('' for falsy input).
   * @param {string} query
   * @returns {string}
   */
  _parseRequestQueries(query) {
    return query ? encodeURIComponent(query) : '';
  }

  /**
   * Maps the `safe` flag to its query-string fragment.
   * @param {boolean} safe
   * @returns {string} '&safe=active' when truthy, '' otherwise
   */
  _isQuerySafe(safe) {
    return safe ? '&safe=active' : '';
  }
}

module.exports = GoogleScraper;
Loading
Loading