Commit

[Update] split code
HoangTran0410 committed Sep 15, 2021
1 parent 37f9b42 commit b3299bd
Showing 3 changed files with 66 additions and 61 deletions.
1 change: 1 addition & 0 deletions config.js
@@ -0,0 +1 @@
export const ACCESS_TOKEN = "YOUR_ACCESS_TOKEN";
64 changes: 64 additions & 0 deletions dowload_album.js
@@ -0,0 +1,64 @@
import fetch from "node-fetch";
import fs from "fs";
import { ACCESS_TOKEN } from "./config.js";

const HOST = "https://graph.facebook.com";

export const fetchAlbumDataFromCursor = async (
albumId,
cursor,
limit = 100
) => {
// build the Graph API request URL for this page of photos
let url = `${HOST}/${albumId}/photos?fields=largest_image&limit=${limit}&access_token=${ACCESS_TOKEN}`;
if (cursor) {
url += `&after=${cursor}`;
}

// fetch data
const response = await fetch(url);
const json = await response.json();

// return photo links plus the cursor for the next page (assumes a successful response with a data array)
return {
all_links: json.data.map((_) => _.largest_image.source),
nextCursor: json.paging?.cursors?.after || null,
};
};

export const fetchAlbumData = async (
albumId,
pageNum = Infinity,
pageSize = 100
) => {
let currentPage = 0;
let hasNextCursor = true;
let nextCursor = null;
let all_links = [];

while (hasNextCursor && currentPage < pageNum) {
console.log(`> Fetching page: ${currentPage}, pageSize: ${pageSize} ...`);

const data = await fetchAlbumDataFromCursor(albumId, nextCursor, pageSize);
all_links = all_links.concat(data.all_links);

console.log(`> Fetched ${data.all_links.length} photos.`);

nextCursor = data.nextCursor;
hasNextCursor = nextCursor != null;
currentPage++;
}

return all_links;
};

export const saveAlbumPhotoLinks = async (albumId) => {
const links = await fetchAlbumData(albumId);

const fileName = `data/${albumId}.txt`;
console.log(`Writing ${links.length} photo links to ${fileName}`);
// create the output directory if it does not already exist
fs.mkdirSync("data", { recursive: true });
// "a+" appends, so re-running adds links to any existing file
fs.writeFile(fileName, links.join("\n"), { flag: "a+" }, (err) => {
if (err) throw err;
console.log("Saved!");
});
};
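
For reference, the new module can also be driven one page at a time. A minimal usage sketch follows (the album id "123456789" is a placeholder; a valid ACCESS_TOKEN in config.js is assumed):

import { fetchAlbumData, fetchAlbumDataFromCursor } from "./dowload_album.js";

// fetch at most 2 pages of 50 photos each
const links = await fetchAlbumData("123456789", 2, 50);
console.log(`Got ${links.length} links`);

// or walk the paging cursor manually
let cursor = null;
do {
const page = await fetchAlbumDataFromCursor("123456789", cursor, 50);
console.log(`${page.all_links.length} links on this page`);
cursor = page.nextCursor;
} while (cursor);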
62 changes: 1 addition & 61 deletions index.js
@@ -1,64 +1,4 @@
import fetch from "node-fetch";
import fs from "fs";

const HOST = "https://graph.facebook.com";
const ACCESS_TOKEN = "YOUR_ACCESS_TOKEN";

const fetchAlbumDataFromCursor = async (albumId, cursor, limit = 100) => {
// create link to fetch
let url = `${HOST}/${albumId}/photos?fields=largest_image&limit=${limit}&access_token=${ACCESS_TOKEN}`;
if (cursor) {
url += `&after=${cursor}`;
}

// fetch data
const response = await fetch(url);
const json = await response.json();

// save data to all_links
let all_links = json.data.map((_) => _.largest_image.source);
console.log(`Fetched ${all_links.length} photos.`);

// get next cursor
const nextCursor = json.paging?.cursors?.after || null;

// return all_links + next cursor
return {
all_links,
nextCursor,
};
};

const fetchAlbumData = async (albumId, pageNum = Infinity, pageSize = 100) => {
let currentPage = 0;
let hasNextCursor = true;
let nextCursor = null;
let all_links = [];

while (hasNextCursor && currentPage < pageNum) {
console.log(`> Fetching page: ${currentPage}, pageSize: ${pageSize}`);

const data = await fetchAlbumDataFromCursor(albumId, nextCursor, pageSize);
all_links = all_links.concat(data.all_links);

nextCursor = data.nextCursor;
hasNextCursor = nextCursor != null;
currentPage++;
}

return all_links;
};

const saveAlbumPhotoLinks = async (albumId) => {
const links = await fetchAlbumData(albumId);

const fileName = `data/${albumId}.txt`;
console.log(`Writing ${links.length} photo links to ${fileName}`);
fs.writeFile(fileName, links.join("\n"), { flag: "a+" }, (err) => {
if (err) throw err;
console.log("Saved!");
});
};
import { saveAlbumPhotoLinks } from "./dowload_album.js";

// https://www.facebook.com/media/set/?vanity=ColourfulSpace&set=a.945632905514659
saveAlbumPhotoLinks("945632905514659");
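
For context, fetchAlbumDataFromCursor assumes the Graph API photos edge responds roughly in the shape sketched below (illustrative values; the exact payload depends on the album and API version):

// approximate response for GET /{album-id}/photos?fields=largest_image
{
"data": [
{ "largest_image": { "source": "https://scontent.../photo1.jpg" } }
],
"paging": {
"cursors": { "before": "...", "after": "NEXT_PAGE_CURSOR" }
}
}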
