Commit: PR Changes

Brandon32 committed Jun 10, 2024
1 parent 7c24887 commit 9650996
Showing 3 changed files with 64 additions and 17 deletions.
1 change: 1 addition & 0 deletions bacom-upload/.env.example
@@ -1,2 +1,3 @@
SITE_ID=your_site_id
DRIVE_ID=your_drive_id
BEARER_TOKEN=your_bearer_token
53 changes: 36 additions & 17 deletions bacom-upload/migration.js
@@ -13,7 +13,9 @@ import { entryToPath } from '../bulk-update/document-manager/document-manager.js
dotenv.config({ path: 'bacom-upload/.env' });

const { SITE_ID, DRIVE_ID, BEARER_TOKEN } = process.env;
const LOCAL_SAVE = false;
const GRAPH_UPLOAD = true;
const PROMPT_CONTINUE = true;
const { pathname } = new URL('.', import.meta.url);
const dateString = ExcelReporter.getDateString();
const rl = readline.createInterface({
@@ -25,17 +27,20 @@ const config = {
list: [
'https://main--bacom--adobecom.hlx.live/query-index.json',
],
sharepointFolder: 'bacom', // The root folder in SharePoint to upload the documents to
siteUrl: 'https://main--bacom--adobecom.hlx.live',
stagePath: '/drafts/staged-content',
locales: JSON.parse(fs.readFileSync(`${pathname}locales.json`, 'utf8')),
prodSiteUrl: 'https://business.adobe.com',
reporter: new ExcelReporter(`${pathname}reports/${dateString}.xlsx`, false),
reporter: new ExcelReporter(`${pathname}reports/${dateString}.xlsx`, true),
outputDir: `${pathname}output`,
mdDir: `${pathname}md`,
mdCacheMs: 1 * 24 * 60 * 60 * 1000, // 1 day(s)
fetchWaitMs: 20,
};

let inputBearerToken = '';

/**
* Creates a block with the given name and fields.
*
@@ -71,16 +76,20 @@ async function uploadChunk(uploadUrl, buffer, chunkStart, chunkEnd) {

try {
const res = await fetch(uploadUrl, { method: 'PUT', body, headers });
console.log(res);
const uploadData = await res.json();
console.log(uploadData);
return uploadData;
} catch (e) {
console.error(e.message);
console.error(`Error uploading chunk: ${e.message}`);
return false;
}
}

/**
* Verifies the validity of a bearer token by making a request to the Microsoft Graph API.
*
* @param {string} bearerToken - The bearer token to be verified.
* @returns {Promise<{ success: boolean, message: string }>} - A promise that resolves to an object containing the success status and a message.
*/
async function verifyBearerToken(bearerToken) {
try {
const res = await fetch('https://graph.microsoft.com/v1.0/me', { headers: { Authorization: `Bearer ${bearerToken}` } });
Expand Down Expand Up @@ -122,6 +131,7 @@ async function getBearerToken(token = '') {
bearerToken = await new Promise((resolve) => {
rl.question('Bearer token: ', resolve);
});
inputBearerToken = bearerToken;
}

const verification = await verifyBearerToken(bearerToken);
@@ -144,7 +154,6 @@ async function getBearerToken(token = '') {
* @returns {Promise<boolean>} - A promise that resolves to true if the document is uploaded successfully, false otherwise.
*/
async function uploadDocument(sessionUrl, buffer, bearerToken) {
console.error('NOT IMPLEMENTED!'); process.exit(0);
const chunkSize = 1024 * 10240; // 10 MB

const headers = {
@@ -163,6 +172,11 @@ async function uploadDocument(sessionUrl, buffer, bearerToken) {
if (!uploadData) {
return false;
}
console.log(`Uploaded chunk ${i / chunkSize + 1}/${Math.ceil(buffer.length / chunkSize)}`);
if (uploadData?.webUrl) {
console.log(`Document URL: ${uploadData?.webUrl}`);
config.reporter.log('upload', 'document', 'Document chunk uploaded successfully', { webUrl: uploadData.webUrl });
}
}

return true;
Expand All @@ -176,29 +190,33 @@ async function uploadDocument(sessionUrl, buffer, bearerToken) {
* @returns {Promise<boolean>} - Returns true if the document upload is successful, otherwise false.
*/
async function upload(entry, mdast) {
const bearerToken = await getBearerToken(BEARER_TOKEN);
const documentPath = entryToPath(entry);
const stagedEntry = localizeStagePath(documentPath, config.stagePath, config.locales);
const SPFileName = `website${stagedEntry}`;
const SPFileName = `${config.sharepointFolder}${stagedEntry}`;
const createSessionUrl = `https://graph.microsoft.com/v1.0/sites/${SITE_ID}/drives/${DRIVE_ID}/root:/${SPFileName}.docx:/createUploadSession`;

console.log(`Uploading ${entry} to ${stagedEntry} in SharePoint...`);
console.log(`URL: ${createSessionUrl}`);
console.log(`Uploading ${entry} to ${stagedEntry}.docx in SharePoint...`);
console.log(`Upload URL: ${createSessionUrl}`);

const shouldContinue = await new Promise((resolve) => {
rl.question('Type \'y\' to continue with upload (ctrl-c to quit): ', resolve);
});
if (PROMPT_CONTINUE) {
const shouldContinue = await new Promise((resolve) => {
rl.question('Type \'y\' to continue with upload (ctrl-c to quit): ', resolve);
});

if (shouldContinue.toLowerCase() !== 'y') return false;
if (shouldContinue.toLowerCase() !== 'y') return false;
}

console.log('Uploading document...');

const buffer = await mdast2docx(mdast);
const bearerToken = await getBearerToken(inputBearerToken || BEARER_TOKEN);
const success = await uploadDocument(createSessionUrl, buffer, bearerToken);

if (success) {
config.reporter.log('sharepoint', 'upload', 'Document uploaded successfully');
console.log('Document uploaded successfully');
} else {
config.reporter.log('sharepoint', 'failed', 'Document upload failed');
console.error('Document upload failed');
}

@@ -229,7 +247,9 @@ export async function migrate(document) {
config.reporter.log('migration', 'create', 'Created hide block');
}

await saveDocument(document, config);
if (LOCAL_SAVE) {
await saveDocument(document, config);
}
if (GRAPH_UPLOAD) {
await upload(entry, mdast);
}
@@ -257,7 +277,7 @@ export async function init(list) {
}

if (BEARER_TOKEN) {
console.log('Bearer Token set');
console.log('Bearer Token set from environment variable');
}

await BulkUpdate(config, migrate);
@@ -269,8 +289,7 @@ export async function init(list) {
*/
if (import.meta.url === `file://${process.argv[1]}`) {
const args = process.argv.slice(2);
const DEFAULTS = ['bacom-upload/list.json'];
const [list] = args.length ? args : DEFAULTS;
const [list] = args;

await init(list);
process.exit(0);
27 changes: 27 additions & 0 deletions bacom-upload/readme.md
@@ -0,0 +1,27 @@
# Bacom Upload

This is a proof-of-concept project to test uploading .docx files to SharePoint using the Microsoft Graph API.
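
At a high level, `migration.js` converts each entry to a `.docx` buffer, creates a Microsoft Graph upload session for the target SharePoint path, and PUTs the file bytes in ranged chunks. A rough sketch of the same Graph calls with `curl` follows; the target path and file name are placeholders, not values taken from the script:

```bash
# 1. Create an upload session for the target file (placeholder path and file name).
curl -s -X POST \
  -H "Authorization: Bearer $BEARER_TOKEN" \
  "https://graph.microsoft.com/v1.0/sites/$SITE_ID/drives/$DRIVE_ID/root:/bacom/drafts/example.docx:/createUploadSession"

# 2. PUT the file bytes to the "uploadUrl" returned above, declaring the byte range of each chunk.
SIZE=$(wc -c < example.docx)
curl -s -X PUT \
  -H "Content-Range: bytes 0-$((SIZE - 1))/$((SIZE))" \
  --data-binary @example.docx \
  "$UPLOAD_URL"
```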

## Steps

1. Update the `.env` file with the required environment variables.
2. Run the upload script to upload the .docx files to SharePoint.

## Environment Variables

Replace the following environment variables in the `.env` file:

```bash
SITE_ID=your_site_id
DRIVE_ID=your_drive_id
BEARER_TOKEN=your_bearer_token
```

Use the Graph Explorer (https://developer.microsoft.com/en-us/graph/graph-explorer) to get the `SITE_ID`, `DRIVE_ID`, and `BEARER_TOKEN` values.
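
Once you have a bearer token, the two IDs can also be looked up with direct Graph calls; the hostname and site name below are placeholders:

```bash
# The "id" field of the returned site resource is the SITE_ID (placeholder hostname and site name).
curl -s -H "Authorization: Bearer $BEARER_TOKEN" \
  "https://graph.microsoft.com/v1.0/sites/{hostname}:/sites/{site-name}"

# List the site's drives (document libraries); the "id" of the target library is the DRIVE_ID.
curl -s -H "Authorization: Bearer $BEARER_TOKEN" \
  "https://graph.microsoft.com/v1.0/sites/$SITE_ID/drives"
```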

## Usage

Run the upload script directly:

```bash
node bacom-upload/migration.js
```
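
The script reads an optional entry list from its first command-line argument (see `migration.js`), so a list file can also be passed explicitly; for example, assuming a list such as `bacom-upload/list.json`:

```bash
node bacom-upload/migration.js bacom-upload/list.json
```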
