Skip to content
This repository has been archived by the owner on Feb 13, 2025. It is now read-only.

Commit

Permalink
PR Changes
Browse files Browse the repository at this point in the history
  • Loading branch information
Brandon32 committed Jun 10, 2024
1 parent 7c24887 commit fb11769
Show file tree
Hide file tree
Showing 3 changed files with 44 additions and 8 deletions.
1 change: 1 addition & 0 deletions bacom-upload/.env.example
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
SITE_ID=your_site_id
DRIVE_ID=your_drive_id
BEARER_TOKEN=your_bearer_token
24 changes: 16 additions & 8 deletions bacom-upload/migration.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,17 +25,20 @@ const config = {
list: [
'https://main--bacom--adobecom.hlx.live/query-index.json',
],
sharepointFolder: 'bacom', // The root folder in SharePoint to upload the documents to
siteUrl: 'https://main--bacom--adobecom.hlx.live',
stagePath: '/drafts/staged-content',
locales: JSON.parse(fs.readFileSync(`${pathname}locales.json`, 'utf8')),
prodSiteUrl: 'https://business.adobe.com',
reporter: new ExcelReporter(`${pathname}reports/${dateString}.xlsx`, false),
reporter: new ExcelReporter(`${pathname}reports/${dateString}.xlsx`, true),
outputDir: `${pathname}output`,
mdDir: `${pathname}md`,
mdCacheMs: 1 * 24 * 60 * 60 * 1000, // 1 day(s)
fetchWaitMs: 20,
};

let inputBearerToken = '';

/**
* Creates a block with the given name and fields.
*
Expand Down Expand Up @@ -71,9 +74,7 @@ async function uploadChunk(uploadUrl, buffer, chunkStart, chunkEnd) {

try {
const res = await fetch(uploadUrl, { method: 'PUT', body, headers });
console.log(res);
const uploadData = await res.json();
console.log(uploadData);
return uploadData;
} catch (e) {
console.error(e.message);
Expand Down Expand Up @@ -122,6 +123,7 @@ async function getBearerToken(token = '') {
bearerToken = await new Promise((resolve) => {
rl.question('Bearer token: ', resolve);
});
inputBearerToken = bearerToken;
}

const verification = await verifyBearerToken(bearerToken);
Expand All @@ -144,7 +146,6 @@ async function getBearerToken(token = '') {
* @returns {Promise<boolean>} - A promise that resolves to true if the document is uploaded successfully, false otherwise.
*/
async function uploadDocument(sessionUrl, buffer, bearerToken) {
console.error('NOT IMPLEMENTED!'); process.exit(0);
const chunkSize = 1024 * 10240; // 10 MB

const headers = {
Expand All @@ -163,6 +164,11 @@ async function uploadDocument(sessionUrl, buffer, bearerToken) {
if (!uploadData) {
return false;
}
console.log(`Uploaded chunk ${i / chunkSize + 1}/${Math.ceil(buffer.length / chunkSize)}`);
if (uploadData?.webUrl) {
console.log(`Document: ${uploadData?.webUrl}`);
config.reporter.log('upload', 'document', 'Document chunk uploaded successfully', { webUrl: uploadData.webUrl });
}
}

return true;
Expand All @@ -176,14 +182,13 @@ async function uploadDocument(sessionUrl, buffer, bearerToken) {
* @returns {Promise<boolean>} - Returns true if the document upload is successful, otherwise false.
*/
async function upload(entry, mdast) {
const bearerToken = await getBearerToken(BEARER_TOKEN);
const documentPath = entryToPath(entry);
const stagedEntry = localizeStagePath(documentPath, config.stagePath, config.locales);
const SPFileName = `website${stagedEntry}`;
const SPFileName = `${config.sharepointFolder}${stagedEntry}`;
const createSessionUrl = `https://graph.microsoft.com/v1.0/sites/${SITE_ID}/drives/${DRIVE_ID}/root:/${SPFileName}.docx:/createUploadSession`;

console.log(`Uploading ${entry} to ${stagedEntry} in SharePoint...`);
console.log(`URL: ${createSessionUrl}`);
console.log(`Upload URL: ${createSessionUrl}`);

const shouldContinue = await new Promise((resolve) => {
rl.question('Type \'y\' to continue with upload (ctrl-c to quit): ', resolve);
Expand All @@ -194,11 +199,14 @@ async function upload(entry, mdast) {
console.log('Uploading document...');

const buffer = await mdast2docx(mdast);
const bearerToken = await getBearerToken(inputBearerToken ?? BEARER_TOKEN);
const success = await uploadDocument(createSessionUrl, buffer, bearerToken);

if (success) {
config.reporter.log('sharepoint', 'upload', 'Document uploaded successfully');
console.log('Document uploaded successfully');
} else {
config.reporter.log('sharepoint', 'failed', 'Document upload failed');
console.error('Document upload failed');
}

Expand Down Expand Up @@ -257,7 +265,7 @@ export async function init(list) {
}

if (BEARER_TOKEN) {
console.log('Bearer Token set');
console.log('Bearer Token set from environment variable');
}

await BulkUpdate(config, migrate);
Expand Down
27 changes: 27 additions & 0 deletions bacom-upload/readme.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Bacom Upload

This is a proof-of-concept project to test uploading `.docx` files to SharePoint using the Microsoft Graph API.

## Steps

1. Update the `.env` file with the required environment variables.
2. Run the upload script to upload the `.docx` files to SharePoint.

## Environment Variables
Replace the following environment variables in the `.env` file:

```bash
SITE_ID=your_site_id
DRIVE_ID=your_drive_id
BEARER_TOKEN=your_bearer_token
```

Use the [Microsoft Graph Explorer](https://developer.microsoft.com/en-us/graph/graph-explorer) to look up the `SITE_ID` and `DRIVE_ID` values and to obtain a `BEARER_TOKEN`.

## Usage

Run the upload script directly:

```bash
node bacom-upload/migration.js
```

0 comments on commit fb11769

Please sign in to comment.