Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Create GitHub Actions Test Setup #652

Open
wants to merge 13 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions .github/workflows/s3-bucket.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# CI workflow that exercises node-pre-gyp's S3 publish/install paths
# across an OS x Node-version matrix (9 jobs total).
name: S3 Bucket Test

# Run on every push; workflow_dispatch additionally allows manual runs
# from the Actions tab.
on:
  push:
  workflow_dispatch:

jobs:
  test-on-os-node-matrix:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]
        node: [18, 20, 22]
    env:
      # Credentials and target bucket come from repository secrets.
      # On forks / unprivileged runs these resolve to empty strings,
      # which makes the final step skip itself (see its `if:` below).
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      S3_BUCKET: ${{ secrets.S3_BUCKET }}

    name: Test S3 Bucket - Node ${{ matrix.node }} on ${{ matrix.os }}

    steps:
      - name: Checkout ${{ github.ref }}
        uses: actions/checkout@v4

      - name: Setup node ${{ matrix.node }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node }}

      - name: NPM Install
        run: npm install

      # Diagnostic step; GitHub masks secret values in the printenv output.
      - name: Show Environment Info
        run: |
          printenv
          node --version
          npm --version

      # `npm run bucket <name>` points the test apps at the real bucket,
      # then the tape-based S3 suite runs against it. Skipped entirely
      # when the S3_BUCKET secret is not configured.
      - name: Run S3 Tests (against ${{ env.S3_BUCKET }} bucket)
        run: |
          npm run bucket ${{ env.S3_BUCKET }}
          npm run test:s3
        if: ${{ env.S3_BUCKET != '' }}

7 changes: 7 additions & 0 deletions lib/install.js
Original file line number Diff line number Diff line change
Expand Up @@ -233,3 +233,10 @@ function install(gyp, argv, callback) {
});
}
}

// Test hook: setting the environment variable `node_pre_gyp_mock_s3` to any
// value enables intercepting outgoing http requests to s3 (using nock) and
// serving them from a mocked S3 file system (using mock-aws-s3).
// Installed at module load so every download in this file is intercepted.
if (process.env.node_pre_gyp_mock_s3) {
  require('./mock/http')();
}
39 changes: 39 additions & 0 deletions lib/mock/http.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
'use strict';

module.exports = exports = http_mock;

const fs = require('fs');
const path = require('path');
const nock = require('nock');
const os = require('os');

const log = require('npmlog');
log.disableProgress(); // disable the display of a progress bar
log.heading = 'node-pre-gyp'; // differentiate node-pre-gyp's logs from npm's

/**
 * Install a persistent nock interceptor for GET requests to any
 * s3.us-east-1 amazonaws.com host. Responses are served from the scratch
 * directory under os.tmpdir() where the mock-aws-s3 layer (lib/mock/s3.js)
 * writes its objects, so "uploaded" binaries can be "downloaded" again.
 */
function http_mock() {
  log.warn('mocking http requests to s3');

  const root = `${os.tmpdir()}/mock`;
  const bucket = 'npg-mock-bucket';

  // Map a request uri onto the on-disk file the s3 mock wrote. When the
  // uri does not already carry the bucket name, prefix the bucket dir.
  const toFilePath = (uri) => {
    const dir = uri.indexOf(bucket) === -1 ? `${root}/${bucket}` : root;
    // undo the url-encoding of '+' that s3-style keys carry
    return path.join(dir, uri.replace(new RegExp('%2B', 'g'), '+'));
  };

  nock(new RegExp('([a-z0-9]+[.])*s3[.]us-east-1[.]amazonaws[.]com'))
    .persist()
    .get(() => true) // a predicate that always matches is nock's catch-all
    .reply((uri) => {
      const filepath = toFilePath(uri);

      try {
        fs.accessSync(filepath, fs.constants.R_OK);
      } catch (e) {
        return [404, 'not found\n'];
      }

      // the mock s3 functions write objects to disk; stream one back.
      return [200, fs.createReadStream(filepath)];
    });
}
42 changes: 42 additions & 0 deletions lib/mock/s3.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
'use strict';

module.exports = exports = s3_mock;

const AWSMock = require('mock-aws-s3');
const os = require('os');

const log = require('npmlog');
log.disableProgress(); // disable the display of a progress bar
log.heading = 'node-pre-gyp'; // differentiate node-pre-gyp's logs from npm's

/**
 * Build an object exposing the subset of the AWS.S3 API that node-pre-gyp
 * uses (listObjects / headObject / deleteObject / putObject), backed by
 * mock-aws-s3 storing objects under os.tmpdir().
 */
function s3_mock() {
  log.warn('mocking s3 operations');

  AWSMock.config.basePath = `${os.tmpdir()}/mock`;

  const s3 = AWSMock.S3();

  // mock-aws-s3 surfaces raw fs errors (code ENOENT) where the real
  // AWS.S3 client reports NotFound; rewrite the code so callers only
  // ever see the AWS-shaped error.
  function wrap(callback) {
    return (err, ...rest) => {
      if (err && err.code === 'ENOENT') {
        err.code = 'NotFound';
      }
      return callback(err, ...rest);
    };
  }

  return {
    listObjects: (params, callback) => s3.listObjects(params, wrap(callback)),
    headObject: (params, callback) => s3.headObject(params, wrap(callback)),
    deleteObject: (params, callback) => s3.deleteObject(params, wrap(callback)),
    putObject: (params, callback) => s3.putObject(params, wrap(callback))
  };
}
17 changes: 3 additions & 14 deletions lib/node-pre-gyp.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,18 +10,13 @@ module.exports = exports;
* Module dependencies.
*/

// load mocking control function for accessing s3 via https. the function is a noop always returning
// false if not mocking.
exports.mockS3Http = require('./util/s3_setup').get_mockS3Http();
exports.mockS3Http('on');
const mocking = exports.mockS3Http('get');


const fs = require('fs');
const path = require('path');
const nopt = require('nopt');
const log = require('npmlog');
log.disableProgress();
log.disableProgress(); // disable the display of a progress bar
log.heading = 'node-pre-gyp'; // differentiate node-pre-gyp's logs from npm's

const napi = require('./util/napi.js');

const EE = require('events').EventEmitter;
Expand All @@ -43,12 +38,6 @@ const cli_commands = [
];
const aliases = {};

// differentiate node-pre-gyp's logs from npm's
log.heading = 'node-pre-gyp';

if (mocking) {
log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`);
}

// this is a getter to avoid circular reference warnings with node v14.
Object.defineProperty(exports, 'find', {
Expand Down
106 changes: 4 additions & 102 deletions lib/util/s3_setup.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@
module.exports = exports;

const url = require('url');
const fs = require('fs');
const path = require('path');

module.exports.detect = function(opts) {
const config = {};
Expand Down Expand Up @@ -60,40 +58,11 @@ module.exports.detect = function(opts) {
};

module.exports.get_s3 = function(config) {

// setting an environment variable: node_pre_gyp_mock_s3 to any value
// enables intercepting outgoing http requests to s3 (using nock) and
// serving them from a mocked S3 file system (using mock-aws-s3)
if (process.env.node_pre_gyp_mock_s3) {
// here we're mocking. node_pre_gyp_mock_s3 is the scratch directory
// for the mock code.
const AWSMock = require('mock-aws-s3');
const os = require('os');

AWSMock.config.basePath = `${os.tmpdir()}/mock`;

const s3 = AWSMock.S3();

// wrapped callback maker. fs calls return code of ENOENT but AWS.S3 returns
// NotFound.
const wcb = (fn) => (err, ...args) => {
if (err && err.code === 'ENOENT') {
err.code = 'NotFound';
}
return fn(err, ...args);
};

return {
listObjects(params, callback) {
return s3.listObjects(params, wcb(callback));
},
headObject(params, callback) {
return s3.headObject(params, wcb(callback));
},
deleteObject(params, callback) {
return s3.deleteObject(params, wcb(callback));
},
putObject(params, callback) {
return s3.putObject(params, wcb(callback));
}
};
return require('../mock/s3')();
}

// if not mocking then setup real s3.
Expand All @@ -117,71 +86,4 @@ module.exports.get_s3 = function(config) {
return s3.putObject(params, callback);
}
};



};

//
// function to get the mocking control function. if not mocking it returns a no-op.
//
// if mocking it sets up the mock http interceptors that use the mocked s3 file system
// to fulfill responses.
module.exports.get_mockS3Http = function() {
let mock_s3 = false;
if (!process.env.node_pre_gyp_mock_s3) {
return () => mock_s3;
}

const nock = require('nock');
// the bucket used for testing, as addressed by https.
const host = 'https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com';
const mockDir = process.env.node_pre_gyp_mock_s3 + '/mapbox-node-pre-gyp-public-testing-bucket';

// function to setup interceptors. they are "turned off" by setting mock_s3 to false.
const mock_http = () => {
// eslint-disable-next-line no-unused-vars
function get(uri, requestBody) {
const filepath = path.join(mockDir, uri.replace('%2B', '+'));

try {
fs.accessSync(filepath, fs.constants.R_OK);
} catch (e) {
return [404, 'not found\n'];
}

// the mock s3 functions just write to disk, so just read from it.
return [200, fs.createReadStream(filepath)];
}

// eslint-disable-next-line no-unused-vars
return nock(host)
.persist()
.get(() => mock_s3) // mock any uri for s3 when true
.reply(get);
};

// setup interceptors. they check the mock_s3 flag to determine whether to intercept.
mock_http(nock, host, mockDir);
// function to turn matching all requests to s3 on/off.
const mockS3Http = (action) => {
const previous = mock_s3;
if (action === 'off') {
mock_s3 = false;
} else if (action === 'on') {
mock_s3 = true;
} else if (action !== 'get') {
throw new Error(`illegal action for setMockHttp ${action}`);
}
return previous;
};

// call mockS3Http with the argument
// - 'on' - turn it on
// - 'off' - turn it off (used by fetch.test.js so it doesn't interfere with redirects)
// - 'get' - return true or false for 'on' or 'off'
return mockS3Http;
};



4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,8 @@
"lint": "eslint bin/node-pre-gyp lib/*js lib/util/*js test/*js scripts/*js",
"fix": "npm run lint -- --fix",
"update-crosswalk": "node scripts/abi_crosswalk.js",
"test": "tape test/*test.js"
"test": "tape test/*test.js",
"test:s3": "tape test/s3.test.js",
"bucket": "node scripts/set-bucket.js"
}
}
47 changes: 47 additions & 0 deletions scripts/set-bucket.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
'use strict';

// script changes the bucket name set in package.json of the test apps.

const fs = require('fs');
const path = require('path');

// http mock (lib/mock/http.js) sets 'npg-mock-bucket' as default bucket name.
// when providing no bucket name as argument, script will set
// all apps back to default mock settings.
const bucket = process.argv[2] || 'npg-mock-bucket';

const root = '../test';
const rootPath = path.resolve(__dirname, root);
// every directory under test/ is assumed to be a test app with a package.json
const dirs = fs.readdirSync(rootPath).filter((fileorDir) => fs.lstatSync(path.resolve(rootPath, fileorDir)).isDirectory());

dirs.forEach((dir) => {
  const pkg = require(`${root}/${dir}/package.json`); // relative path

  // guard: without a "binary" section the property accesses below would
  // crash with an unhelpful TypeError — fail with an actionable message.
  if (!pkg.binary) {
    throw new Error(`Error: ${dir}/package.json has no "binary" section. Manually check package.json`);
  }

  // bucket specified as part of s3 virtual host format (auto detected by node-pre-gyp)
  const keys = ['host', 'staging_host', 'production_host'];
  keys.forEach((item) => {
    if (pkg.binary[item]) {

      // match the bucket part of the url
      const match = pkg.binary[item].match(/^https:\/\/(.+)(?:\.s3[-.].*)$/i);
      if (match) {
        pkg.binary[item] = pkg.binary[item].replace(match[1], bucket);
        console.log(`Success: set ${dir} ${item} to ${pkg.binary[item]}`);
      }
    }
  });
  // bucket is specified explicitly
  if (pkg.binary.bucket) {
    pkg.binary.bucket = bucket;
    console.log(`Set ${dir} bucket to ${pkg.binary.bucket}`);
  }

  // make sure bucket name is set in the package (somewhere) else this is an obvious error.
  // most likely due to manual editing of the json resulting in unusable format
  const str = JSON.stringify(pkg, null, 4);
  if (str.indexOf(bucket) !== -1) {
    fs.writeFileSync(path.join(path.resolve(rootPath, dir), 'package.json'), str + '\n');
  } else {
    throw new Error(`Error: could not set ${dir}. Manually check package.json`);
  }
});
Loading