Mirror of https://github.com/veggiemonk/awesome-docker.git (synced 2024-10-01 01:36:03 -04:00)
Rename files
commit 2c53974940 · parent 6235ada42e

@@ -1,4 +1,6 @@
{
    "bracketSpacing": true,
    "tabWidth": 4,
    "semi": true,
    "trailingComma": "all",
    "singleQuote": true

@@ -1,132 +0,0 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');

require('draftlog').into(console);

const LOG = {
    error: (...args) => console.error(' ERROR', { ...args }),
    debug: (...args) => {
        if (process.env.DEBUG) console.log('💡 DEBUG: ', { ...args });
    },
};
const handleFailure = (err) => {
    LOG.error(err);
    process.exit(1);
};

process.on('unhandledRejection', handleFailure);

if (!process.env.GITHUB_TOKEN) {
    LOG.error('no credentials found.');
    process.exit(1);
}

const TOKEN = process.env.GITHUB_TOKEN;

// --- ENV VAR ---
const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 10;
const DELAY = parseInt(process.env.DELAY, 10) || 3000;
const INTERVAL = parseInt(process.env.INTERVAL, 10) || 1;
const INTERVAL_UNIT = process.env.INTERVAL_UNIT || 'days';

// --- FILES ---
const DATA_FOLDER = 'data';
const README = 'README.md';
const LATEST_FILENAME = `${DATA_FOLDER}/latest`;
const GITHUB_REPOS = `${DATA_FOLDER}/repository.json`;
const Authorization = `token ${TOKEN}`;

// --- HTTP ---
const API = 'https://api.github.com/';
const options = {
    method: 'GET',
    headers: {
        'User-Agent': 'awesome-docker script listing',
        'Content-Type': 'application/json',
        Authorization,
    },
};

// ----------------------------------------------------------------------------
const removeHost = (x) => x.slice('https://github.com/'.length, x.length);

const delay = (ms) =>
    new Promise((resolve) => {
        setTimeout(() => resolve(), ms);
    });

const get = (pathURL, opt) => {
    LOG.debug(`Fetching ${pathURL}`);
    return fetch(`${API}repos/${pathURL}`, {
        ...options,
        ...opt,
    })
        .catch(handleFailure)
        .then((response) => {
            if (response.ok) return response.json();
            throw new Error('Network response was not ok.');
        })
        .catch(handleFailure);
};

const fetchAll = (batch) =>
    Promise.all(batch.map(async (pathURL) => get(pathURL)));

const extractAllLinks = (markdown) => {
    const re = /((([A-Za-z]{3,9}:(?:\/\/)?)(?:[\-;:&=\+\$,\w]+@)?[A-Za-z0-9\.\-]+|(?:www\.|[\-;:&=\+\$,\w]+@)[A-Za-z0-9\.\-]+)((?:\/[\+~%\/\.\w\-_]*)?\??(?:[\-\+=&;%@\.\w_]*)#?(?:[\.\!\/\\\w]*))?)/g;
    return markdown.match(re);
};

const extractAllRepos = (markdown) => {
    const re = /https:\/\/github\.com\/([a-zA-Z0-9-._]+)\/([a-zA-Z0-9-._]+)/g;
    const md = markdown.match(re);
    return [...new Set(md)];
};

const ProgressBar = (i, batchSize, total) => {
    const progress = Math.round((i / total) * 100);
    const units = Math.round(progress / 2);
    const barLine = console.draft('Starting batch...');
    return barLine(
        `[${'='.repeat(units)}${' '.repeat(50 - units)}] ${progress}% - # ${i}`,
    );
};
// ----------------------------------------------------------------------------
async function batchFetchRepoMetadata(githubRepos) {
    const repos = githubRepos.map(removeHost);

    const metadata = [];
    /* eslint-disable no-await-in-loop */
    for (let i = 0; i < repos.length; i += BATCH_SIZE) {
        const batch = repos.slice(i, i + BATCH_SIZE);
        LOG.debug({ batch });
        const res = await fetchAll(batch);
        LOG.debug('batch fetched...');
        metadata.push(...res);
        ProgressBar(i, BATCH_SIZE, repos.length);
        // poor man's rate limiting so github doesn't ban us
        await delay(DELAY);
    }
    ProgressBar(repos.length, BATCH_SIZE, repos.length);
    return metadata;
}

async function main() {
    try {
        const markdown = await fs.readFile(README, 'utf8');
        const links = extractAllLinks(markdown);
        const githubRepos = extractAllRepos(markdown);
        LOG.debug('writing repo list to disk...');
        await fs.outputJSON(GITHUB_REPOS, githubRepos, { spaces: 2 });

        LOG.debug('fetching data...');
        const metadata = await batchFetchRepoMetadata(githubRepos);

        LOG.debug('gracefully shutting down.');
        process.exit();
    } catch (err) {
        handleFailure(err);
    }
}

main();

@@ -6,7 +6,7 @@
    "scripts": {
        "build": "rimraf ./dist/ && node build.js",
        "test-pr": "node tests/pull_request.js",
-       "test": "node tests/all.js"
+       "test": "node tests/test_all.js"
    },
    "repository": {
        "type": "git",

tests/all.js (111 lines, deleted)
@@ -1,111 +0,0 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');
const helper = require('./helper');

function envvar_undefined(variable_name) {
    throw new Error(`${variable_name} must be defined`);
}

console.log({
    DEBUG: process.env.DEBUG || false,
});

const README = 'README.md';
const GITHUB_GQL_API = 'https://api.github.com/graphql';
const TOKEN = process.env.GITHUB_TOKEN || envvar_undefined('GITHUB_TOKEN');

const Authorization = `token ${TOKEN}`;

const make_GQL_options = (query) => ({
    method: 'POST',
    headers: {
        Authorization,
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
    body: JSON.stringify({ query }),
});

const extract_repos = (arr) =>
    arr
        .map((e) => e.substr('https://github.com/'.length).split('/'))
        .filter((r) => r.length === 2 && r[1] !== '');

const generate_GQL_query = (arr) =>
    `query AWESOME_REPOS{ ${arr
        .map(
            ([owner, name]) =>
                `repo_${owner.replace(/(-|\.)/g, '_')}_${name.replace(
                    /(-|\.)/g,
                    '_',
                )}: repository(owner: "${owner}", name:"${name}"){ nameWithOwner } `,
        )
        .join('')} }`;

async function main() {
    const has_error = {
        show: false,
        duplicates: '',
        other_links_error: '',
        github_repos: '',
    };
    const markdown = await fs.readFile(README, 'utf8');
    let links = helper.extract_all_links(markdown);
    links = links.filter((l) => !helper.exclude_from_list(l)); // exclude websites
    helper.LOG.debug_string({ links });

    console.log(`total links to check ${links.length}`);

    console.log('checking for duplicates links...');

    const duplicates = helper.find_duplicates(links);
    if (duplicates.length > 0) {
        has_error.show = true;
        has_error.duplicates = duplicates;
    }
    helper.LOG.debug_string({ duplicates });
    const [github_links, external_links] = helper.partition(links, (link) =>
        link.startsWith('https://github.com'),
    );

    console.log(`checking ${external_links.length} external links...`);

    const external_links_error = await helper.batch_fetch({
        arr: external_links,
        get: helper.fetch_link,
        post_filter_func: (x) => !x[1].ok,
        BATCH_SIZE: 8,
    });
    if (external_links_error.length > 0) {
        has_error.show = true;
        has_error.other_links_error = external_links_error;
    }

    console.log(`checking ${github_links.length} GitHub repositories...`);

    const repos = extract_repos(github_links);
    const query = generate_GQL_query(repos);
    const options = make_GQL_options(query);
    const gql_response = await fetch(GITHUB_GQL_API, options).then((r) =>
        r.json(),
    );
    if (gql_response.errors) {
        has_error.show = true;
        has_error.github_repos = gql_response.errors;
    }

    console.log({
        TEST_PASSED: has_error.show,
        GITHUB_REPOSITORY: github_links.length,
        EXTERNAL_LINKS: external_links.length,
    });

    if (has_error.show) {
        helper.LOG.error_string(has_error);
        process.exit(1);
    }
}

console.log('starting...');
main();

@@ -1,63 +1,65 @@
const fs = require('fs-extra');
-const helper = require('./helper');
+const helper = require('./common');

console.log({
-  DEBUG: process.env.DEBUG || false,
+    DEBUG: process.env.DEBUG || false,
});

const README = 'README.md';

async function main() {
-  const has_error = {
-    show: false,
-    duplicates: '',
-    other_links_error: '',
-  };
-  const markdown = await fs.readFile(README, 'utf8');
-  let links = helper.extract_all_links(markdown);
-  links = links.filter((l) => !helper.exclude_from_list(l)); // exclude websites
-  helper.LOG.debug_string({ links });
+    const has_error = {
+        show: false,
+        duplicates: '',
+        other_links_error: '',
+    };
+    const markdown = await fs.readFile(README, 'utf8');
+    let links = helper.extract_all_links(markdown);
+    links = links.filter((l) => !helper.exclude_from_list(l)); // exclude websites
+    helper.LOG.debug_string({ links });

-  console.log(`total links to check ${links.length}`);
+    console.log(`total links to check ${links.length}`);

-  console.log('checking for duplicates links...');
+    console.log('checking for duplicates links...');

-  const duplicates = helper.find_duplicates(links);
-  if (duplicates.length > 0) {
-    has_error.show = true;
-    has_error.duplicates = duplicates;
-  }
-  helper.LOG.debug_string({ duplicates });
-  const [github_links, external_links] = helper.partition(links, (link) =>
-    link.startsWith('https://github.com'),
-  );
+    const duplicates = helper.find_duplicates(links);
+    if (duplicates.length > 0) {
+        has_error.show = true;
+        has_error.duplicates = duplicates;
+    }
+    helper.LOG.debug_string({ duplicates });
+    const [github_links, external_links] = helper.partition(links, (link) =>
+        link.startsWith('https://github.com'),
+    );

-  console.log(`checking ${external_links.length} external links...`);
+    console.log(`checking ${external_links.length} external links...`);

-  const external_links_error = await helper.batch_fetch({
-    arr: external_links,
-    get: helper.fetch_link,
-    post_filter_func: (x) => !x[1].ok,
-    BATCH_SIZE: 8,
-  });
-  if (external_links_error.length > 0) {
-    has_error.show = true;
-    has_error.other_links_error = external_links_error;
-  }
+    const external_links_error = await helper.batch_fetch({
+        arr: external_links,
+        get: helper.fetch_link,
+        post_filter_func: (x) => !x[1].ok,
+        BATCH_SIZE: 8,
+    });
+    if (external_links_error.length > 0) {
+        has_error.show = true;
+        has_error.other_links_error = external_links_error;
+    }

-  console.log(`checking ${github_links.length} GitHub repositories...`);
+    console.log(`checking ${github_links.length} GitHub repositories...`);

-  console.log(`skipping GitHub repository check. Run "npm run test" to execute them manually.`);
+    console.log(
+        `skipping GitHub repository check. Run "npm run test" to execute them manually.`,
+    );

-  console.log({
-    TEST_PASSED: !has_error.show,
-    EXTERNAL_LINKS: external_links.length,
-  });
+    console.log({
+        TEST_PASSED: !has_error.show,
+        EXTERNAL_LINKS: external_links.length,
+    });

-  if (has_error.show) {
-    helper.LOG.error_string(has_error);
-    process.exit(1);
-  }
+    if (has_error.show) {
+        helper.LOG.error_string(has_error);
+        process.exit(1);
+    }
}

console.log('starting...');
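
The hunk above switches the test script from require('./helper') to require('./common'); the helper module itself is not part of this diff. Purely as a sketch inferred from the call sites in these tests (extract_all_links, exclude_from_list, find_duplicates, partition, batch_fetch, fetch_link, LOG.debug_string, LOG.error_string), tests/common.js might look roughly like the following. Every name and behavior here is an assumption drawn from usage, not the repository's actual implementation, and the excluded-host list is a placeholder.

// tests/common.js -- hypothetical sketch, inferred from how the tests call it.
const fetch = require('node-fetch');

const LOG = {
    error: (...args) => console.error('ERROR', ...args),
    error_string: (obj) => console.error(JSON.stringify(obj, null, 2)),
    debug: (...args) => {
        if (process.env.DEBUG) console.log('DEBUG:', ...args);
    },
    debug_string: (obj) => {
        if (process.env.DEBUG) console.log(JSON.stringify(obj, null, 2));
    },
};

// Pull every http(s) URL out of the README markdown.
const extract_all_links = (markdown) =>
    markdown.match(/https?:\/\/[^\s)\]]+/g) || [];

// Links that should not be checked (placeholder list, assumed).
const exclude_from_list = (link) =>
    ['https://img.shields.io', 'https://travis-ci.org'].some((prefix) =>
        link.startsWith(prefix),
    );

// Return every value that appears more than once.
const find_duplicates = (arr) => {
    const seen = new Set();
    const dup = new Set();
    arr.forEach((x) => (seen.has(x) ? dup.add(x) : seen.add(x)));
    return [...dup];
};

// Split an array into [matches, rest] according to a predicate.
const partition = (arr, predicate) =>
    arr.reduce(
        ([pass, fail], x) =>
            predicate(x) ? [[...pass, x], fail] : [pass, [...fail, x]],
        [[], []],
    );

// Fetch one link and pair it with its outcome so callers can inspect `ok`.
const fetch_link = async (url) => {
    try {
        const response = await fetch(url);
        return [url, { ok: response.ok, status: response.status }];
    } catch (err) {
        return [url, { ok: false, error: err.message }];
    }
};

// Fetch `arr` in batches of BATCH_SIZE and keep only entries selected by post_filter_func.
const batch_fetch = async ({ arr, get, post_filter_func, BATCH_SIZE = 8 }) => {
    const results = [];
    for (let i = 0; i < arr.length; i += BATCH_SIZE) {
        // eslint-disable-next-line no-await-in-loop
        const batch = await Promise.all(arr.slice(i, i + BATCH_SIZE).map(get));
        results.push(...batch);
    }
    return results.filter(post_filter_func);
};

module.exports = {
    LOG,
    extract_all_links,
    exclude_from_list,
    find_duplicates,
    partition,
    batch_fetch,
    fetch_link,
};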

tests/test_all.js (111 lines, new file)
@@ -0,0 +1,111 @@
const fs = require('fs-extra');
const fetch = require('node-fetch');
const helper = require('./common');

function envvar_undefined(variable_name) {
    throw new Error(`${variable_name} must be defined`);
}

console.log({
    DEBUG: process.env.DEBUG || false,
});

const README = 'README.md';
const GITHUB_GQL_API = 'https://api.github.com/graphql';
const TOKEN = process.env.GITHUB_TOKEN || envvar_undefined('GITHUB_TOKEN');

const Authorization = `token ${TOKEN}`;

const make_GQL_options = (query) => ({
    method: 'POST',
    headers: {
        Authorization,
        'Content-Type': 'application/json',
        'user-agent':
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
    },
    body: JSON.stringify({ query }),
});

const extract_repos = (arr) =>
    arr
        .map((e) => e.substr('https://github.com/'.length).split('/'))
        .filter((r) => r.length === 2 && r[1] !== '');

const generate_GQL_query = (arr) =>
    `query AWESOME_REPOS{ ${arr
        .map(
            ([owner, name]) =>
                `repo_${owner.replace(/(-|\.)/g, '_')}_${name.replace(
                    /(-|\.)/g,
                    '_',
                )}: repository(owner: "${owner}", name:"${name}"){ nameWithOwner } `,
        )
        .join('')} }`;

async function main() {
    const has_error = {
        show: false,
        duplicates: '',
        other_links_error: '',
        github_repos: '',
    };
    const markdown = await fs.readFile(README, 'utf8');
    let links = helper.extract_all_links(markdown);
    links = links.filter((l) => !helper.exclude_from_list(l)); // exclude websites
    helper.LOG.debug_string({ links });

    console.log(`total links to check ${links.length}`);

    console.log('checking for duplicates links...');

    const duplicates = helper.find_duplicates(links);
    if (duplicates.length > 0) {
        has_error.show = true;
        has_error.duplicates = duplicates;
    }
    helper.LOG.debug_string({ duplicates });
    const [github_links, external_links] = helper.partition(links, (link) =>
        link.startsWith('https://github.com'),
    );

    console.log(`checking ${external_links.length} external links...`);

    const external_links_error = await helper.batch_fetch({
        arr: external_links,
        get: helper.fetch_link,
        post_filter_func: (x) => !x[1].ok,
        BATCH_SIZE: 8,
    });
    if (external_links_error.length > 0) {
        has_error.show = true;
        has_error.other_links_error = external_links_error;
    }

    console.log(`checking ${github_links.length} GitHub repositories...`);

    const repos = extract_repos(github_links);
    const query = generate_GQL_query(repos);
    const options = make_GQL_options(query);
    const gql_response = await fetch(GITHUB_GQL_API, options).then((r) =>
        r.json(),
    );
    if (gql_response.errors) {
        has_error.show = true;
        has_error.github_repos = gql_response.errors;
    }

    console.log({
        TEST_PASSED: has_error.show,
        GITHUB_REPOSITORY: github_links.length,
        EXTERNAL_LINKS: external_links.length,
    });

    if (has_error.show) {
        helper.LOG.error_string(has_error);
        process.exit(1);
    }
}

console.log('starting...');
main();
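
tests/test_all.js checks every GitHub link with a single GraphQL request by giving each repository its own aliased field, so one round-trip covers the whole list and any missing repository surfaces in the response's errors array, which is the only field the test inspects. The snippet below is a small, self-contained illustration of the query string that generate_GQL_query builds; the two repositories in the sample list are made up for illustration and are not taken from README.md.

// Illustration only: the repository list is a hypothetical sample.
const sample = [
    ['veggiemonk', 'awesome-docker'],
    ['moby', 'moby'],
];

// Same aliasing rule as generate_GQL_query above: '-' and '.' are replaced
// with '_' so each repository becomes a valid, unique GraphQL field alias.
const query = `query AWESOME_REPOS{ ${sample
    .map(
        ([owner, name]) =>
            `repo_${owner.replace(/(-|\.)/g, '_')}_${name.replace(
                /(-|\.)/g,
                '_',
            )}: repository(owner: "${owner}", name:"${name}"){ nameWithOwner } `,
    )
    .join('')} }`;

console.log(query);
// Prints roughly:
// query AWESOME_REPOS{ repo_veggiemonk_awesome_docker: repository(owner: "veggiemonk", name:"awesome-docker"){ nameWithOwner } repo_moby_moby: repository(owner: "moby", name:"moby"){ nameWithOwner }  }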