Mirror of https://github.com/veggiemonk/awesome-docker.git (synced 2025-01-09 06:28:18 -05:00)

Merge pull request #598 from veggiemonk/auto-build-metadata: Auto build metadata

Commit e5d847bce9
.gitignore (vendored, 1 line changed)

@@ -5,7 +5,6 @@ build
 node_modules
 .cache
 dist
-package-lock.json
 website/index.html
 website/table.html
 
.travis.yml (25 lines changed)

@@ -1,7 +1,24 @@
-language: ruby
-rvm:
-- 2.2
+# This is a weird way of telling Travis to use the fast container-based test
+# runner instead of the slow VM-based runner.
+sudo: false
+
+language: node_js
+
+# Only clone the most recent commit.
+git:
+  depth: 1
+
+# keep cache of the deps
+cache:
+  directories:
+  - "node_modules"
+
+before_install:
+- rvm install 2.2
+
 before_script:
 - gem install awesome_bot
+
 script:
-- awesome_bot README.md --white-list wiki/Installing,slideshare,awesome-docker.svg,edit/master,sindresorhus/awesome,ashmckenzie,shortcircuit.net.au,photo777.org,adrianmouat,redhat,viget,meros,toedter,datadoghq,amazon.cn,blog.codeship.com,swarm-v-fleet,amazon.com,blogs.splunk.com,docs.mesosphere.com,containership.io,gliderlabs.com,twistlock.com,shipyard-project.com,dashboard.tutum.co,vimeo.com,docs.microsoft.com,eclipse.org,codenvy.io,gitter.im,sebgoa.blogspot.be,mindmeister.com,events.docker,blog.sixeyed
+- awesome_bot README.md --white-list edit/master,sindresorhus/awesome,www.udacity.com,screwdriver.cd,veggiemonk/awesome-docker,vimeo.com
+- TOKEN=$GITHUB_TOKEN node buildMetadata.js && ./push.sh
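The final script: line re-exports the Travis secret GITHUB_TOKEN under the name the Node script actually reads. A minimal sketch of the receiving side, mirroring the guard added in the buildMetadata.js diff below:

// Receiving side of `TOKEN=$GITHUB_TOKEN node buildMetadata.js`:
// the inline assignment exposes the CI secret as process.env.TOKEN
// for the duration of the Node process.
if (!process.env.TOKEN) {
  console.error('no credentials found.');
  process.exit(1); // fail early instead of hitting the GitHub API anonymously
}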
README.md

@@ -236,7 +236,7 @@ _Source:_ [What is Docker](https://www.docker.com/what-docker)
 - [Flynn](https://github.com/flynn/flynn) - A next generation open source platform as a service
 - [Jelastic](https://jelastic.com/) :heavy_dollar_sign: - An advanced PaaS for developers that simplifies clustering and complex cloud deployments with powerful web UI and usage-only pricing
 - [Nanobox](https://github.com/nanobox-io/nanobox) :heavy_dollar_sign: - An application development platform that creates local environments that can then be deployed and scaled in the cloud.
-- [OpenShift][openshift] - An open source PaaS built on [Kubernetes][kubernetes] and optimized for Dockerized app development and deployment by [Red Hat](https://www.redhat.com/)
+- [OpenShift][openshift] - An open source PaaS built on [Kubernetes][kubernetes] and optimized for Dockerized app development and deployment by [Red Hat](https://www.redhat.com/en)
 - [Tsuru](https://github.com/tsuru/tsuru) - Tsuru is an extensible and open source Platform as a Service software
 - [Workflow](https://github.com/deis/workflow) - The open source PaaS for Kubernetes by [Deis](https://github.com/deis). Formerly Deis v1.
 - [ZEIT Now](https://github.com/zeit/now-cli) - A universal serverless single-command deploy for Node.js applications or any application with a Dockerfile.
@@ -266,7 +266,7 @@ _Source:_ [What is Docker](https://www.docker.com/what-docker)
 - [docker-bench-security](https://github.com/docker/docker-bench-security) - script that checks for dozens of common best-practices around deploying Docker containers in production. By [@docker][docker]
 - [docker-explorer](https://github.com/google/docker-explorer) - A tool to help forensicate offline docker acquisitions by [@Google][google]
 - [notary](https://github.com/theupdateframework/notary) - a server and a client for running and interacting with trusted collections. By [@TUF](https://github.com/theupdateframework)
-- [oscap-docker](https://github.com/OpenSCAP/openscap) - OpenSCAP provides oscap-docker tool which is used to scan Docker containers and images. By RedHat
+- [oscap-docker](https://github.com/OpenSCAP/openscap) - OpenSCAP provides oscap-docker tool which is used to scan Docker containers and images. By [OpenSCAP](https://github.com/OpenSCAP)
 - [Sysdig Falco](https://github.com/draios/falco) - Sysdig Falco is an open source container security monitor. It can monitor application, container, host, and network activity and alert on unauthorized activity.
 - [Sysdig Secure](https://sysdig.com/product/secure/) :heavy_dollar_sign: - Sysdig Secure addresses run-time security through behavioral monitoring and defense, and provides deep forensics based on open source Sysdig for incident response.
 - [Twistlock](https://www.twistlock.com/) :heavy_dollar_sign: - Twistlock Security Suite detects vulnerabilities, hardens container images, and enforces security policies across the lifecycle of applications.
@@ -591,7 +591,6 @@ Services to securely store your Docker images.
 - [Collecting docker logs and stats with Splunk](https://www.splunk.com/blog/2015/08/24/collecting-docker-logs-and-stats-with-splunk.html)
 - [CoScale](https://www.coscale.com/docker-monitoring) - Full stack monitoring for containerized applications and microservices. Powered by anomaly detection to find performance problems faster.
 - [Datadog](https://www.datadoghq.com/) - Datadog is a full-stack monitoring service for large-scale cloud environments that aggregates metrics/events from servers, databases, and applications. It includes support for Docker, Kubernetes, and Mesos.
-- [Meros](https://meros.io) - Analyzes containers resources, captures logs, remote web SSH terminal and powerful DevOps alerts.
 - [Prometheus](https://prometheus.io/) - Open-source service monitoring system and time series database
 - [Site24x7](https://www.site24x7.com/docker-monitoring.html) - Docker Monitoring for DevOps and IT is a SaaS Pay per Host model
 - [SPM for Docker][spm] - Monitoring of host and container metrics, Docker events and logs. Automatic log parser. Anomaly Detection and alerting for metrics and logs. [@sematext][sematext]
@@ -614,7 +613,7 @@ Blogs by
 - [@jwilder](http://jasonwilder.com/)
 - [@nickjanetakis](https://nickjanetakis.com/blog/tag/docker-tips-tricks-and-tutorials)
 - [@progrium](http://progrium.com/blog/)
-- [@sebgoa](http://sebgoa.blogspot.be/)
+- [@sebgoa](http://sebgoa.blogspot.com/)
 - [Container solutions](https://container-solutions.com/blog/)
 - [Container42](https://container42.com/)
 
@@ -641,7 +640,7 @@ Blogs by
 - [Docker Pirates ARMed with explosive stuff](http://blog.hypriot.com/) Huge resource on clustering, swarm, docker, pre-installed image for SD card on Raspberry Pi
 - [Get Docker up and running on the RaspberryPi in three steps](https://github.com/umiddelb/armhf/wiki/Get-Docker-up-and-running-on-the-RaspberryPi-%28ARMv6%29-in-three-steps)
 - [git push docker containers to linux devices](https://resin.io/) Modern DevOps for IoT, leveraging git and Docker.
-- [Installing, running, using Docker on armhf (ARMv7) devices](https://github.com/umiddelb/armhf/wiki/Installing,-running,-using-docker-on-armhf-(ARMv7)-devices)
+- [Installing, running, using Docker on armhf (ARMv7) devices](https://github.com/umiddelb/armhf/wiki/Installing,-running,-using-docker-on-armhf-%28ARMv7%29-devices)
 
 ## Security
 
buildMetadata.js (109 lines changed)

@@ -1,28 +1,38 @@
-const fs = require('fs');
-const { promisify } = require('util');
+const fs = require('fs-extra');
 const fetch = require('node-fetch');
 const dayjs = require('dayjs');
 
 require('draftlog').into(console);
 
+const LOG = {
+  error: (...args) => console.error('❌ ERROR', { ...args }),
+  debug: (...args) => {
+    if (process.env.DEBUG) console.log('💡 DEBUG: ', { ...args });
+  },
+};
+
 process.on('unhandledRejection', error => {
-  console.log('unhandledRejection', error.message);
+  LOG.error('unhandledRejection', error.message);
 });
 
 if (!process.env.TOKEN) {
-  throw new Error('no github token found');
+  LOG.error('no credentials found.');
+  process.exit(1);
 }
 
 // --- ENV VAR ---
 const BATCH_SIZE = parseInt(process.env.BATCH_SIZE, 10) || 10;
 const DELAY = parseInt(process.env.DELAY, 10) || 3000;
 
 // --- FILENAME ---
 const README = 'README.md';
-const GITHUB_METADATA_FILE = `data/${dayjs().format(
+const DATA_FOLDER = 'data';
+const GITHUB_METADATA_FILE = `${DATA_FOLDER}/${dayjs().format(
   'YYYY-MM-DDTHH.mm.ss',
 )}-fetched_repo_data.json`;
-const LATEST_FILENAME = 'data/latest';
-const GITHUB_REPOS = 'data/list_repos.json';
+const LATEST_FILENAME = `${DATA_FOLDER}/latest`;
+const GITHUB_REPOS = `${DATA_FOLDER}/list_repos.json`;
 
 // --- HTTP ---
 const API = 'https://api.github.com/';
 const options = {
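The new LOG helper gates debug output on the DEBUG environment variable and spreads its rest arguments into an object, so messages print keyed by position. A small usage sketch (the output shown in comments is illustrative):

// Run as: DEBUG=1 node buildMetadata.js
LOG.debug('batch fetched...');
// -> 💡 DEBUG:  { '0': 'batch fetched...' }   (silent when DEBUG is unset)
LOG.error('unhandledRejection', 'boom');
// -> ❌ ERROR { '0': 'unhandledRejection', '1': 'boom' }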
@@ -35,29 +45,32 @@ const options = {
 };
 
 const removeHost = x => x.slice('https://github.com/'.length, x.length);
-const readFile = promisify(fs.readFile);
-const writeFile = promisify(fs.writeFile);
-const printError = err => err && console.error('❌ ERROR', err);
 const barLine = console.draft('Starting batch...');
+const handleFailure = err => {
+  LOG.error(err);
+  process.exit(1);
+};
 
 const delay = ms =>
   new Promise(resolve => {
     setTimeout(() => resolve(), ms);
   });
 
-const get = (path, opt) =>
-  fetch(`${API}repos/${path}`, {
+const get = (pathURL, opt) => {
+  LOG.debug(` Fetching ${pathURL}`);
+  return fetch(`${API}repos/${pathURL}`, {
     ...options,
     ...opt,
   })
-    .catch(printError)
+    .catch(handleFailure)
     .then(response => {
       if (response.ok) return response.json();
       throw new Error('Network response was not ok.');
     })
-    .catch(printError);
+    .catch(handleFailure);
+};
 
-const fetchAll = batch => Promise.all(batch.map(async path => get(path)));
+const fetchAll = batch => Promise.all(batch.map(async pathURL => get(pathURL)));
 
 const extractAllRepos = markdown => {
   const re = /https:\/\/github\.com\/([a-zA-Z0-9-._]+)\/([a-zA-Z0-9-._]+)/g;
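With this refactor, any network or HTTP failure exits the process through handleFailure instead of being logged and swallowed by the old printError, and fetchAll fans one batch out in parallel. A hypothetical call, assuming a valid TOKEN (the repo paths and response fields here are illustrative of the GitHub repos API):

// Hypothetical usage of the helpers above:
const batch = ['veggiemonk/awesome-docker', 'flynn/flynn'];
fetchAll(batch).then(results => {
  // each entry is the parsed JSON body of GET /repos/:owner/:repo
  results.forEach(repo => console.log(repo.full_name, repo.stargazers_count));
});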
@@ -80,8 +93,9 @@ async function batchFetchRepoMetadata(githubRepos) {
   /* eslint-disable no-await-in-loop */
   for (let i = 0; i < repos.length; i += BATCH_SIZE) {
     const batch = repos.slice(i, i + BATCH_SIZE);
-    if (process.env.DEBUG) console.log({ batch });
+    LOG.debug({ batch });
     const res = await fetchAll(batch);
+    LOG.debug('batch fetched...');
     metadata.push(...res);
     ProgressBar(i, BATCH_SIZE, repos.length);
     // poor man's rate limiting so github don't ban us
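The no-await-in-loop suppression is deliberate: awaiting inside the loop serializes the bursts, so at most BATCH_SIZE (default 10) requests run in parallel before the script pauses DELAY ms (default 3000) for the next slice. A condensed sketch of the pattern, assuming the delay call sits in the elided tail of this hunk:

// Condensed batch-and-wait pattern:
for (let i = 0; i < repos.length; i += BATCH_SIZE) {
  const res = await fetchAll(repos.slice(i, i + BATCH_SIZE)); // one parallel burst
  metadata.push(...res);
  await delay(DELAY); // poor man's rate limiting between bursts
}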
@ -91,29 +105,60 @@ async function batchFetchRepoMetadata(githubRepos) {
|
|||||||
return metadata;
|
return metadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function shouldUpdate(fileLatestUpdate) {
|
||||||
|
LOG.debug({ fileLatestUpdate });
|
||||||
|
if (!fileLatestUpdate) return true;
|
||||||
|
|
||||||
|
const hours = fileLatestUpdate.slice(
|
||||||
|
'data/YYYY-MM-DDT'.length,
|
||||||
|
'data/YYYY-MM-DDTHH'.length,
|
||||||
|
);
|
||||||
|
const latestUpdate = dayjs(
|
||||||
|
fileLatestUpdate.slice('data/'.length, 'data/YYYY-MM-DD'.length),
|
||||||
|
).add(hours, 'hour');
|
||||||
|
|
||||||
|
LOG.debug({ latestUpdate: latestUpdate.format() });
|
||||||
|
|
||||||
|
const isMoreThanOneDay = dayjs().diff(latestUpdate, 'hours') >= 1;
|
||||||
|
return isMoreThanOneDay;
|
||||||
|
}
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
try {
|
try {
|
||||||
const markdown = await readFile(README, { encoding: 'utf8' });
|
const getLatest = await fs.readFile(LATEST_FILENAME, 'utf8');
|
||||||
const githubRepos = extractAllRepos(markdown);
|
|
||||||
await writeFile(
|
|
||||||
GITHUB_REPOS,
|
|
||||||
JSON.stringify(githubRepos, null, 2),
|
|
||||||
printError,
|
|
||||||
);
|
|
||||||
|
|
||||||
|
LOG.debug('Checking if updating is needed');
|
||||||
|
if (!shouldUpdate(getLatest)) {
|
||||||
|
LOG.debug('Last update was less than a day ago 😅. Exiting...');
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
const markdown = await fs.readFile(README, 'utf8');
|
||||||
|
const githubRepos = extractAllRepos(markdown);
|
||||||
|
LOG.debug('writing repo list to disk...');
|
||||||
|
await fs.outputJSON(GITHUB_REPOS, githubRepos, { spaces: 2 });
|
||||||
|
|
||||||
|
LOG.debug('fetching data...');
|
||||||
const metadata = await batchFetchRepoMetadata(githubRepos);
|
const metadata = await batchFetchRepoMetadata(githubRepos);
|
||||||
|
|
||||||
await writeFile(
|
LOG.debug('writing metadata to disk...');
|
||||||
GITHUB_METADATA_FILE,
|
await fs.outputJSON(GITHUB_METADATA_FILE, metadata, { spaces: 2 });
|
||||||
JSON.stringify(metadata, null, 2),
|
LOG.debug('✅ metadata saved');
|
||||||
printError,
|
|
||||||
);
|
|
||||||
console.log('✅ metadata saved');
|
|
||||||
|
|
||||||
// save the latest
|
LOG.debug('removing latest...');
|
||||||
fs.writeFile(LATEST_FILENAME, GITHUB_METADATA_FILE, printError);
|
await fs.remove(LATEST_FILENAME);
|
||||||
|
|
||||||
|
LOG.debug('writing latest...');
|
||||||
|
await fs.outputFile(LATEST_FILENAME, GITHUB_METADATA_FILE);
|
||||||
|
LOG.debug('✅ late update time saved', {
|
||||||
|
LATEST_FILENAME,
|
||||||
|
GITHUB_METADATA_FILE,
|
||||||
|
});
|
||||||
|
|
||||||
|
LOG.debug('gracefully shutting down.');
|
||||||
|
process.exit();
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
printError(err);
|
handleFailure(err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
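shouldUpdate recovers the timestamp positionally from the path stored in data/latest, using the template strings purely as length rulers; note that despite the isMoreThanOneDay name, the test passes after one hour. A worked example with an illustrative filename:

// data/latest holds the previous metadata filename, e.g.:
const fileLatestUpdate = 'data/2018-08-14T06.00.00-fetched_repo_data.json';
// slice('data/YYYY-MM-DDT'.length, 'data/YYYY-MM-DDTHH'.length) -> '06'
// slice('data/'.length, 'data/YYYY-MM-DD'.length)               -> '2018-08-14'
// latestUpdate = dayjs('2018-08-14').add('06', 'hour')          -> 2018-08-14 06:00
// the update runs again once dayjs().diff(latestUpdate, 'hours') >= 1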
data/list_repos.json

@@ -20,7 +20,7 @@
   "https://github.com/abesto/docker-compose-graphviz",
   "https://github.com/Alexis-benoist/draw-compose",
   "https://github.com/cisco/elsy",
-  "https://github.com/cloud66/habitus",
+  "https://github.com/cloud66-oss/habitus",
   "https://github.com/toscanini/maestro",
   "https://github.com/ihucos/plash",
   "https://github.com/grammarly/rocker-compose",
package-lock.json (generated, new file, 10577 lines)

File diff suppressed because it is too large
package.json

@@ -21,6 +21,7 @@
   "critical": "^1.3.3",
   "dayjs": "^1.6.6",
   "draftlog": "^1.0.12",
+  "fs-extra": "7.0.0",
   "jsdom": "^11.11.0",
   "list.js": "^1.5.0",
   "node-fetch": "^2.1.2",
push.sh (new executable file, 22 lines)

@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+# Exit immediately if a command returns a non-zero status.
+set -e
+
+# Set git credentials
+git config --global user.email "info@veggiemonk.ovh"
+git config --global user.name "veggiemonk-bot"
+
+# let git know where to apply the changes
+git checkout master
+
+echo 'Adding data files'
+git add data/*
+
+echo 'Committing files'
+git commit -m 'Automated update repository metadata'
+
+echo 'Pushing changes'
+git push https://$GITHUB_USER:$GITHUB_TOKEN@github.com/veggiemonk/awesome-docker master >/dev/null 2>&1
+
+echo 'Done.'