🍛 Commit progress before lunch break

This commit is contained in:
steinkirch.eth, phd 2023-07-15 13:01:58 -07:00
parent 4cf49ba50b
commit 9aa9e827c0
19 changed files with 378 additions and 19 deletions

View file

@ -54,23 +54,6 @@
* HTTP/3
* gRPC
* WebRTC
<br>
* **[HTTP](https/)**
* https communication
* https over TCP with TLS 1.2
* https over TCP with TLS 1.3
* https over QUIC (HTTP/3)
* https over TFO with TLS 1.3
* https over TCP with TLS 1.3 and 0-RTT
* https over QUIC with 0-RTT
<br>
* **[proxy and load balance](proxy_and_lb)**
* proxy vs. reverse proxy
* Layer 4 vs. Layer 7 load balancers

View file

@ -0,0 +1 @@
/node_modules

View file

@ -0,0 +1,35 @@
// gRPC client for the Todo service defined in todo.proto
const grpc = require("grpc");
const protoLoader = require("@grpc/proto-loader");

// load the proto definition at runtime and grab the todoPackage namespace
const packageDef = protoLoader.loadSync("todo.proto", {});
const grpcObject = grpc.loadPackageDefinition(packageDef);
const todoPackage = grpcObject.todoPackage;

// the todo text comes from the first command-line argument
const text = process.argv[2];

// connect to the demo server on localhost:40000 (no TLS)
const client = new todoPackage.Todo("localhost:40000",
    grpc.credentials.createInsecure());
console.log(text);

// unary call: create a todo item and log the server's response
client.createTodo({
    "id": -1,
    "text": text
}, (err, response) => {
    console.log("Received from server " + JSON.stringify(response));
});

/*
// unary call: read all todos in a single response
client.readTodos(null, (err, response) => {
    console.log("read the todos from server " + JSON.stringify(response));
    if (response.items)
        response.items.forEach(a => console.log(a.text));
});
*/

// server-streaming call: todo items arrive one by one as "data" events
const call = client.readTodosStream();
call.on("data", item => {
    console.log("received item from server " + JSON.stringify(item));
});
call.on("end", e => console.log("server done!"));

View file

@ -0,0 +1,16 @@
{
"name": "grpc-demo",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@grpc/proto-loader": "^0.5.3",
"grpc": "^1.24.9"
}
}

View file

@ -0,0 +1,38 @@
// gRPC server for the Todo service defined in todo.proto
const grpc = require("grpc");
const protoLoader = require("@grpc/proto-loader");

// load the proto definition at runtime and grab the todoPackage namespace
const packageDef = protoLoader.loadSync("todo.proto", {});
const grpcObject = grpc.loadPackageDefinition(packageDef);
const todoPackage = grpcObject.todoPackage;

// bind a plaintext server on port 40000 and register the three RPC handlers
const server = new grpc.Server();
server.bind("0.0.0.0:40000",
    grpc.ServerCredentials.createInsecure());
server.addService(todoPackage.Todo.service,
    {
        "createTodo": createTodo,
        "readTodos": readTodos,
        "readTodosStream": readTodosStream
    });
server.start();

// in-memory store for the demo
const todos = [];

// unary RPC: append the new item and echo it back to the caller
function createTodo(call, callback) {
    const todoItem = {
        "id": todos.length + 1,
        "text": call.request.text
    };
    todos.push(todoItem);
    callback(null, todoItem);
}

// server-streaming RPC: write each item as its own message, then end the stream
function readTodosStream(call, callback) {
    todos.forEach(t => call.write(t));
    call.end();
}

// unary RPC: return all items in a single response
function readTodos(call, callback) {
    callback(null, {"items": todos});
}

View file

@ -0,0 +1,21 @@
syntax = "proto3";
package todoPackage;
service Todo {
rpc createTodo(TodoItem) returns (TodoItem);
rpc readTodos(voidNoParam) returns (TodoItems);
rpc readTodosStream(voidNoParam) returns (stream TodoItem);
}
message voidNoParam {}
message TodoItem {
int32 id = 1;
string text = 2;
}
message TodoItems {
repeated TodoItem items = 1;
}

View file

@ -0,0 +1,36 @@
// long-polling demo: the status endpoint holds the response until the job is done
const app = require("express")();

// in-memory map of jobId -> progress percentage
const jobs = {};

// client submits a job; respond immediately with a handle (the job id)
app.post("/submit", (req, res) => {
    const jobId = `job:${Date.now()}`;
    jobs[jobId] = 0;
    updateJob(jobId, 0);
    res.end("\n\n" + jobId + "\n\n");
});

// long poll: don't respond until the job has completed
app.get("/checkstatus", async (req, res) => {
    console.log(jobs[req.query.jobId]);
    while (await checkJobComplete(req.query.jobId) == false);
    res.end("\n\nJobStatus: Complete " + jobs[req.query.jobId] + "%\n\n");
});

app.listen(8080, () => console.log("listening on 8080"));

// resolves false after a one-second wait while the job is still in progress,
// true once it has reached 100%
async function checkJobComplete(jobId) {
    return new Promise((resolve, reject) => {
        if (jobs[jobId] < 100)
            setTimeout(() => resolve(false), 1000);
        else
            resolve(true);
    });
}

// simulate background work: bump progress by 10% every 10 seconds
function updateJob(jobId, prg) {
    jobs[jobId] = prg;
    console.log(`updated ${jobId} to ${prg}`);
    if (prg == 100) return;
    setTimeout(() => updateJob(jobId, prg + 10), 10000);
}

View file

@ -0,0 +1,15 @@
{
"name": "short-polling",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"express": "^4.18.2"
}
}

View file

@ -0,0 +1,29 @@
// RabbitMQ consumer: reads jobs from the "jobs" queue and acks valid ones
const amqp = require("amqplib");

connect();

async function connect() {
    try {
        // connect to the local broker
        const amqpServer = "amqp://localhost:5672";
        const connection = await amqp.connect(amqpServer);
        const channel = await connection.createChannel();

        // make sure the queue exists before consuming from it
        await channel.assertQueue("jobs");

        channel.consume("jobs", message => {
            const input = JSON.parse(message.content.toString());
            console.log(`Received job with input ${input.number}`);
            // input.number arrives as a string ("7" == 7 is true, "7" === 7 is false),
            // so the loose comparison is intentional here
            if (input.number == 7)
                channel.ack(message);
        });
        console.log("Waiting for messages...");
    }
    catch (ex) {
        console.error(ex);
    }
}

View file

@ -0,0 +1,16 @@
{
"name": "rabbitmq",
"version": "1.0.0",
"description": "",
"main": "publisher.js",
"scripts": {
"publish": "node publisher.js",
"consume": "node consumer.js"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"amqplib": "^0.5.5"
}
}

View file

@ -0,0 +1,22 @@
/* RabbitMQ publisher: sends one job to the "jobs" queue and exits */
const amqp = require("amqplib");

// the job payload comes from the first CLI argument (note: it stays a string)
const msg = { number: process.argv[2] };

connect();

async function connect() {
    try {
        // connect to the local broker
        const amqpServer = "amqp://localhost:5672";
        const connection = await amqp.connect(amqpServer);
        const channel = await connection.createChannel();

        // make sure the queue exists, then publish the JSON-encoded message
        await channel.assertQueue("jobs");
        await channel.sendToQueue("jobs", Buffer.from(JSON.stringify(msg)));
        console.log(`Job sent successfully ${msg.number}`);

        await channel.close();
        await connection.close();
    }
    catch (ex) {
        console.error(ex);
    }
}

View file

@ -0,0 +1,20 @@
## docker commands

### Spin up a rabbitmq server in docker

docker run --name rabbitmq -p 5672:5672 -d rabbitmq

### Spin up a rabbitmq server with the management HTTP API in docker

docker run --name rabbitmq -p 5672:5672 -p 15672:15672 -d rabbitmq:3-management

### Query the management HTTP API (e.g., from the browser console)

fetch("http://localhost:15672/api/vhosts", {headers: {"Authorization" : `Basic ${btoa('guest:guest')}`}}).then(a=>a.json()).then(console.log)
fetch("http://localhost:15672/api/channels", {headers: {"Authorization" : `Basic ${btoa('guest:guest')}`}}).then(a=>a.json()).then(console.log)
fetch("http://localhost:15672/api/queues", {headers: {"Authorization" : `Basic ${btoa('guest:guest')}`}}).then(a=>a.json()).then(console.log)

View file

@ -0,0 +1,30 @@
/* Client Code (browser console):
let sse = new EventSource("http://localhost:8888/stream");
sse.onmessage = console.log
*/

// server-sent events demo: one request, an unending streamed response
const app = require("express")();

app.get("/", (req, res) => res.send("hello!"));

// the event stream: set the SSE content type and start writing events
app.get("/stream", (req, res) => {
    res.setHeader("Content-Type", "text/event-stream");
    send(res);
});

const port = process.env.PORT || 8888;
let i = 0;

// write one "data:" event per second; the response is never ended
function send(res) {
    res.write("data: " + `hello from server ---- [${i++}]\n\n`);
    setTimeout(() => send(res), 1000);
}

app.listen(port);
console.log(`Listening on ${port}`);

View file

@ -0,0 +1,15 @@
{
"name": "server-sent-events",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"express": "^4.18.2"
}
}

View file

@ -0,0 +1,24 @@
// short-polling demo: the client polls the status endpoint repeatedly
const app = require("express")();

// in-memory map of jobId -> progress percentage
const jobs = {};

// client submits a job; respond immediately with a handle (the job id)
app.post("/submit", (req, res) => {
    const jobId = `job:${Date.now()}`;
    jobs[jobId] = 0;
    updateJob(jobId, 0);
    res.end("\n\n" + jobId + "\n\n");
});

// each poll returns the current progress right away
app.get("/checkstatus", (req, res) => {
    console.log(jobs[req.query.jobId]);
    res.end("\n\nJobStatus: " + jobs[req.query.jobId] + "%\n\n");
});

app.listen(8080, () => console.log("listening on 8080"));

// simulate background work: bump progress by 10% every 3 seconds
function updateJob(jobId, prg) {
    jobs[jobId] = prg;
    console.log(`updated ${jobId} to ${prg}`);
    if (prg == 100) return;
    setTimeout(() => updateJob(jobId, prg + 10), 3000);
}

View file

@ -0,0 +1,15 @@
{
"name": "short-polling",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"express": "^4.18.2"
}
}

View file

@ -162,9 +162,18 @@ curl -v --trace marinasouza.xyz
<br>
* used when a request takes a long time to process (e.g., uploading a video); very simple to build.
* however, it can be too chatty and use too much network bandwidth and backend resources.
<br>
#### basic idea
- when a request takes a long time to process (e.g., uploading a video)
- the backend wants to send back progress notifications
1. client sends a request
2. server responds immediately with a handle
3. server continues to process the request
4. client uses that handle to poll for status
5. multiple short request/response cycles serve as the polls (a minimal client sketch follows below)
<br>
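
A minimal client sketch of this flow, assuming the short-polling Express demo from this commit is running on localhost:8080 and Node 18+ (for the global fetch):

```js
// submit a job, then poll its status every 3 seconds with short requests
const BASE = "http://localhost:8080";   // assumed address of the demo server

async function main() {
  // submit the job; the handle comes back immediately in the response body
  const jobId = (await (await fetch(`${BASE}/submit`, { method: "POST" })).text()).trim();
  console.log("job handle:", jobId);

  // keep issuing short status requests until the job reports 100%
  const timer = setInterval(async () => {
    const status = (await (await fetch(`${BASE}/checkstatus?jobId=${encodeURIComponent(jobId)}`)).text()).trim();
    console.log(status);
    if (status.includes("100%")) clearInterval(timer);
  }, 3000);
}

main().catch(console.error);
```

<br>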
@ -173,6 +182,24 @@ curl -v --trace marinasouza.xyz
### Long Polling
<br>
* a polling request where the server only responds once the job is ready (used when a request takes a long time to process and the result is not needed in real time)
* used by Kafka
<br>
#### basic idea
<br>
1. client sends a request
2. server responds immediately with a handle
3. server continues to process the request
4. client uses that handle to check for status
5. server does not reply until it has the response (subject to timeouts); a minimal client sketch follows below
<br>
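
A minimal client sketch, assuming the long-polling Express demo from this commit is running on localhost:8080 and Node 18+ (in that demo the job takes roughly 100 seconds to complete):

```js
// submit a job, then issue a single status request that the server holds open
const BASE = "http://localhost:8080";   // assumed address of the demo server

async function main() {
  // submit the job; the handle comes back immediately
  const jobId = (await (await fetch(`${BASE}/submit`, { method: "POST" })).text()).trim();
  console.log("job handle:", jobId);

  // one long poll: the server only responds once the job reaches 100%
  const status = await (await fetch(`${BASE}/checkstatus?jobId=${encodeURIComponent(jobId)}`)).text();
  console.log(status.trim());
}

main().catch(console.error);
```

<br>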
---
@ -180,6 +207,22 @@ curl -v --trace marinasouza.xyz
### Server Sent Events
<br>
* one request with a long, streamed response; the client must stay online and be able to handle the data as it arrives.
<br>
#### basic idea
1. a regular response has a start and an end
2. client sends a request
3. server sends logical events as part of the response
4. server never writes the end of the response
5. it's still one request, but with an unending response
6. client parses the streamed data as it arrives
7. works over plain HTTP (a minimal client sketch follows below)
<br>
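
A minimal client sketch (browser console), assuming the server-sent-events Express demo from this commit is running on localhost:8888:

```js
// open the stream; the browser keeps the single request alive and reconnects if it drops
const sse = new EventSource("http://localhost:8888/stream");

// each "data:" chunk the server writes arrives as its own message event
sse.onmessage = (event) => console.log("received:", event.data);

// the server never ends the response, so close it from the client when done
// sse.close();
```

<br>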
----
