# awesome-network-analysis/check.r
#
# Common errors
# -------------
# 400 Bad Request
# 403 Forbidden (e.g. Nature website)
# 404 Not Found
# 501 Not Implemented
# 999 LinkedIn being defensive
#
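# Fetches the README of the awesome-network-analysis list, extracts all of
# its links, and logs those that do not return HTTP 200 to check.log. If
# check.log already exists, only the URLs listed in it are re-checked.
#
# Typical usage (assumed): Rscript check.r
#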
library(httr)
library(stringr)
f <- "check.log" # failures are logged here; an existing log doubles as a re-check list
if (!file.exists(f)) {
  u <- str_c(
    "https://raw.githubusercontent.com/",
    "briatte/awesome-network-analysis/",
    "master/README.md"
  )
  cat("Source:", u, "\n")
  u <- GET(u) %>%
    content("text") %>%
    str_split("\\n") %>% # split into lines so [foo]: bar links can be matched
    unlist()
  # remove links that have been commented out
  u <- str_remove_all(u, "<!--.*?-->")
  # total number of links; the (?<!/) lookbehind makes Wayback Machine links
  # (e.g. https://web.archive.org/web/2015/http://example.org), which embed
  # the archived URL after a slash, count only once
  t <- sum(str_count(u, "(?<!/)http"))
  cat(t, "URLs, ")
  l <- c(
    # inline links: [foo](bar)
    str_extract_all(u, "\\(http(.*?)\\)") %>%
      lapply(str_replace_all, "^\\(|\\)$", "") %>%
      unlist(),
    # reference links: [foo]: bar
    str_extract_all(u, "^\\[(.*)\\]: (.*)") %>%
      unlist() %>%
      str_replace("^\\[(.*)\\]: (.*)", "\\2")
  )
  # every link counted above must also have been extracted
  stopifnot(length(l) == t)
} else {
  cat("Source:", f, "\n")
  # re-check only the URLs that failed on the previous run
  l <- str_subset(stringi::stri_read_lines(f), "^http")
  cat(length(l), "URLs, ")
}
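
# normalize the URL list: drop duplicates and stray whitespace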
l <- str_squish(sort(unique(l)))
cat(length(l), "unique\n")
cat("Ignoring", sum(str_detect(l, "^https://doi.org/")), "DOIs\n")
l <- str_subset(l, "^https://doi.org/", negate = TRUE)
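
# write a timestamped header, replacing any previous log; sink() redirects
# cat() output to the file until the matching sink() call closes it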
sink(f, append = FALSE)
cat(as.character(Sys.time()), ": checking", length(l), "URLs\n\n")
sink()
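
# check every URL, printing "." for HTTP 200, "X" for any other status code,
# and "?" when the request itself fails; non-200s and failures go to the log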
for (i in l) {
  # GET() can error out (e.g. on DNS failures or timeouts), hence the try()
  x <- try(status_code(GET(i)), silent = TRUE)
  if (!"try-error" %in% class(x) && x != 200) {
    cat("X")
    sink(f, append = TRUE)
    cat(i, "\nStatus code:", x, "\n\n")
    sink()
  } else if ("try-error" %in% class(x)) {
    cat("?")
    sink(f, append = TRUE)
    cat(i, "\nFailed to access\n\n")
    sink()
  } else {
    cat(".")
  }
  # progress report every 50 URLs
  if (!which(l == i) %% 50) {
    cat("", length(l) - which(l == i), "left\n")
  }
}
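
# every entry written to the log starts with its URL, so the number of lines
# starting with "http" is the number of errors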
sink(f, append = TRUE)
f <- sum(str_count(stringi::stri_read_lines(f), "^http")) # f now holds the error count
cat(as.character(Sys.time()), ": done,", f, "errors.\n")
sink()
cat("\n", f, "errors\n")