# fix: improve netzschleuder internals #40

**Draft** · wants to merge 5 commits into `main`
## R/netzschleuder.R
48 changes: 30 additions & 18 deletions
```diff
@@ -40,16 +40,17 @@ resolve_name <- function(x) {
   x <- sub("//", "/", x)
 
   if (grepl("/", x)) {
-    x_split <- strsplit(x, "/", fixed = TRUE)[[1]]
-    if (length(x_split) > 2) {
+    res_names <- strsplit(x, "/", fixed = TRUE)[[1]]
+    bad_names_format <- (length(res_names) > 2)
+    if (bad_names_format) {
       cli::cli_abort(
-        "{.arg name} has {length(x_split)} components instead of 2."
+        "{.arg name} is not correctly formatted."
       )
     }
-    return(x_split)
   } else {
-    c(x, x)
+    res_names <- c(x, x)
   }
+  rlang::set_names(res_names, c("collection", "network"))
 }
 
 download_file <- function(zip_url, token = NULL, file, size_limit) {
```
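
With this hunk, `resolve_name()` always returns a named length-2 vector. A sketch of the expected behaviour ("dolphins" and "foo/bar" are only illustrative inputs, not calls from this PR):

```r
resolve_name("dolphins")
#> collection    network
#> "dolphins" "dolphins"

resolve_name("foo/bar")
#> collection network
#>      "foo"   "bar"

resolve_name("a/b/c")
#> Error in `resolve_name()`:
#> ! `name` is not correctly formatted.
```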
```diff
@@ -109,17 +110,20 @@ download_file <- function(zip_url, token = NULL, file, size_limit) {
 ns_metadata <- function(name, collection = FALSE) {
   rlang::check_installed("cli")
   net_ident <- resolve_name(name)
-  path <- sprintf("api/net/%s", net_ident[[1]])
-  collection_url <- sprintf("https://networks.skewed.de/net/%s", net_ident[[1]])
+  path <- sprintf("api/net/%s", net_ident[["collection"]])
+  collection_url <- sprintf(
+    "https://networks.skewed.de/net/%s",
+    net_ident[["collection"]]
+  )
   resp <- make_request(path)
   raw <- httr2::resp_body_json(resp)
   class(raw) <- c("ns_meta", class(raw))
   raw[["is_collection"]] <- collection
-  raw[["collection_name"]] <- net_ident[[1]]
+  raw[["collection_name"]] <- net_ident[["collection"]]
   if (collection) {
     return(raw)
   } else if (
-    net_ident[[1]] == net_ident[[2]] &&
+    net_ident[["collection"]] == net_ident[["network"]] &&
     length(unlist(raw$nets)) > 1 &&
     !collection
   ) {
```
```diff
@@ -129,10 +133,10 @@ ns_metadata <- function(name, collection = FALSE) {
         "i" = "see {.url {collection_url}}"
       )
     )
-  } else if (net_ident[[1]] == net_ident[[2]]) {
+  } else if (net_ident[["collection"]] == net_ident[["network"]]) {
     return(raw)
   } else {
```
> **Contributor:** Couldn't some of the `else if`s in this file be removed to instead just have `if`s?
>
> ```r
> if (bla) return(thing)
> if (blop) return(thing)
> ```
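
A hedged sketch of that early-return style applied to a simplified stand-in for the dispatch in `ns_metadata()` (`dispatch_meta()` and its conditions are illustrative, not the PR's code):

```r
# Illustrative only: guard clauses returning early instead of an
# if / else if / else chain.
dispatch_meta <- function(net_ident, raw, collection = FALSE) {
  if (collection) {
    return(raw)
  }
  same_name <- net_ident[["collection"]] == net_ident[["network"]]
  if (same_name && length(unlist(raw$nets)) > 1) {
    cli::cli_abort("{.arg name} matches an entire collection.")
  }
  if (same_name) {
    return(raw)
  }
  raw # single named network: fall through to the lookup logic
}
```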

```diff
-    idx <- which(unlist(raw[["nets"]]) == net_ident[[2]])
+    idx <- which(unlist(raw[["nets"]]) == net_ident[["network"]])
     if (length(idx) == 0) {
       cli::cli_abort(
         c(
@@ -141,7 +145,7 @@ ns_metadata <- function(name, collection = FALSE) {
         )
       )
     }
-    raw[["analyses"]] <- raw[["analyses"]][[net_ident[[2]]]]
+    raw[["analyses"]] <- raw[["analyses"]][[net_ident[["network"]]]]
     raw[["nets"]] <- raw[["nets"]][idx]
     raw
   }
```
```diff
@@ -163,14 +167,15 @@ ns_df <- function(name, token = NULL, size_limit = 1) {
     }
     meta <- name
     net_ident <- c(meta[["collection_name"]], meta[["nets"]])
+    names(net_ident) <- c("collection", "network")
```
> **Contributor:** could the names be added in the previous call?
>
> ```r
> net_ident <- c(
>   collection = meta[["collection_name"]],
>   network = meta[["nets"]]
> )
> ```

```diff
   } else {
     cli::cli_abort("{.arg name} must be a string or a `ns_meta` object.")
   }
 
   zip_url <- sprintf(
     "net/%s/files/%s.csv.zip",
-    net_ident[[1]],
-    net_ident[[2]]
+    net_ident[["collection"]],
+    net_ident[["network"]]
   )
 
   temp <- tempfile(fileext = "zip")
```
```diff
@@ -182,7 +187,8 @@ ns_df <- function(name, token = NULL, size_limit = 1) {
   node_file_name <- grep("node", zip_contents$Name, value = TRUE)
   gprops_file_name <- grep("gprops", zip_contents$Name, value = TRUE)
 
-  edges_df_raw <- utils::read.csv(unz(temp, edge_file_name))
+  con_edge <- unz(temp, edge_file_name)
+  edges_df_raw <- utils::read.csv(con_edge)
   edges_df <- suppressWarnings(minty::type_convert(edges_df_raw))
   source_loc <- grep("source", names(edges_df))
   target_loc <- grep("target", names(edges_df))
```
```diff
@@ -192,10 +198,12 @@ ns_df <- function(name, token = NULL, size_limit = 1) {
   edges_df[["from"]] <- edges_df[["from"]] + 1L
   edges_df[["to"]] <- edges_df[["to"]] + 1L
 
-  nodes_df_raw <- utils::read.csv(unz(temp, node_file_name))
+  con_nodes <- unz(temp, node_file_name)
+  nodes_df_raw <- utils::read.csv(con_nodes)
+
   #suppress warning if no character columns found
   nodes_df <- suppressWarnings(minty::type_convert(nodes_df_raw))
-  names(nodes_df)[1] <- "id"
+  names(nodes_df)[[1]] <- "id"
 
   # netzschleuder uses 0-indexing, igraph uses 1-indexing
   nodes_df[["id"]] <- nodes_df[["id"]] + 1L
```
```diff
@@ -210,8 +218,12 @@ ns_df <- function(name, token = NULL, size_limit = 1) {
     nodes_df[["y"]] <- mat[2, ]
   }
 
-  gprops_df <- readLines(unz(temp, gprops_file_name))
+  con_gprops <- unz(temp, gprops_file_name)
+  gprops_df <- readLines(con_gprops)
 
+  on.exit(close(con_edge))
```
> **Contributor:** would it make sense to take a dependency on withr and use `withr::local_connection()`? Or to define a helper since we'd call it at least four times?
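
A hedged sketch of what such a helper could look like with a `withr` dependency (`read_zipped_csv()` is a hypothetical name, not part of this PR). It would also sidestep the fact that stacked `on.exit()` calls replace one another unless `add = TRUE` is passed:

```r
# Hypothetical helper: open one file inside the zip archive, read it,
# and let withr close the connection when this function returns.
read_zipped_csv <- function(zipfile, filename) {
  con <- withr::local_connection(unz(zipfile, filename))
  utils::read.csv(con)
}

# Usage (sketch), replacing the paired unz()/read.csv()/close() calls:
# edges_df_raw <- read_zipped_csv(temp, edge_file_name)
# nodes_df_raw <- read_zipped_csv(temp, node_file_name)
```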

```diff
+  on.exit(close(con_nodes))
+  on.exit(close(con_gprops))
   on.exit(unlink(temp))
 
   list(nodes = nodes_df, edges = edges_df, gprops = gprops_df, meta = meta)
```