This repository has been archived by the owner on Oct 28, 2019. It is now read-only.

Merge branch 'dev'
andrie committed Dec 8, 2015
2 parents 536ebdd + d1357e8 commit 191a8d2
Showing 12 changed files with 111 additions and 83 deletions.
6 changes: 3 additions & 3 deletions DESCRIPTION
@@ -1,9 +1,9 @@
Package: AzureML
Type: Package
Title: Interface with Azure ML datasets
Title: Interface with Azure Machine Learning datasets and web services
Description: Functions and datasets to support Azure Machine Learning. This allows you to interact with datasets, as well as publish and consume R functions as API services.
Version: 0.2.4
Date: 2015-11-23
Version: 0.2.5
Date: 2015-12-08
Authors@R: c(
person("Raymond", "Laghaeian", role=c("aut", "cre"), email="[email protected]"),
person(family="Microsoft Corporation", role="cph"),
8 changes: 4 additions & 4 deletions R/consume.R
@@ -21,7 +21,7 @@
#' @family consumption functions
#' @importFrom jsonlite fromJSON
#' @example inst/examples/example_publish.R
consume = function(endpoint, ..., globalParam, retryDelay = 10, output = "output1")
consume <- function(endpoint, ..., globalParam, retryDelay = 10, output = "output1")
{
if(is.Service(endpoint))
{
@@ -84,7 +84,7 @@ consume = function(endpoint, ..., globalParam, retryDelay = 10, output = "output1")
#' @importFrom jsonlite toJSON
#' @importFrom curl handle_setheaders new_handle handle_setopt curl_fetch_memory
#' @keywords internal
callAPI = function(apiKey, requestUrl, keyvalues, globalParam, retryDelay=10)
callAPI <- function(apiKey, requestUrl, keyvalues, globalParam, retryDelay=10)
{
# Set number of tries and HTTP status to 0
result = NULL
@@ -130,7 +130,7 @@ callAPI = function(apiKey, requestUrl, keyvalues, globalParam, retryDelay=10)
#'
#' @family discovery functions
#' @export
discoverSchema = function(helpURL, scheme = "https", host = "ussouthcentral.services.azureml.net", api_version = "2.0")
discoverSchema <- function(helpURL, scheme = "https", host = "ussouthcentral.services.azureml.net", api_version = "2.0")
{
workspaceId = getDetailsFromUrl(helpURL)[1]
endpointId = getDetailsFromUrl(helpURL)[3]
@@ -238,7 +238,7 @@ discoverSchema = function(helpURL, scheme = "https", host = "ussouthcentral.services.azureml.net", api_version = "2.0")
#' @return a vector containing the workspace ID, webservices ID and endpoint ID
#'
#' @keywords internal
getDetailsFromUrl = function(url) {
getDetailsFromUrl <- function(url) {
ptn = ".*?/workspaces/([[:alnum:]]*)/webservices/([[:alnum:]]*)/endpoints/([[:alnum:]]*)/*.*$"
if(!grepl(ptn, url)) stop("Invalid url")
c(
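As an aside on the getDetailsFromUrl() hunk above: the regular expression shown there is what pulls the workspace, web service and endpoint IDs out of an endpoint help-page URL. A minimal standalone sketch of that extraction, using a made-up placeholder URL rather than a real endpoint:

ptn <- ".*?/workspaces/([[:alnum:]]*)/webservices/([[:alnum:]]*)/endpoints/([[:alnum:]]*)/*.*$"
url <- "https://studio.azureml.net/workspaces/wsid123/webservices/svc456/endpoints/ep789/score"
# Substitute each capture group for the full match, mirroring the
# c(workspace ID, web service ID, endpoint ID) vector the function returns
c(sub(ptn, "\\1", url), sub(ptn, "\\2", url), sub(ptn, "\\3", url))
# [1] "wsid123" "svc456"  "ep789"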
15 changes: 10 additions & 5 deletions R/datasets.R
@@ -46,7 +46,7 @@
#' \code{\link{download.intermediate.dataset}}
#' @export
#' @example inst/examples/example_download.R
download.datasets = function(source, name, ...)
download.datasets <- function(source, name, ...)
{
datasets = source
if(! missing(name) && is.Workspace(source)) datasets = datasets(source)
@@ -95,7 +95,7 @@ download.datasets = function(source, name, ...)
#' @export
#' @family dataset functions
#' @family experiment functions
download.intermediate.dataset = function(ws, experiment, node_id, port_name="Results dataset", data_type_id="GenericCSV", ...)
download.intermediate.dataset <- function(ws, experiment, node_id, port_name="Results dataset", data_type_id="GenericCSV", ...)
{
url = sprintf("%s/workspaces/%s/experiments/%s/outputdata/%s/%s",
ws$.baseuri, curl_escape(ws$id),
@@ -126,15 +126,20 @@ download.intermediate.dataset = function(ws, experiment, node_id, port_name="Results dataset", data_type_id="GenericCSV", ...)
#' @export
#' @family dataset functions
#' @example inst/examples/example_upload.R
upload.dataset = function(x, ws, name, description="", family_id="", ...)
upload.dataset <- function(x, ws, name, description = "", family_id="", ...)
{
if(!is.Workspace(ws)) stop("ws must be a Workspace object")
if(name %in% datasets(ws)$Name) {
msg <- sprintf("A dataset with the name '%s' already exists in AzureML", name)
stop(msg)
}
# Uploading data to AzureML is a two-step process.
# 1. Upload raw data, retrieving an ID.
# 2. Construct a DataSource metadata JSON object describing the data and
# upload that.

# Step 1
tsv = capture.output(write.table(x, file="", sep="\t", row.names=FALSE, ...))
tsv = capture.output(write.table(x, file = "", sep = "\t", row.names = FALSE, ...))
url = sprintf("%s/resourceuploads/workspaces/%s/?userStorage=true&dataTypeId=GenericTSV",
ws$.baseuri, curl_escape(ws$id))
h = new_handle()
@@ -188,7 +193,7 @@ upload.dataset = function(x, ws, name, description="", family_id="", ...)
#' @return A data frame with columns Name, Deleted, status_code indicating the HTTP status code and success/failure result of the delete operation for each dataset.
#' @family dataset functions
#' @export
delete.datasets = function(ws, name, host="https://studioapi.azureml.net/api")
delete.datasets <- function(ws, name, host="https://studioapi.azureml.net/api")
{
# https://studioapi.azureml.net/api/workspaces/<workspaceId>/datasources/family/<familyId> HTTP/1.1
datasets = name
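The new comments in upload.dataset() above describe the upload as a two-step process, with step 1 serializing the data frame to tab-separated text in memory. A quick standalone look at that serialization step, using mtcars as a stand-in for the caller's data:

# capture.output() collects write.table()'s console output as a character
# vector, one element per row -- the GenericTSV body sent in step 1
tsv <- capture.output(write.table(mtcars, file = "", sep = "\t", row.names = FALSE))
length(tsv)   # 33: one header line plus 32 data rows
head(tsv, 2)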
6 changes: 3 additions & 3 deletions R/discover.R
@@ -58,7 +58,7 @@
#' getWebServices(ws)
#' }
#' @export
services = function(ws, service_id, name, host = ws$.management_endpoint)
services <- function(ws, service_id, name, host = ws$.management_endpoint)
{
if(!is.Workspace(ws)) stop("ws must be an AzureML Workspace object")
h = new_handle()
@@ -142,7 +142,7 @@ getWebServices = services
#' getEndpoints(ws, s$Id[1])
#' }
#' @export
endpoints = function(ws, service_id, endpoint_id, host = ws$.management_endpoint)
endpoints <- function(ws, service_id, endpoint_id, host = ws$.management_endpoint)
{
if(!is.Workspace(ws)) stop("ws must be an AzureML Workspace object")
# if(is.list(service_id) || is.data.frame(service_id)) service_id = service_id$Id[1]
@@ -213,7 +213,7 @@ getEndpoints = endpoints
#'
#' }
#' @export
endpointHelp = function(e, type = c("apidocument", "r-snippet","score","jobs","update"))
endpointHelp <- function(e, type = c("apidocument", "r-snippet","score","jobs","update"))
{
type = match.arg(type)
rsnip = FALSE
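endpointHelp() above validates its type argument with match.arg(), so any unambiguous abbreviation of the documented types resolves to the full name. A small base-R illustration of that pattern outside the function:

choices <- c("apidocument", "r-snippet", "score", "jobs", "update")
match.arg("score", choices)   # exact match -> "score"
match.arg("api", choices)     # unambiguous prefix -> "apidocument"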
14 changes: 7 additions & 7 deletions R/getsyms.R
@@ -17,8 +17,8 @@

# These are all internal functions.

.getsyms = function(ex) {
fun = function(x) {
.getsyms <- function(ex) {
fun <- function(x) {
if (is.symbol(x))
as.character(x)
else if (is.call(x))
@@ -29,17 +29,17 @@
unlist(lapply(ex, fun))
}

.gather = function(x) {
fun = function(a, b) unique(c(a, b))
.gather <- function(x) {
fun <- function(a, b) unique(c(a, b))
accum = list(good=character(0), bad=character(0))
for (e in x) {
accum = mapply(fun, e, accum, SIMPLIFY=FALSE)
}
accum
}

.expandsyms = function(syms, env, good, bad) {
fun = function(sym, good, bad) {
.expandsyms <- function(syms, env, good, bad) {
fun <- function(sym, good, bad) {
if (sym %in% c(good, bad)) {
# we already saw this symbol
list(good=good, bad=bad)
@@ -77,7 +77,7 @@
.gather(lapply(syms, fun, good, bad))$good
}

.getexports = function(ex, e, env, good=character(0), bad=character(0)) {
.getexports <- function(ex, e, env, good=character(0), bad=character(0)) {
syms = .getsyms(ex)
syms = .expandsyms(syms, env, good, bad)
for (s in syms) {
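For context on the helpers above: from the visible part of .getsyms(), it walks an unevaluated expression and collects the names of the symbols it contains, which feeds the export logic used when publishing a function. A tiny sketch along those lines; the recursion branch is cut off in the hunk, so the nested-call handling below is an assumption:

getsyms_sketch <- function(ex) {
  fun <- function(x) {
    if (is.symbol(x)) as.character(x)
    else if (is.call(x)) getsyms_sketch(x)   # assumed: recurse into nested calls
  }
  unlist(lapply(ex, fun))
}
getsyms_sketch(quote(predict(model, newdata)))
# [1] "predict" "model"   "newdata"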
16 changes: 8 additions & 8 deletions R/internal.R
@@ -32,7 +32,7 @@ date_origin = "1970-1-1"
#' @param delay in seconds between retries, subject to exponent
#' @param exponent increment each successive delay by delay^exponent
#' @return the result of curl_fetch_memory(uri, handle)
try_fetch = function(uri, handle, retry_on=c(503,504,509,400,401,440), tries=3, delay=10, exponent=1.2)
try_fetch <- function(uri, handle, retry_on=c(503,504,509,400,401,440), tries=3, delay=10, exponent=1.2)
{
i = 0
while(i < tries)
@@ -48,7 +48,7 @@ try_fetch = function(uri, handle, retry_on=c(503,504,509,400,401,440), tries=3, delay=10, exponent=1.2)
r
}

urlconcat = function(a,b)
urlconcat <- function(a,b)
{
ans = paste(gsub("/$", "", a), b, sep="/")
ans = gsub(":/([^/])", "://\\1", ans)
@@ -62,7 +62,7 @@ urlconcat = function(a,b)
#' @importFrom curl handle_setheaders curl new_handle
#' @importFrom jsonlite fromJSON
#' @keywords Internal
get_datasets = function(ws)
get_datasets <- function(ws)
{
h = new_handle()
handle_setheaders(h, .list=ws$.headers)
@@ -89,7 +89,7 @@ get_datasets = function(ws)
}


convertToDate = function(x){
convertToDate <- function(x){
x = as.numeric(gsub("[^-0-9]", "", x)) /1000
x = ifelse(x >= 0, x, NA)
suppressWarnings(
@@ -105,7 +105,7 @@ convertToDate = function(x){
#' @importFrom curl handle_setheaders curl new_handle
#' @importFrom jsonlite fromJSON
#' @keywords Internal
get_experiments = function(ws)
get_experiments <- function(ws)
{
h = new_handle()
handle_setheaders(h, .list=ws$.headers)
@@ -133,7 +133,7 @@ get_experiments = function(ws)
#' @importFrom foreign read.arff
#' @importFrom curl new_handle curl
#' @keywords Internal
get_dataset = function(x, h, quote = "\"", ...)
get_dataset <- function(x, h, quote = "\"", ...)
{
# Set default stringsAsFactors to FALSE, but allow users to override in ...
# Restore the option on function exit.
@@ -161,7 +161,7 @@ get_dataset = function(x, h, quote = "\"", ...)

# Checks if zip is available on system.
# Required for packageEnv()
zipAvailable = function(){
zipAvailable <- function(){
z = unname(Sys.which("zip"))
z != ""
}
@@ -178,7 +178,7 @@ zipNotAvailableMessage = "Requires external zip utility. Please install zip, ens
#' @importFrom base64enc base64encode
#' @importFrom miniCRAN makeRepo pkgDep
#' @keywords Internal
packageEnv = function(exportenv, packages=NULL, version="3.1.0")
packageEnv <- function(exportenv, packages=NULL, version="3.1.0")
{
if(!zipAvailable()) stop(zipNotAvailableMessage)
if(!is.null(packages)) assign("..packages", packages, envir=exportenv)
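The try_fetch() documentation above describes retrying failed requests with a delay that grows by an exponent. The exact schedule isn't visible in the hunks shown, but one plausible reading of "increment each successive delay by delay^exponent" is that the wait is raised to the exponent after every attempt, roughly like this:

delay <- 10; exponent <- 1.2; tries <- 3    # the defaults shown above
for (attempt in seq_len(tries)) {
  cat(sprintf("attempt %d: wait about %.1f seconds before retrying\n", attempt, delay))
  delay <- delay^exponent                   # assumed growth rule: ~10, 15.8, 27.6 seconds
}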
13 changes: 7 additions & 6 deletions R/methods.R
@@ -25,33 +25,34 @@
#' @param x an R object
#' @return logical value, TRUE if \code{x} represents an Azure ML workspace.
#' @export
is.Workspace = function(x) "Workspace" %in% class(x)
is.Workspace <- function(x) "Workspace" %in% class(x)

#' @title Test if an object is an Azure ML Service.
#' @param x an R object
#' @return logical value, TRUE if \code{x} represents an Azure ML web service
#' @export
is.Service = function(x){
is.Service <- function(x){
inherits(x, "Service")
}

#' @title Test if an object is an Azure ML Endpoint
#' @param x an R object
#' @return logical value, TRUE if \code{x} represents an Azure ML web service endpoint
#' @export
is.Endpoint = function(x){
is.Endpoint <- function(x){
inherits(x, "Endpoint")
}

#' @export
print.Workspace = function(x, ...)
{
cat("AzureML Workspace\n")
cat("Workspace ID: ",x$id,"\n")
cat("Workspace ID: ", x$id, "\n")
cat("API endpoint:", x$.api_endpoint, "\n")
}

#' @export
print.Experiments = function(x, ...)
print.Experiments <- function(x, ...)
{
dots = character()
if(nrow(x) > 0) dots = "..."
@@ -68,7 +69,7 @@ print.Experiments = function(x, ...)
}

#' @export
print.Datasets = function(x, ...)
print.Datasets <- function(x, ...)
{
dots = character()
if(nrow(x) > 0) dots = "..."
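The predicates above are thin S3 class checks: is.Workspace() tests class() membership directly, while is.Service() and is.Endpoint() use inherits(). A minimal illustration with a stand-in object, since building a real Service requires a live workspace:

svc <- structure(list(Name = "example"), class = "Service")  # stand-in, not a real service
inherits(svc, "Service")     # TRUE  -- what is.Service(svc) returns
inherits(list(), "Service")  # FALSE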
14 changes: 7 additions & 7 deletions R/publish.R
@@ -33,12 +33,12 @@ wrapper = "inputDF <- maml.mapInputPort(1)\nload('src/env.RData')\n if(!is.null(
#' @param fun a function to test
#' @param output_names character vector of function output names
#' @param data.frame i/o format
test_wrapper = function(inputDF, wrapper, fun, output_names, `data.frame`)
test_wrapper <- function(inputDF, wrapper, fun, output_names, `data.frame`)
{
exportenv = new.env()
maml.mapInputPort = function(x) inputDF
maml.mapOutputPort = function(x) get(x)
load = function(x) invisible()
maml.mapInputPort <- function(x) inputDF
maml.mapOutputPort <- function(x) get(x)
load <- function(x) invisible()
exportenv$..fun = fun
exportenv$..output_names = output_names
exportenv$..data.frame = `data.frame`
@@ -54,7 +54,7 @@ test_wrapper = function(inputDF, wrapper, fun, output_names, `data.frame`)
#' @return list of the format expected by the API
#'
#' @keywords internal
azureSchema = function(argList) {
azureSchema <- function(argList) {
form = list()
for (arg in names(argList)) {
type = argList[[arg]]
@@ -145,7 +145,7 @@ azureSchema = function(argList) {
#' @importFrom jsonlite toJSON
#' @importFrom uuid UUIDgenerate
#' @importFrom curl new_handle handle_setheaders handle_setopt
publishWebService = function(ws, fun, name,
publishWebService <- function(ws, fun, name,
inputSchema, outputSchema, `data.frame`=FALSE,
export=character(0), noexport=character(0), packages,
version="3.1.0", serviceId, host = ws$.management_endpoint)
@@ -259,7 +259,7 @@ updateWebService = publishWebService
#' @seealso \code{\link{services}} \code{\link{publishWebService}} \code{\link{updateWebService}}
#' @family publishing functions
#' @example inst/examples/example_publish.R
deleteWebService = function(ws, name, refresh = TRUE)
deleteWebService <- function(ws, name, refresh = TRUE)
{
#DELETE https://management.azureml.net/workspaces/{id}/webservices/{id}[/endpoints/{name}]

