I successfully developed plain R code for an nls model on my R data frame (df), but I need to implement the same logic on a SparkR DataFrame. Assuming my df is a SparkR DataFrame, how could I change my R code to SparkR code?
# For each row of df, fit the trade-cost model
#   cost ~ C + B * sqrt(size) + D * size
# using the 32 size values in columns 15:46 and the 32 matching cost values
# in columns 47:78, and collect the fitted coefficients (plus row identifiers)
# into `data`. Rows where nls fails to converge are skipped.
size_min <- 15
size_max <- 46
cost_min <- 47
cost_max <- 78

# Preallocate one result slot per row; growing with rbind() in a loop is O(n^2).
results <- vector("list", nrow(df))

for (row_idx in seq_len(nrow(df))) {
  file_name      <- df[row_idx, 1]
  version_number <- df[row_idx, 3]
  long_id        <- df[row_idx, 11]
  short_id       <- df[row_idx, 12]

  # BUG FIX: the original initialized size/cost with numeric(length(32)),
  # which is numeric(1) == c(0), so every fit silently included a spurious
  # (0, 0) data point that biased the coefficient estimates. Extract the
  # observed values only.
  size <- unlist(df[row_idx, size_min:size_max], use.names = FALSE)
  cost <- unlist(df[row_idx, cost_min:cost_max], use.names = FALSE)
  inputframe <- data.frame(size, cost)

  # nls can fail to converge for some rows; treat that as "no result" and
  # move on rather than aborting the whole loop.
  trade_cost_model <- tryCatch(
    nls(cost ~ C + B * sqrt(size) + D * size,
        data = inputframe,
        start = list(C = 200, B = 0.5, D = 0)),
    error = function(e) NULL
  )

  if (!is.null(trade_cost_model)) {
    # One-row data frame of coefficients (columns C, B, D).
    coefs <- t(as.data.frame(coef(trade_cost_model)))
    results[[row_idx]] <- data.frame(file_name, version_number, long_id,
                                     short_id, coefs)
  }
}

# Bind all successful fits at once; NULL entries (failed fits) are dropped.
data <- do.call(rbind, results)
if (is.null(data)) {
  data <- data.frame()  # preserve original behavior when every fit fails
}
The above code runs successfully, but I need the same logic implemented in SparkR. Could you please help with this? I need it urgently.