# Load the ServiceTitan API, reporting, utility, Excel, and MS Graph helper
# functions, plus the Nextiva Selenium pull functions.
source(.stApiV2Functions)
source(.reports20Funs)
source(.stUtilityFunctions)
source(.excelFunctions)
source(.msGraphFunctions)
source('~/code/nextiva-pulls/nextivaSeleniumFuns.R')

ids <- list(
  stBusinessUpdateJson = "013C5ECMZGUW3C5ED72NB3LQXP2HBTMYDM",
  csrMetricsJson = "013C5ECMZLASHG5F54EZDK6WMAC2MR77E7",
  scMetricsJson = "013C5ECM2FCV4CDJHLWZD32GIOBQ6UEYJ6"
)

# The reporting window runs from the last successful run date (written to
# runtimes/scCsrReporting.rds at the bottom of this script) through yesterday.
yesterday <- c(
  rio::import(paste0(.dataDir, "/runtimes/scCsrReporting.rds")),
  today() - 1
)

# Previous fixed-window logic, kept for reference:
# yesterday <- if(weekdays(today()) == 'Monday') {
#   c(today() - 3, today() - 1)
# } else {
#   c(today() - 1, today() - 1)
# }

monthToDate <- c(
  floor_date(today() - 7, '1 month'),
  today() - 1
)
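# For illustration (hypothetical run date): if today() were 2024-03-14, the
# month-to-date window would be 2024-03-01 through 2024-03-13. Early in a month
# the 7-day buffer reaches back into the prior month (a 2024-03-04 run would
# start the window at 2024-02-01).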

# Separates long chains of column-bound, heterogeneous tables by appending a
# blank spacer column after each one.
cbindSep <- function(lst) {
  bind_cols(
    lapply(lst, function(l) { l %>% mutate(X = '') %>% as_tibble() })
  )
}
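# Minimal sketch of how cbindSep() might be used (the tables here are
# hypothetical, not part of this report):
# cbindSep(list(tibble(a = 1:2), tibble(b = 3:4)))
# bind_cols() renames the duplicate spacer columns (X...2, X...4, ...), leaving
# a blank column after each table when the result is written out.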

# SC / CSR Reporting Files

# SC Report
# Calls Data
stCalls <- getSearchResults(url = 'https://go.servicetitan.com/Search/Call',
critStr = sprintf(
'{"Caller":"",
"NumberDialed":"",
"InvoiceNumber":"",
"CustomerName":"",
"DurationFrom":"",
"DurationTo":"",
"DateFrom":"%sT07:00:00.000Z",
"DateTo":"%sT07:00:00.000Z",
"CallRecordingPaused":"",
"Tags":""}',
yesterday[1],
yesterday[2])
) %>%
clean_names('small_camel')

stCallsFlt <- stCalls %>%
  select(id, agent, callDirection, date, duration) %>%
  mutate(date = as_date(mdy_hm(date))) %>%
  filter(!is.na(agent))

# Appts Creation
bookedSalesCalls <- getReports20(
url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=55431785',
critStr = sprintf(
'{
"DateType": "2",
"BusinessUnitId": "50093756,50094224,50093630",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "JobNumber,JobType,Campaign,CampaignCategory,JobBusinessUnit,JobStatus,CancelReason,BookedBy,CreatedDate,ScheduledDate,CustomerName,LocationAddress,PrimaryTechnician",
"Fields": "JobNumber,JobType,Campaign,CampaignCategory,JobBusinessUnit,JobStatus,CancelReason,BookedBy,CreatedDate,ScheduledDate,CustomerName,LocationAddress,PrimaryTechnician",
"Sort": "[{\"dir\":\"asc\",\"field\":\"CampaignCategory\"}]",
"Filters": "{\"filters\":[{\"filters\":[{\"field\":\"CustomerName\",\"operator\":\"doesnotcontain\",\"value\":\"Testing\"}],\"logic\":\"and\"},{\"filters\":[{\"field\":\"JobType\",\"operator\":\"notin\",\"value\":[\"Service - Counter Sale\"]}],\"logic\":\"and\"}],\"logic\":\"and\"}",
"Group": "[]",
"ColumnExportOptions": "[{\"Field\":\"JobNumber\",\"Width\":140},{\"Field\":\"JobType\",\"Width\":140},{\"Field\":\"Campaign\",\"Width\":165},{\"Field\":\"CampaignCategory\",\"Width\":203},{\"Field\":\"JobBusinessUnit\",\"Width\":163},{\"Field\":\"JobStatus\",\"Width\":140},{\"Field\":\"BookedBy\",\"Width\":145},{\"Field\":\"CancelReason\",\"Width\":170},{\"Field\":\"CreatedDate\",\"Width\":161},{\"Field\":\"ScheduledDate\",\"Width\":177},{\"Field\":\"CustomerName\",\"Width\":179},{\"Field\":\"LocationAddress\",\"Width\":187},{\"Field\":\"PrimaryTechnician\",\"Width\":196}]",
"AggregatesOnly": "false"
}',
monthToDate[1],
monthToDate[2])
) %>%
clean_names('small_camel') %>%
deleteLast()

# Appts Ran
pdscScheduledBy <- getReports20(
url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=76153415',
critStr = sprintf('{
"DateType": "3",
"BusinessUnitId": "50093756,50093630,50094224",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "CustomerId,JobNumber,ScheduledDate,CreatedDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Fields": "CustomerId,JobNumber,ScheduledDate,CreatedDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Group": "[]",
"AggregatesOnly": "false"
}',
yesterday[1],
yesterday[2])) %>%
clean_names('small_camel') %>%
deleteLast()


# Call Time
# Agent duration reports: any column that parses as HH:MM:SS is converted to a
# fraction of a day (Excel-style time); everything else is left as-is.
adr <- list.files(paste0(.nDriveDir, '/David D/datasets/nextiva'),
                  'agentDurationReport.*csv',
                  full.names = TRUE) %>%
  lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
  bind_rows() %>%
  clean_names('small_camel') %>%
  mutate(across(everything(),
                function(x) {
                  ifelse(is.na(hms(x)),
                         x,
                         as.numeric(hms(x)) / (24*60*60))
                }))

# Agent call reports: coerce anything numeric-looking to numeric.
acr <- list.files(paste0(.nDriveDir, '/David D/datasets/nextiva'),
                  'agentCallReport.*csv',
                  full.names = TRUE) %>%
  lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
  bind_rows() %>%
  clean_names('small_camel') %>%
  mutate(across(everything(),
                function(x) {
                  ifelse(is.na(as.numeric(x)),
                         x,
                         as.numeric(x))
                }))

# Convert the seven most recent (by filename) Nextiva unavailability workbooks
# so they report the number of unavailability codes instead of their duration.
list.files(paste0(.nDriveDir, '/David D/datasets/nextiva'),
           'agentUnavailabilityReport.*xls',
           full.names = TRUE) %>%
  sort(decreasing = TRUE) %>%
  { .[1:7] } %>%
  lapply(convertNextivaFile,
         bottomOffset = 1)

bouncedCalls <- list.files(paste0(.nDriveDir, '/David D/datasets/nextiva'),
                           'agentUnavailabilityReport.*csv',
                           full.names = TRUE) %>%
  lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
  lapply(function(df) {
    # The column whose first row holds unavailability code '21' is the
    # bounced-call count; standardize its name so the files can be stacked.
    bouncedColInd <- which(df[1, ] == '21')
    names(df)[bouncedColInd] <- '21'
    df %>%
      select(any_of(c('V1', 'V2', '21')))
  }) %>%
  bind_rows() %>%
  clean_names('small_camel') %>%
  mutate(x21 = ifelse(is.na(as.numeric(x21)),
                      x21,
                      as.numeric(x21))) %>%
  filter(!is.na(v1))

scAvailability <- list.files(paste0(.nDriveDir, '/David D/datasets/nextiva/sc'),
                             '.*csv',
                             full.names = TRUE) %>%
  lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
  bind_rows() %>%
  clean_names('small_camel') %>%
  # Keep only the 'Report Summary' block: blank rows inherit the section label,
  # then the per-agent rows under that label are retained (same approach as
  # csrAvailability below).
  mutate(dateAndTime = ifelse(dateAndTime == '', NA, dateAndTime)) %>%
  fill(dateAndTime) %>%
  filter(dateAndTime == 'Report Summary',
         name != 'Summary') %>%
  mutate(across(available:staffed,
                function(x) {
                  as.numeric(hms(x)) / (24*60*60)
                }))


# Rehash Est. Sold
rehashEst <- getReports20(
url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=78134097',
critStr = sprintf('{
"DateType": "0",
"BusinessUnitId": "45593203",
"RecipientId": "",
"From": "%s",
"To": "%s",
"VisibleFields": "EstimateName,ParentJobCampaign,OpportunityId,CustomerName,OpportunityStatus,EstimateStatus,EstimateAge,CreationDate,SoldOn,SoldBy",
"Fields": "EstimateName,ParentJobCampaign,OpportunityId,CustomerName,OpportunityStatus,EstimateStatus,EstimateAge,CreationDate,SoldOn,SoldBy",
"Sort": "[]",
"Filters": "",
"Group": "[]",
"ColumnExportOptions": "[{\"Field\":\"EstimateName\",\"Width\":173},{\"Field\":\"ParentJobCampaign\",\"Width\":211},{\"Field\":\"OpportunityId\",\"Width\":209},{\"Field\":\"OpportunityStatus\",\"Width\":199},{\"Field\":\"EstimateStatus\",\"Width\":176},{\"Field\":\"EstimateAge\",\"Width\":206},{\"Field\":\"CreationDate\",\"Width\":164},{\"Field\":\"SoldOn\",\"Width\":140},{\"Field\":\"CustomerName\",\"Width\":179},{\"Field\":\"SoldBy\",\"Width\":140}]",
"AggregatesOnly": "false",
"TimeZone": "America/New_York"
}',
yesterday[1],
yesterday[2])) %>%
clean_names('small_camel') %>%
deleteLast() %>%
mutate(ageWhenSold = difftime(soldOn, creationDate, units = 'days') %>% as.numeric())


# CSR Metrics

# Booked Inspection Report
biReport <- list.files(paste0(.dataDir,
'/booked-inspection-yesterday'),
pattern = paste0(unique(yesterday + 1), collapse = '|'),
full.names = TRUE) %>%
lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
subset(!sapply(., function(x) { nrow(x) == 0 })) %>%
bind_rows()

# Booked SC
bookedServiceCalls <- getReports20(url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=174550730',
critStr = sprintf('{
"DateType": "2",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "JobNumber,JobType,JobBusinessUnit,JobStatus,BookedBy,CreatedDate,ScheduledDate",
"Fields": "JobNumber,JobType,JobBusinessUnit,JobStatus,BookedBy,CreatedDate,ScheduledDate",
"Sort": "[]",
"Group": "[]",
"AggregatesOnly": "false"
}', yesterday[1], yesterday[2])) %>%
clean_names('small_camel') %>%
deleteLast() %>%
filter(str_detect(businessUnit, '11|19|20|23|50|24'),
!str_detect(jobType, 'Counter Sale'))

# MP Sold
mpSold <- list.files(paste0(.dataDir, '/mr-msb'),
pattern = paste0(unique(yesterday+1),
collapse = '|'),
full.names = TRUE) %>%
lapply(rio::import) %>%
lapply(function(df) {
df %>%
transmute(customerId = customerId,
customerName = customerName,
invoiceId = membershipId,
invoiceNumber = saleRenewalInvoiceNumber,
membershipName = saleRenewalItemCode,
type = activationMethod,
price = membershipSaleRenewalPrice,
startDate = from,
endDate = to,
commission = membershipBonus,
soldBy = soldBy,
soldOn = soldOn,
status = membershipStatus)
}) %>%
bind_rows()

csrAvailability <- list.files(paste0(.nDriveDir, '/David D/datasets/nextiva/csr'),
'.*csv',
full.names = TRUE) %>%
lapply(function(fname) { rio::import(fname) %>% mutate(fname = fname) }) %>%
bind_rows() %>%
clean_names('small_camel') %>%
mutate(dateAndTime = ifelse(dateAndTime == '', NA, dateAndTime)) %>%
fill(dateAndTime) %>%
filter(dateAndTime == 'Report Summary',
name != 'Summary') %>%
mutate(across(available:staffed,
function(x) {
as.numeric(hms(x)) / (24*60*60)
}))

# SC business update
dropIns <- bookedSalesCalls %>%
group_by(createdDate) %>%
summarise(dropIns = snao(scheduledDate == createdDate),
seLeads = snao(bookedBy == 'logan'),
totalBooked = n())

rehash <- getReports20(url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=78134097',
critStr = sprintf(
'{
"DateType": "0",
"BusinessUnitId": "45593203",
"RecipientId": "",
"From": "%s",
"To": "%s",
"VisibleFields": "EstimateName,ParentJobCampaign,OpportunityId,OpportunityStatus,EstimateStatus,EstimateAge,CreationDate,SoldOn,SoldBy",
"Fields": "EstimateName,ParentJobCampaign,OpportunityId,OpportunityStatus,EstimateStatus,EstimateAge,CreationDate,SoldOn,SoldBy",
"Sort": "[]",
"Filters": "",
"Group": "[]",
"AggregatesOnly": "false"
}', yesterday[1], yesterday[2])) %>%
deleteLast() %>%
clean_names('small_camel')

rehashSmry <- rehash %>%
group_by(soldOn) %>%
summarise(numRehash = snao(creationDate != soldOn))

apptBreakdown <- bookedSalesCalls %>%
  filter(createdDate >= yesterday[1], createdDate <= yesterday[2]) %>%
  # Show the specific campaign for Digital 3rd Party since that category is
  # made up of multiple campaigns
  mutate(campaignCategory = ifelse(campaignCategory == 'Digital 3rd Party',
                                   paste0(campaignCategory, ' - ', jobCampaign),
                                   campaignCategory)) %>%
  group_by(campaignCategory) %>%
  summarise(n = n())

# The next three non-Sunday days, starting today (today() + 4 gives enough of a
# buffer to skip one Sunday).
next3Days <- seq.Date(today(), today() + 4, 1) %>%
  subset(!weekdays(.) == 'Sunday') %>%
  { .[1:3] }

upcomingApptBreakdown <- getReports20(url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=76153415',
critStr = sprintf(
'{
"DateType": "3",
"BusinessUnitId": "50093756,50093630,50094224",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "CustomerId,JobNumber,ScheduledDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Fields": "CustomerId,JobNumber,ScheduledDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Group": "[]",
"AggregatesOnly": "false"
}', next3Days[1], tail(next3Days, 1)
)) %>%
deleteLast() %>%
clean_names('small_camel') %>%
filter(!str_detect(tolower(customerName), 'testing cust'),
as_date(scheduledDate) %in% next3Days) %>%
group_by(scheduledDate, businessUnit) %>%
summarise(n = n())

apptsRanSmry <- pdscScheduledBy %>%
summarise(dropIns = snao(createdDate == scheduledDate),
totalRan = n(),
alreadyScheduled = totalRan - dropIns)


# Form email body
serviceRan <- getReports20(url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=174550730',
critStr = sprintf('{
"DateType": "3",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "JobNumber,JobType,JobBusinessUnit,JobStatus,BookedBy,CreatedDate,ScheduledDate",
"Fields": "JobNumber,JobType,JobBusinessUnit,JobStatus,BookedBy,CreatedDate,ScheduledDate",
"Sort": "[]",
"Group": "[]",
"AggregatesOnly": "false"
}', yesterday[1], yesterday[2])) %>%
clean_names('small_camel') %>%
deleteLast() %>%
filter(str_detect(businessUnit, '19|20|23|50|24'),
!str_detect(jobType, 'Counter Sale'),
!status %in% c('Canceled', 'Hold')) %>%
group_by(scheduledDate) %>%
summarise(serviceRan = n())

salesRan <- pdscScheduledBy %>%
group_by(scheduledDate) %>%
summarise(salesRan = n())

serviceSched <- bookedServiceCalls %>%
filter(!str_detect(businessUnit, '11')) %>%
group_by(createdDate) %>%
summarise(serviceScheduled = n())

salesSched <- bookedSalesCalls %>%
filter(createdDate >= yesterday[1], createdDate <= yesterday[2]) %>%
group_by(createdDate) %>%
summarise(salesScheduled = n())

estSmry <- rio::import(paste0(
.nDriveDir, '/Reporting/Sales Report/MAIN Sales Report.xlsx'
),
which = '.Estimates') %>%
clean_names('small_camel') %>%
filter(creationDate >= yesterday[1],
creationDate <= yesterday[2],
salesOrService == 'Service') %>%
group_by(creationDate) %>%
summarise(serviceEsts = n())

oppSmry <- rio::import(paste0(
.nDriveDir, '/Reporting/Sales Report/MAIN Sales Report.xlsx'
),
which = 'Tech.Opps') %>%
clean_names('small_camel') %>%
filter(scheduledDate >= yesterday[1],
scheduledDate <= yesterday[2]) %>%
group_by(scheduledDate) %>%
summarise(serviceOpps = n())

fullBdySmry <- salesRan %>%
  left_join(salesSched, by = c('scheduledDate' = 'createdDate')) %>%
  left_join(serviceRan, by = 'scheduledDate') %>%
  left_join(serviceSched, by = c('scheduledDate' = 'createdDate')) %>%
  left_join(estSmry, by = c('scheduledDate' = 'creationDate')) %>%
  left_join(oppSmry, by = 'scheduledDate')

# On Monday, pull the canceled sales report
lastWeek <- c(floor_date(today()-9, '1 week', week_start = 1),
today() - 1)

if(weekdays(today()) == 'Monday') {

pdscLastWeek <- getReports20(
url = 'https://go.servicetitan.com/app/api/reporting/CustomReport/QueryExport?format=1&id=76153415',
critStr = sprintf('{
"DateType": "3",
"BusinessUnitId": "50093756,50093630,50094224",
"IncludeAdjustmentInvoices": "false",
"RecipientId": "",
"PrimaryKeys": "",
"From": "%s",
"To": "%s",
"VisibleFields": "CustomerId,JobNumber,ScheduledDate,CreatedDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Fields": "CustomerId,JobNumber,ScheduledDate,CreatedDate,CustomerName,LocationStreet,LocationCity,JobType,Campaign,CampaignCategory,PrimaryTechnician,JobBusinessUnit,JobStatus,CancelReason,BookedBy",
"Group": "[]",
"AggregatesOnly": "false"
}',
lastWeek[1],
lastWeek[2])) %>%
clean_names('small_camel') %>%
deleteLast() %>%
filter(status == 'Canceled')

lastWeekCanceled <- getSearchResults(url = 'https://go.servicetitan.com/Search/Job',
critStr = sprintf(
'{"JobNumber":"",
"InvoiceNumber":"",
"Customer":"",
"CustomerPO":"",
"Location":"",
"Unit":"",
"Technician":"",
"SkuIds":"",
"Status":5,
"TotalFrom":"",
"TotalTo":"",
"StartFrom":"%sT07:00:00.000Z",
"StartTo":"%sT07:00:00.000Z",
"Summary":"",
"Tags":"",
"CustomFields":[{"TypeId":"80696706",
"Value":""},
{"TypeId":"83975543"},
{"TypeId":"83976555"},
{"TypeId":"83984488",
"Value":""},
{"TypeId":"84742243",
"Value":""},
{"TypeId":"138691695"},
{"TypeId":"142759003"},
{"TypeId":"145520737"}]}',
lastWeek[1],
lastWeek[2]
)) %>%
clean_names('small_camel')

canceledSalesAppts <- pdscLastWeek %>%
left_join(lastWeekCanceled %>% select(jobNumber, cancelMemo),
by = 'jobNumber')

} else {
canceledSalesAppts <- tibble()
}

# Start JSON prep

# Name a vector by its own values so map() keeps those names on its output list.
selfName <- function(x) {
  setNames(x, x)
}
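# For example, selfName(c("Call Data", "Appt Creation")) returns the same vector
# named by its own values, so map() over it yields a list keyed "Call Data",
# "Appt Creation".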

# Round-trip an object through JSON so tibbles, Dates, etc. collapse to the
# plain structures stored in the backend JSON files.
jsonify <- function(x) {
  fromJSON(toJSON(x))
}
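# Hypothetical illustration (assuming the jsonlite toJSON/fromJSON loaded by the
# sourced helpers):
# jsonify(tibble(Date = as_date('2024-01-02'), n = 1L))
# returns a plain data.frame with Date serialized as the string "2024-01-02".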

# Row-wise total of the contiguous block of columns from startCol through
# endCol, after coercing each column (numeric by default).
sumAcross <- function(df, startCol, endCol, coerce = as.numeric) {

  inds <- which(names(df) == startCol):which(names(df) == endCol)
  df[, inds] %>%
    mutate(across(everything(), coerce)) %>%
    rowSums()

}
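# Hypothetical illustration:
# sumAcross(tibble(name = 'A', acdCalls = '2', outboundCalls = '3'),
#           'acdCalls', 'outboundCalls')
# #> 5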

# Slice a combined Nextiva export into per-day chunks: for each date in the
# window, keep the rows that follow the first occurrence of that date in
# `dateCol` and stop ahead of that block's "Summary" marker, then stamp the
# surviving rows with the Date so downstream code can group by day.
getTodayFromNextiva <- function(df, dateCol, yesterday) {
  lapply(seq.Date(yesterday[1], yesterday[2], 1),
         function(y) {
           td <- strftime(y, "%m/%d/%Y")
           t <- df %>%
             mutate(ind = cumsum(str_detect(.[[dateCol]], td)),
                    ind2 = cumsum(ind == 1 & str_detect(.[[dateCol]], "Summary")),
                    ind = ind & (lead(ind2) == 0)) %>%
             filter(ind) %>%
             select(-ind, -ind2)
           if(nrow(t) > 0) { t[[dateCol]] <- y } else { t <- tibble() }
           t
         }) %>%
    bind_rows()
}
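# Used below as, e.g., adr %>% getTodayFromNextiva(dateCol = "dateAndTime", yesterday)
# and bouncedCalls %>% getTodayFromNextiva(dateCol = "v1", yesterday). Dates with
# no matching block come back as an empty tibble, which the callers guard against
# with nrow(.) > 0.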


scMetricsOldRaw <- fromJSON(readLines(paste0(
.nDriveDir,
"/Sales Coordination Management/Performance Progress Report/backendData/scMetricsData.json"
)))

scMetricsOld <- map(
seq_len(length(names(scMetricsOldRaw))),
function(i) {

nm <- names(scMetricsOldRaw)[i]

dtCol <- c(
"Call Data" = "Date",
"Appt Creation" = "Created Date",
"Appts Ran" = "Scheduled Date",
"Call Time" = "Date",
"Total Calls" = "Date",
"Bounced Calls" = "Date",
"Rehash Est Sold" = "Sold On"
)[[nm]]

minDt <- c(
"Call Data" = yesterday[1],
"Appt Creation" = monthToDate[1],
"Appts Ran" = yesterday[1],
"Call Time" = yesterday[1],
"Total Calls" = yesterday[1],
"Bounced Calls" = yesterday[1],
"Rehash Est Sold" = yesterday[1]
)[[nm]]

scMetricsOldRaw[[nm]] %>%
filter(.[[dtCol]] < minDt)

}
)
names(scMetricsOld) <- names(scMetricsOldRaw)
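# Incremental-update pattern: scMetricsOld keeps only the rows dated before the
# window being refreshed; the fresh pull in scMetricsList below is appended to
# it (see updatedScMetrics) and de-duplicated before the JSON backend file is
# rewritten.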

# scMetricsOldComp <- map2(
# selfName(names(scMetricsOldRaw), raw = TRUE),
# c(
# "Date", "Created Date", "Scheduled Date",
# "Date", "Date", "Date", "Sold On"
# ),
# function(nm, dtCol) {
# scMetricsOldRaw[[nm]] %>%
# filter(.[[dtCol]] < yesterday[1])
# }
# )


scMetricsList <- list(
`Call Data` = stCallsFlt %>%
transmute(ID = id,
`Call Direction` = callDirection,
Date = date,
Duration = duration,
Agent = agent,
year = year(date),
week = week(date)) %>%
jsonify(),
`Appt Creation` = bookedSalesCalls %>%
transmute(
`Job #` = jobNumber,
`Job Type` = jobType,
`Job Campaign` = jobCampaign,
`Campaign Category` = campaignCategory,
`Business Unit` = businessUnit,
Status = status,
`Cancel Reason` = cancelReason,
`Booked By` = bookedBy,
`Created Date` = createdDate,
`Scheduled Date` = scheduledDate,
`Customer Name` = customerName,
`Location Address` = locationAddress,
`Primary Technician` = primaryTechnician,
`Drop In?` = as.numeric(`Created Date` == `Scheduled Date`),
week = week(as_date(`Created Date`)),
year = year(as_date(`Created Date`))
) %>%
jsonify(),
`Appts Ran` = pdscScheduledBy %>%
transmute(
`Customer ID` = customerId,
`Job #` = jobNumber,
`Scheduled Date` = scheduledDate,
`Customer Name` = customerName,
`Location Street` = locationStreet,
`Location City` = locationCity,
`Job Type` = jobType,
`Job Campaign` = jobCampaign,
`Campaign Category` = campaignCategory,
`Primary Technician` = primaryTechnician,
`Business Unit` = businessUnit,
Status = status,
`Cancel Reason` = cancelReason,
`Booked By` = bookedBy,
week = week(as_date(scheduledDate)),
year = year(as_date(scheduledDate))
) %>%
jsonify(),
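# Call-time values arrive from Nextiva as fractions of a day; below they are
# converted to seconds and then rendered as "1899-12-31 HH:MM:SS" strings (the
# Excel zero date) so that spreadsheet tools display them as durations. The two
# route-point columns are then set to the bare zero date, i.e. zero time.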
`Call Time` = adr %>%
getTodayFromNextiva(dateCol = "dateAndTime", yesterday) %>%
{
if(nrow(.) > 0) {

mutate(., totalTime = sumAcross(., 'acdCalls', 'internalCalls')) %>%
transmute(
Date = dateAndTime,
Name = name,
`ACD Calls` = acdCalls,
`Outbound ACD Calls` = outboundAcdCalls,
`Inbound Calls` = inboundCalls,
`Outbound Calls` = outboundCalls,
`Internal Calls` = internalCalls,
`Total Time` = totalTime
) %>%
mutate(across(`ACD Calls`:`Total Time`,
function(x) {
x %>%
as.numeric() %>%
{. * 3600*24} %>%
as.duration() %>%
as_datetime() %>%
strftime(format = "1899-12-31 %H:%M:%S", tz = "UTC")

})) %>%
mutate(`Route Point Calls` = "1899-12-31",
`Outbound Route Point Calls` = "1899-12-31") %>%
jsonify()

} else {
.
}

}
,
`Total Calls` = acr %>%
getTodayFromNextiva(dateCol = 'dateAndTime', yesterday) %>%
{
if(nrow(.) > 0) {

mutate(., totalCalls = sumAcross(., 'acdCalls', 'internalCalls'),
daysWorked = as.numeric(totalCalls > 0)) %>%
transmute(
Date = dateAndTime,
Name = name,
`Calls Presented` = callsPresented,
`ACD Calls` = acdCalls,
`Outbound ACD Calls` = outboundAcdCalls,
`Route Point Calls` = routePointCalls,
`Outbound Route Point Calls` = outboundRoutePointCalls,
`Inbound Calls` = inboundCalls,
`Outbound Calls` = outboundCalls,
`Internal Calls` = internalCalls,
`Total Calls` = totalCalls,
`Days Worked` = daysWorked
) %>%
mutate(across(`Calls Presented`:`Total Calls`,
as.numeric)) %>%
jsonify()

} else {

.

}
},
`Bounced Calls` = bouncedCalls %>%
getTodayFromNextiva(dateCol = "v1", yesterday) %>%
{
if(nrow(.) > 0) {
transmute(.,
Date = v1,
Name = v2,
`21-Bounced` = as.numeric(x21)) %>%
jsonify()
} else {
.
}
},
`Rehash Est Sold` = rehashEst %>%
transmute(
`Estimate Name` = estimateName,
`Parent Job Campaign` = parentJobCampaign,
`Opportunity Number` = opportunityNumber,
`Opportunity Status` = opportunityStatus,
`Estimate Status` = estimateStatus,
`Estimate Age (Days)` = estimateAgeDays,
`Creation Date` = creationDate,
`Sold On` = soldOn,
`Sold By` = soldBy,
`Age When Sold` = ageWhenSold
) %>%
jsonify()
)

updatedScMetrics <- map(selfName(names(scMetricsList)),
                        function(nm) {
                          bind_rows(scMetricsOld[[nm]], scMetricsList[[nm]]) %>%
                            unique()
                        })

rio::export(updatedScMetrics,
paste0(
.nDriveDir,
"/Sales Coordination Management/Performance Progress Report/backendData/scMetricsData.json"
))

# We will now export the data as separate files to a folder
scBackendDir <- paste0(
.nDriveDir,
"/Sales Coordination Management/Performance Progress Report/backendData"
)
lapply(names(updatedScMetrics),
function(x) {

readr::write_csv(
updatedScMetrics[[x]],
paste0(scBackendDir,
"/scMetrics-",
x,
".csv"),
escape = 'backslash'
)

})

# Prepare workbooks to send. Also update a JSON file of reporting data.
scMetrics <- createWorkbook() %>%
addSheet('Call Data', stCallsFlt) %>%
addSheet('Appt Creation', bookedSalesCalls) %>%
addSheet('Appt Ran', pdscScheduledBy %>% select(-createdDate)) %>%
addSheet('Call Time', adr) %>%
addSheet('Total Calls', acr) %>%
addSheet('Bounced Calls', bouncedCalls) %>%
addSheet('Availability', scAvailability) %>%
tmpSaveWorkbook('scMetricsTf', 'SC Metrics')

canceledSalesWkbk <- createWorkbook() %>%
addSheet('Canceled Sales', canceledSalesAppts) %>%
tmpSaveWorkbook('canceledSalesApptTf',
paste0('Canceled Sales Appts ',
lastWeek[1], ' - ', lastWeek[2]))

scBusinessUpdate <- createWorkbook() %>%
addSheet('Appts Booked',
dropIns %>%
left_join(rehashSmry,
by = c('createdDate' = 'soldOn'))) %>%
addSheet('Appointment Breakdown', apptBreakdown) %>%
addSheet('Appointment Schedule', upcomingApptBreakdown) %>%
addSheet('Appointments Ran', apptsRanSmry) %>%
tmpSaveWorkbook('scBusinessUpdateTf', 'SC Business Update')

scBusinessUpdateList <- list(
`Appts Booked` = dropIns %>%
left_join(rehashSmry,
by = c('createdDate' = 'soldOn')),
`Appointment Breakdown` = apptBreakdown,
`Appointment Schedule` = upcomingApptBreakdown,
`Appointments Ran` = apptsRanSmry
)

rio::export(
scBusinessUpdateList,
paste0(
.dataDir,
'/sc-business-update/scBusinessUpdate.json'
)
)

# Upload to the location on the N drive
tryWhileLoop(
updateDriveItem(getNDriveId(),
ids$stBusinessUpdateJson,
filePath = paste0(
.dataDir,
'/sc-business-update/scBusinessUpdate.json'
),
containerType = 'drives')
)

csrMetrics <- createWorkbook() %>%
addSheet('Call Data', stCallsFlt) %>%
addSheet('Booked Inspections', biReport) %>%
addSheet('Booked SC', bookedServiceCalls) %>%
addSheet('MP Sold', mpSold) %>%
addSheet('Call Time', adr) %>%
addSheet('Total Calls', acr) %>%
addSheet('Bounced Calls', bouncedCalls) %>%
addSheet('Availability', csrAvailability) %>%
tmpSaveWorkbook('csrMetricsTf', 'CSR Metrics')


csrMetricsOldRaw <- fromJSON(readLines(paste0(
.nDriveDir,
"/CSR Management/Reporting/Performance Progress Reports/backendData/csrMetricsData.json"
)))


csrMetricsOld <- map2(
selfName(names(csrMetricsOldRaw)),
c(
"Date", "Date", "Created Date",
"Sold On", "Date", "Date", "Date"
),
function(nm, dtCol) {
csrMetricsOldRaw[[nm]] %>%
filter(.[[dtCol]] < yesterday[1])
}
)

csrMetricsList <- list(
`Call Data` = stCallsFlt %>%
mutate(durationInSeconds = as.numeric(hms(duration))) %>%
transmute(ID = id,
`Call Direction` = callDirection,
Date = date,
Duration = duration,
`Duration in Seconds` = durationInSeconds,
Agent = agent) %>%
jsonify(),
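# The booked-inspection files appear to be named for the day after the report
# date (they are read in above with a `yesterday + 1` filename pattern), so the
# date extracted from fname is shifted back by one day.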
`Booked Inspections` = if(nrow(biReport) > 0) {
biReport %>%
mutate(date = str_extract(fname, '[0-9]{4}-[0-9]*-[0-9]*') %>%
ymd %>%
{. - 1}) %>%
relocate(date) %>%
transmute(
Date = date,
Employee = employee,
`Recurring Service` = recurringService,
`Outbound Calls` = as.numeric(outboundCalls),
`Booked Inspections` = as.numeric(bookedInspections),
`Conversion Rate` = as.numeric(conversionRate),
`Inbound Calls Booked` = as.numeric(inboundCallsBooked),
`Rev from Rec Service` = as.numeric(revFromRecService),
`Rev from Lead Job` = as.numeric(revFromLeadJob),
`Total Inspections booked` = `Inbound Calls Booked` + `Booked Inspections`
) %>%
jsonify()
} else {
biReport
},
`Booked SC` = bookedServiceCalls %>%
transmute(
`Job #` = jobNumber,
`Job Type` = jobType,
`Business Unit` = businessUnit,
Status = status,
`Booked By` = bookedBy,
`Created Date` = createdDate,
`Scheduled Date` = scheduledDate
) %>%
jsonify(),
`MP Sold` = mpSold %>%
transmute(
`Customer Id` = customerId,
`Customer Name` = customerName,
`Invoice Id` = invoiceId,
`Invoice Number` = as.character(invoiceNumber),
`Membership Name` = membershipName,
Type = type,
Price = price,
StartDate = startDate,
EndDate = endDate,
Commission = commission,
`Sold By` = soldBy,
`Sold On` = soldOn,
Status = status
) %>%
jsonify(),
`Call Time` = adr %>%
getTodayFromNextiva(dateCol = "dateAndTime", yesterday) %>%
{
if(nrow(.) > 0) {

mutate(., totalTime = sumAcross(., 'acdCalls', 'internalCalls')) %>%
transmute(
Date = dateAndTime,
Name = name,
`ACD Calls` = acdCalls,
`Outbound ACD Calls` = outboundAcdCalls,
`Route Point Calls` = routePointCalls,
`Outbound Route Point Calls` = outboundRoutePointCalls,
`Inbound Calls` = inboundCalls,
`Outbound Calls` = outboundCalls,
`Internal Calls` = internalCalls,
`Total Time` = totalTime
) %>%
mutate(across(`ACD Calls`:`Total Time`,
function(x) {
x %>%
as.numeric() %>%
{. * 3600*24} %>%
as.duration() %>%
as_datetime() %>%
strftime(format = "1899-12-31 %H:%M:%S", tz = "UTC")

})) %>%
mutate(`Route Point Calls` = "1899-12-31",
`Outbound Route Point Calls` = "1899-12-31") %>%
jsonify()

} else {

.

}
},
`Total Calls` = acr %>%
getTodayFromNextiva(dateCol = 'dateAndTime', yesterday) %>%
{
if(nrow(.) > 0) {

mutate(., totalCalls = sumAcross(., 'acdCalls', 'internalCalls'),
daysWorked = as.numeric(totalCalls > 0)) %>%
transmute(
Date = dateAndTime,
Name = name,
`Calls Presented` = callsPresented,
`ACD Calls` = acdCalls,
`Outbound ACD Calls` = outboundAcdCalls,
`Route Point Calls` = routePointCalls,
`Outbound Route Point Calls` = outboundRoutePointCalls,
`Inbound Calls` = inboundCalls,
`Outbound Calls` = outboundCalls,
`Internal Calls` = internalCalls,
`Total Calls` = totalCalls,
`Days Worked` = daysWorked
) %>%
mutate(across(`Calls Presented`:`Total Calls`,
as.numeric)) %>%
jsonify()

} else {

.

}
},
`Bounced Calls` = bouncedCalls %>%
getTodayFromNextiva(dateCol = "v1", yesterday) %>%
{
if(nrow(.) > 0) {
transmute(.,
Date = v1,
Name = v2,
`21-Bounced` = as.numeric(x21)) %>%
jsonify()
} else {
.
}
}
)

updatedCsrMetrics <- map(selfName(names(csrMetricsList)),
function(nm) {
bind_rows(csrMetricsOld[[nm]], csrMetricsList[[nm]]) %>%
unique()
})

rio::export(updatedCsrMetrics,
paste0(
.nDriveDir,
"/CSR Management/Reporting/Performance Progress Reports/backendData/csrMetricsData.json"
))

csrBackendDir <- paste0(
.nDriveDir,
"/CSR Management/Reporting/Performance Progress Reports/backendData"
)
lapply(names(updatedCsrMetrics),
function(x) {

readr::write_csv(
updatedCsrMetrics[[x]],
paste0(csrBackendDir,
"/csrMetrics-",
x,
".csv"),
escape = 'backslash'
)

})

emailBody <- createWorkbook() %>%
addSheet('Body Summary', fullBdySmry) %>%
tmpSaveWorkbook('bodySmryTf', 'Email Body')

bodyTemplate <- if(nrow(fullBdySmry) == 1) {

  sprintf(
    "Sales ran %s and scheduled %s appointments yesterday. Service ran %s, scheduled %s, and left %s estimates on %s opportunities for new equipment.",
    fullBdySmry$salesRan,
    fullBdySmry$salesScheduled,
    fullBdySmry$serviceRan,
    fullBdySmry$serviceScheduled,
    fullBdySmry$serviceEsts,
    fullBdySmry$serviceOpps
  )

} else if(nrow(fullBdySmry) > 1) {

  # More than one day in the window: send the full table instead of a sentence
  fullBdySmry %>%
    clean_names('title') %>%
    kableHtml()

} else {

  # No rows: fall back to an empty body rather than NULL
  ''

}

sendMail(from = '[email protected]',
to = c('[email protected]',
'[email protected]'),
subject = 'SC/CSR Morning Report',
body = '',
attachmentFiles = c(scMetricsTf,
scBusinessUpdateTf,
csrMetricsTf,
bodySmryTf))

if(weekdays(today()) == "Monday") {
sendMail(from = '[email protected]',
to = c("[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]"),
body = bodyTemplate,
subject = 'Sales & Service Update',
send = FALSE,
attachmentFiles = canceledSalesApptTf)
} else {
sendMail(from = '[email protected]',
to = c("[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]"),
body = bodyTemplate,
subject = 'Sales & Service Update',
send = FALSE)
}


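# Record this run's date so the next run's `yesterday` window (defined at the
# top of the script) picks up where this one left off.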
rio::export(today(), paste0(.dataDir, "/runtimes/scCsrReporting.rds"))