Abstract
Combining the stratified CV data and performing aggregations and statistical computations to construct an anonymized tidy dataset.
librariesVersion <- c()
for(i in 1:length(libraries))
    librariesVersion <- c(librariesVersion, paste(packageVersion(libraries[i])))
librariesLoaded <- lapply(libraries, require, character.only = TRUE)
## Loading required package: magrittr
## Loading required package: DT
## Loading required package: ggplot2
## Loading required package: jsonlite
## Loading required package: parallel
../000.core/00.01.libraries.R completed in 0.58 seconds
sourceTimeNeeded <- c(sourceTimeNeeded, timeNeeded)
source.starting.time <- proc.time()[3]
## Base functions
# @authors kp@eworx.gr ako@eworx.gr
repository <- "/data/generic/"
getSourcePath <- function(filename, baseFolder = repository){
    return(paste(baseFolder, filename, sep = ""))
}
readData <- function(filename, colClasses = c(), baseFolder = repository, header = TRUE, sep = "\t", encoding = "UTF-8", stringsAsFactors = TRUE, na.strings = c("", "NULL"), verbose = FALSE){
    if(length(colClasses) == 0)
        return(data.table::fread(input = getSourcePath(filename, baseFolder), header = header, sep = sep, encoding = encoding, stringsAsFactors = stringsAsFactors, verbose = verbose, showProgress = TRUE, na.strings = na.strings))
    return(data.table::fread(input = getSourcePath(filename, baseFolder), colClasses = colClasses, header = header, sep = sep, encoding = encoding, verbose = verbose, showProgress = TRUE, na.strings = na.strings))
}
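A brief usage sketch for readData; the scratch file below is hypothetical and only serves to illustrate the call.
# Hypothetical example: write a small tab-separated file under the repository and read it back.
dir.create(getSourcePath("scratch"), recursive = TRUE, showWarnings = FALSE)
write.table(data.frame(id = 1:2, label = c("a", "b")), getSourcePath("scratch/readDataExample.tsv"), sep = "\t", row.names = FALSE, quote = FALSE)
exampleDT <- readData("scratch/readDataExample.tsv")  # character columns become factors by default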
# rds for small disk space & fst for fast load
saveBinary <- function(data, filename, baseFolder = repository, format = "rds"){
    fileName <- getSourcePath(filename, baseFolder)
    dir.create(dirname(fileName), recursive = TRUE, showWarnings = FALSE)
    if(format == "rds") saveRDS(data, fileName)
    if(format == "fst") fst::write_fst(data, fileName)
}
# alternative for rough read/write operations
saveRDS_ <- function(object, file){
    dir.create(dirname(file), recursive = TRUE, showWarnings = FALSE)
    saveRDS(object, file)
}
# rds for small disk space & fst for fast load
loadBinary <- function(filename, baseFolder = repository, format = "rds", as.data.table = TRUE){
    if(format == "rds"){ return(readRDS(getSourcePath(filename, baseFolder))) }
    if(format == "fst"){ return(fst::read_fst(getSourcePath(filename, baseFolder), as.data.table = as.data.table)) }
}
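As the comments above note, "rds" favours small files on disk while "fst" favours fast loading. A minimal round-trip sketch with a hypothetical scratch path:
# Hypothetical example: the same table saved in both formats and reloaded from the fst copy.
exampleDT <- data.table::data.table(id = 1:3, value = c("a", "b", "c"))
saveBinary(exampleDT, "scratch/exampleDT.rds", format = "rds")    # compact on disk
saveBinary(exampleDT, "scratch/exampleDT.fst", format = "fst")    # fast to reload
reloadedDT <- loadBinary("scratch/exampleDT.fst", format = "fst") # returns a data.table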
rowColumns <- function(data){
    return(paste(format(nrow(data), big.mark = ","), "Rows X", ncol(data), "Columns"))
}
# As the mounted storage is in memory, make sure the asset include.css is there.
publishIncludeCss <- function(){
    sourceFile <- "/data/jobs/wp41.analysis/000.core/include.css"
    destinationFile <- "/data/tmpfs/results/include.css"
    if (!file.exists(destinationFile)) {
        return(file.copy(sourceFile, destinationFile))
    } else {
        return(TRUE)
    }
}
summariseTable <- function(data){
    return(data.frame(unclass(summary(data)), check.names = FALSE, stringsAsFactors = FALSE))
    #return(do.call(cbind, lapply(data, summary)))
}
factoriseCharacterColumns <- function(data){
    for(name in names(data)){
        if(class(data[[name]]) == "character"){
            data[[name]] <- as.factor(data[[name]])
        }
    }
    return(data)
}
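A small illustration of factoriseCharacterColumns; the toy data frame is hypothetical.
# Hypothetical example: only the character column is converted, the numeric one is left untouched.
toyDF <- data.frame(code = c("B", "C1"), years = c(2, 5), stringsAsFactors = FALSE)
toyDF <- factoriseCharacterColumns(toyDF)
# class(toyDF$code) is now "factor"; class(toyDF$years) stays "numeric"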
############################
# https://rstudio.github.io/DT/010-style.html
# https://rpubs.com/marschmi/RMarkdown
capitalise <- function(x) paste0(toupper(substring(x, 1, 1)), substring(x, 2, nchar(x)))
styliseDTNumericalColumn <- function(data, result, columnName, color, columnsName_original){
    if(columnName %in% columnsName_original){
        result <- result %>% formatStyle(
            columnName,
            background = styleColorBar(data[[columnName]], color),
            backgroundSize = '100% 90%',
            backgroundRepeat = 'no-repeat',
            backgroundPosition = 'center'
        )
    }
    return(result)
}
reportTabularData <- function(data){
    columnsName <- names(data)
    #columnsName <- lapply(columnsName, capitalise)
    columnsName_original <- names(data)
    result <-
        DT::datatable(
            data,
            class = 'cell-border stripe',
            filter = 'top',
            rownames = FALSE,
            colnames = columnsName,
            extensions = 'Buttons',
            options = list(
                pageLength = 20,
                columnDefs = list(list(className = 'dt-left', targets = "_all")),
                dom = 'Bfrtip',
                buttons = c('copy', 'csv', 'excel', 'pdf'),
                searchHighlight = TRUE,
                initComplete = JS(
                    "function(settings, json) {",
                    "$(this.api().table().header()).css({'border': '1px solid'});",
                    "}"
                )
            )
        )
    result <- styliseDTNumericalColumn(data, result, "Count", 'steelblue', columnsName_original)
    result <- styliseDTNumericalColumn(data, result, "sourceTimeNeeded", '#808080', columnsName_original)
    result <- styliseDTNumericalColumn(data, result, "timeNeeded", '#808080', columnsName_original)
    result <- styliseDTNumericalColumn(data, result, "percentMatch", '#4682b4', columnsName_original)
    return(result)
}
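A call sketch for reportTabularData; the small count table is hypothetical. Any column named Count, sourceTimeNeeded, timeNeeded or percentMatch picks up the colour-bar styling.
# Hypothetical example: renders an interactive DT widget with export buttons when knitted to HTML.
exampleCounts <- data.frame(category = c("Driving", "Job Related"), Count = c(120L, 340L))
reportTabularData(exampleCounts)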
fonts <- list(
    sans = "DejaVu Serif",
    mono = "DejaVu Serif",
    `Times New Roman` = "DejaVu Serif"
)
cleanJsonId <- function(txt){
    txt <- gsub("\\.json", "", txt)
    gsub(".*/", "", txt)
}
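For example (hypothetical path), cleanJsonId strips both the directory part and the ".json" extension.
# Hypothetical example: returns "abc123".
cleanJsonId("jobsOutput/json/abc123.json")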
embed_data <- function(x = mtcars, filename = "file.csv", label = "Get data"){
    # Create an encoded Base64 data stream
    encode_data <- function(x){
        saveMe <- getSourcePath("file.csv")
        write.csv2(x, saveMe)
        enc <- sprintf('data:text/csv;base64,%s', openssl::base64_encode(paste0(readLines(saveMe), collapse = "\n")))
        unlink(saveMe)
        return(enc)
    }
    # String result ready to be placed in R Markdown output
    paste0("<a download='", filename, "' href=", encode_data(x), ">", label, "</a>")
}
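A usage sketch; in an R Markdown chunk with results = 'asis' (an assumption about the rendering context) the returned string becomes a clickable download link.
# Hypothetical example: embeds a small CSV as a Base64 data URI inside an anchor tag.
cat(embed_data(head(mtcars), filename = "mtcars_sample.csv", label = "Download sample"))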
###########################################################################################################
## R version 4.0.5 (2021-03-31)
## Platform: x86_64-pc-linux-gnu (64-bit)
## Running under: Ubuntu 20.04.2 LTS
##
## Matrix products: default
## BLAS/LAPACK: /usr/lib/x86_64-linux-gnu/openblas-pthread/libopenblasp-r0.3.8.so
##
## locale:
## [1] LC_CTYPE=en_US.UTF-8 LC_NUMERIC=C
## [3] LC_TIME=en_US.UTF-8 LC_COLLATE=en_US.UTF-8
## [5] LC_MONETARY=en_US.UTF-8 LC_MESSAGES=C
## [7] LC_PAPER=en_US.UTF-8 LC_NAME=C
## [9] LC_ADDRESS=C LC_TELEPHONE=C
## [11] LC_MEASUREMENT=en_US.UTF-8 LC_IDENTIFICATION=C
##
## attached base packages:
## [1] parallel stats graphics grDevices utils datasets methods
## [8] base
##
## other attached packages:
## [1] jsonlite_1.7.2 ggplot2_3.3.3 DT_0.18 magrittr_2.0.1
## [5] rmarkdown_2.8 data.table_1.14.0
##
## loaded via a namespace (and not attached):
## [1] bslib_0.2.5 compiler_4.0.5 pillar_1.6.0 jquerylib_0.1.4
## [5] highr_0.9 tools_4.0.5 digest_0.6.27 evaluate_0.14
## [9] lifecycle_1.0.0 tibble_3.1.1 gtable_0.3.0 pkgconfig_2.0.3
## [13] rlang_0.4.11 yaml_2.2.1 xfun_0.23 withr_2.4.2
## [17] stringr_1.4.0 dplyr_1.0.6 knitr_1.33 generics_0.1.0
## [21] htmlwidgets_1.5.3 sass_0.4.0 vctrs_0.3.8 grid_4.0.5
## [25] tidyselect_1.1.1 glue_1.4.2 R6_2.5.0 fansi_0.4.2
## [29] purrr_0.3.4 scales_1.1.1 htmltools_0.5.1.1 ellipsis_0.3.2
## [33] colorspace_2.0-1 utf8_1.2.1 stringi_1.6.2 munsell_0.5.0
## [37] crayon_1.4.1
../000.core/00.02.base.functions.R completed in 0.07 seconds
Length: 353518
fileName <- "jobsOutput/tidySurvey/demographStat.fst"
demographStat <- loadBinary(fileName, format = "fst")
demographStat <- demographStat[id %in% strataData]
Dimensions: 353518, 20
fileName <- "jobsOutput/skillsDescriptionDT.rds"
skillsDescriptionDT <- loadBinary(fileName)
skillsDescriptionDT <- skillsDescriptionDT[id %in% strataData]
Dimensions: 2671643, 4
fileName <- "jobsOutput/skills/finalPredictedSkills.rds"
finalFreeTextPredictedSkills <- loadBinary(fileName)
finalFreeTextPredictedSkills <- finalFreeTextPredictedSkills[id %in% strataData]
Dimensions: 339566, 3
fileName <- "jobsOutput/skills/skillsForMatching/linguisticTextSuggestedUri.rds"
linguisticPredictedSkills <- loadBinary(fileName)
linguisticPredictedSkills <- linguisticPredictedSkills[id %in% strataData]
Dimensions: 799450, 5
Dimensions: 13485, 13
Load processed free-text skills with index.
fileName <- "jobsOutput/skills/skillsForMatching/processedSkills.fst"
processedSkills <- loadBinary(fileName, format = "fst")
Dimensions: 1761673, 5
1.load.data.R completed in 63.24 seconds
finalFreeTextPredictedSkills[, type := "non-linguistic"]
linguisticPredictedSkills <- linguisticPredictedSkills[, .(id, skillCode = suggestedCode, index)]
linguisticPredictedSkills[, type := "linguistic"]
finalPredictedSkills <- rbindlist(list(finalFreeTextPredictedSkills, linguisticPredictedSkills))
finalPredictedSkills[processedSkills, on = "index", category := i.variable]
finalPredictedSkills <- unique(finalPredictedSkills, by = c("id", "skillCode", "category"))[, -c("index")]
drivingSkills <- skillsDescriptionDT[grepl(pattern = "Driving", variable)]
drivingCategories <- c("AM", "A1", "A2", "A", "B1", "B", "B3", "C1", "C1E", "C", "CE", "D1", "D1E", "D", "DE")
drivingSkills <- drivingSkills[value %in% drivingCategories, .(id, value)]
drivingSkillsStat <- merge(demographStat, drivingSkills, by = "id")
skillsDescriptionDT <- skillsDescriptionDT[!grepl(pattern = "Code", variable)]
skillsDescriptionDT <- skillsDescriptionDT[grepl(pattern = "Driving", variable), variable := "Driving"]
skillsDescriptionDT <- skillsDescriptionDT[grepl(pattern = "ForeignLanguage", variable), variable := "Foreign Language"]
skillsDescriptionDT <- skillsDescriptionDT[grepl(pattern = "MotherTongue", variable), variable := "Mother Tongue"]
skillsDescriptionDT[variable == "JobRelated", variable := "Job Related"]
skillCategoriesStat <- merge(skillsDescriptionDT, demographStat, by = "id")
skillCategoriesStat <- skillCategoriesStat[, -c("value", "SkillsPassport.Locale")]
setnames(skillCategoriesStat, "variable", "category")
skillsStat <- merge(demographStat, finalPredictedSkills)
skillsStat[, skillTitle := escoSkillsEn$preferredLabel[match(skillCode, escoSkillsEn$conceptUri)]]
2.process.data.R completed in 18.33 seconds
Save free-text predicted skills.
fileName <- "jobsOutput/tidySurvey/skills/skillsStat.fst"
saveBinary(skillsStat, fileName, format = "fst")
Datasource : /data/generic/jobsOutput/tidySurvey/skills/skillsStat.fst of 180,612,085 bytes.
Save skills for broad categories.
fileName <- "jobsOutput/tidySurvey/skills/skillCategoriesStat.fst"
saveBinary(skillCategoriesStat, fileName, format = "fst")
Datasource : /data/generic/jobsOutput/tidySurvey/skills/skillCategoriesStat.fst of 220,688,193 bytes.
Save driving skills.
fileName <- "jobsOutput/tidySurvey/skills/drivingSkillsStat.fst"
saveBinary(drivingSkillsStat, fileName, format = "fst")
Datasource : /data/generic/jobsOutput/tidySurvey/skills/drivingSkillsStat.fst of 32,362,623 bytes.
3.save.data.R completed in 2.28 seconds
Completed in 84.5 seconds.
An Eworx S.A. DSENSE report - a documented process for internal consumption.
– end of report –