[1445] | 1 | /** |
---|
| 2 | * AssayService Service |
---|
| 3 | * |
---|
| 4 | * @author s.h.sikkema@gmail.com |
---|
| 5 | * @since 20101216 |
---|
| 6 | * @package dbnp.studycapturing |
---|
| 7 | * |
---|
| 8 | * Revision information: |
---|
| 9 | * $Rev: 1730 $ |
---|
| 10 | * $Author: s.h.sikkema@gmail.com $ |
---|
| 11 | * $Date: 2011-04-08 10:07:56 +0000 (vr, 08 apr 2011) $ |
---|
| 12 | */ |
---|
| 13 | package dbnp.studycapturing |
---|
| 14 | |
---|
| 15 | import org.apache.poi.ss.usermodel.* |
---|
| 16 | import org.apache.poi.xssf.usermodel.XSSFWorkbook |
---|
| 17 | import org.apache.poi.hssf.usermodel.HSSFWorkbook |
---|
| 18 | |
---|
class AssayService {

	// no automatic transaction wrapping for this service (Grails convention)
	boolean transactional = false

	// injected Grails services
	def authenticationService
	def moduleCommunicationService

---|
| 25 | /** |
---|
[1559] | 26 | * Collects the assay field names per category in a map as well as the |
---|
| 27 | * module's measurements. |
---|
[1445] | 28 | * |
---|
[1559] | 29 | * @param assay the assay for which to collect the fields |
---|
| 30 | * @return a map of categories as keys and field names or measurements as |
---|
| 31 | * values |
---|
[1445] | 32 | */ |
---|
[1559] | 33 | def collectAssayTemplateFields(assay) throws Exception { |
---|
[1445] | 34 | |
---|
[1583] | 35 | def getUsedTemplateFields = { templateEntities -> |
---|
[1445] | 36 | |
---|
[1559] | 37 | // gather all unique and non null template fields that haves values |
---|
| 38 | templateEntities*.giveFields().flatten().unique().findAll{ field -> |
---|
| 39 | |
---|
| 40 | field && templateEntities.any { it.fieldExists(field.name) && it.getFieldValue(field.name) } |
---|
| 41 | |
---|
[1583] | 42 | }.collect{[name: it.name, comment: it.comment]} |
---|
[1559] | 43 | |
---|
| 44 | } |
---|
| 45 | |
---|
[1445] | 46 | // check whether module is reachable |
---|
[1559] | 47 | if (!moduleCommunicationService.isModuleReachable(assay.module.url)) { |
---|
[1445] | 48 | |
---|
| 49 | throw new Exception('Module is not reachable') |
---|
| 50 | |
---|
| 51 | } |
---|
| 52 | |
---|
| 53 | def samples = assay.samples |
---|
| 54 | |
---|
[1583] | 55 | [ 'Subject Data' : getUsedTemplateFields( samples*."parentSubject".unique() ), |
---|
| 56 | 'Sampling Event Data' : getUsedTemplateFields( samples*."parentEvent".unique() ), |
---|
| 57 | 'Sample Data' : getUsedTemplateFields( samples ), |
---|
| 58 | 'Event Group' : [[name: 'name', comment: 'Name of Event Group']], |
---|
[1559] | 59 | 'Module Measurement Data': requestModuleMeasurementNames(assay) |
---|
| 60 | ] |
---|
[1445] | 61 | |
---|
[1559] | 62 | } |
---|
[1445] | 63 | |
---|
	/**
	 * Gathers all assay related data, including measurements from the module,
	 * into 1 hash map containing: Subject Data, Sampling Event Data, Sample
	 * Data, and module specific measurement data.
	 * Data from each of the 4 hash map entries are themselves hash maps
	 * representing a descriptive header (field name) as key and the data as
	 * value.
	 *
	 * @param assay the assay to collect data for
	 * @param fieldMap map with categories as keys and fields as values
	 * @param measurementTokens selection of measurementTokens
	 * @return The assay data structure as described above.
	 * @throws Exception when the assay's module is not reachable
	 */
	def collectAssayData(assay, fieldMap, measurementTokens) throws Exception {

		def collectFieldValuesForTemplateEntities = { templateFieldNames, templateEntities ->

			// return a hash map with for each field name all values from the
			// template entity list ('' for entities lacking the field)
			templateFieldNames.inject([:]) { map, fieldName ->

				map + [(fieldName): templateEntities.collect {

					it?.fieldExists(fieldName) ? it.getFieldValue(fieldName) : ''

				}]

			}

		}

		def getFieldValues = { templateEntities, fieldNames, propertyName = '' ->

			def returnValue

			// if no property name is given, simply collect the fields and
			// values of the template entities themselves
			if (propertyName == '') {

				returnValue = collectFieldValuesForTemplateEntities(fieldNames, templateEntities)

			} else {

				// if a property name is given, we'll have to do a bit more work
				// to ensure efficiency. The reason for this is that for a list
				// of template entities, the properties referred to by
				// propertyName can include duplicates. For example, for 10
				// samples, there may be less than 10 parent subjects. Maybe
				// there's only 1 parent subject. We don't want to collect field
				// values for this single subject 10 times ...
				def fieldValues

				// we'll get the unique list of properties to make sure we're
				// not getting the field values for identical template entity
				// properties more then once.
				def uniqueProperties = templateEntities*."$propertyName".unique()

				fieldValues = collectFieldValuesForTemplateEntities(fieldNames, uniqueProperties)

				// prepare a lookup hashMap to be able to map an entities'
				// property (e.g. a sample's parent subject) to an index value
				// from the field values list
				int i = 0
				def propertyToFieldValueIndexMap = uniqueProperties.inject([:]) { map, item -> map + [(item):i++]}

				// prepare the return value so that it has an entry for field
				// name. This will be the column name (second header line).
				returnValue = fieldNames.inject([:]) { map, item -> map + [(item):[]] }

				// finally, fill map the unique field values to the (possibly
				// not unique) template entity properties. In our example with
				// 1 unique parent subject, this means copying that subject's
				// field values to all 10 samples.
				templateEntities.each{ te ->

					fieldNames.each{

						returnValue[it] << fieldValues[it][propertyToFieldValueIndexMap[te[propertyName]]]

					}

				}

			}

			returnValue

		}

		// check whether module is reachable
		if (!moduleCommunicationService.isModuleReachable(assay.module.url)) {

			throw new Exception('Module is not reachable')

		}

		def samples = assay.samples

		def eventFieldMap = [:]

		// check whether event group data was requested
		if (fieldMap['Event Group']) {

			def names = samples*.parentEventGroup*.name.flatten()

			// only set name field when there's actual data
			if (!names.every {!it}) eventFieldMap['name'] = names

		}

		// module measurements are only fetched when measurement tokens were
		// actually selected; otherwise an empty map is returned for the category
		[ 'Subject Data' : getFieldValues(samples, fieldMap['Subject Data']*.name, 'parentSubject'),
		  'Sampling Event Data' : getFieldValues(samples, fieldMap['Sampling Event Data']*.name, 'parentEvent'),
		  'Sample Data' : getFieldValues(samples, fieldMap['Sample Data']*.name),
		  'Event Group' : eventFieldMap,
		  'Module Measurement Data': measurementTokens*.name ? requestModuleMeasurements(assay, measurementTokens) : [:]
		]
	}
---|
| 181 | |
---|
	/**
	 * Retrieves measurement names from the module through a rest call
	 *
	 * @param assay the assay for which to request measurement names
	 * @return the module's measurements converted to a list of Strings
	 */
	def requestModuleMeasurementNames(assay) {

		def moduleUrl = assay.module.url

		// rest endpoint on the module that lists measurements for this assay
		def path = moduleUrl + "/rest/getMeasurements/query?assayToken=$assay.assayUUID"

		def jsonArray = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path)

		// convert each JSON element to a plain String
		jsonArray*.toString()

	}
---|
| 200 | |
---|
| 201 | /** |
---|
[1445] | 202 | * Retrieves module measurement data through a rest call to the module |
---|
| 203 | * |
---|
| 204 | * @param consumer the url of the module |
---|
[1559] | 205 | * @param path path of the rest call to the module |
---|
[1445] | 206 | * @return |
---|
| 207 | */ |
---|
[1559] | 208 | def requestModuleMeasurements(assay, fields) { |
---|
[1445] | 209 | |
---|
[1559] | 210 | def moduleUrl = assay.module.url |
---|
[1445] | 211 | |
---|
[1559] | 212 | def tokenString = '' |
---|
| 213 | |
---|
[1716] | 214 | fields.each{ |
---|
| 215 | tokenString+="&measurementToken=${it.name.encodeAsURL()}" |
---|
| 216 | } |
---|
| 217 | |
---|
[1687] | 218 | def path = moduleUrl + "/rest/getMeasurementData/query?assayToken=$assay.assayUUID" + tokenString |
---|
[1559] | 219 | |
---|
| 220 | def (sampleTokens, measurementTokens, moduleData) = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path) |
---|
| 221 | |
---|
[1455] | 222 | if (!sampleTokens?.size()) return [] |
---|
[1445] | 223 | |
---|
| 224 | def lastDataIndex = moduleData.size() - 1 |
---|
| 225 | def stepSize = sampleTokens.size() + 1 |
---|
[1727] | 226 | |
---|
| 227 | // Convert the three different maps into a map like: |
---|
| 228 | // |
---|
| 229 | // [ "measurement 1": [ value1, value2, value3 ], |
---|
[1730] | 230 | // "measurement 2": [ value4, value5, value6 ] ] |
---|
[1727] | 231 | // |
---|
| 232 | def map = [:] |
---|
| 233 | def numSamples = sampleTokens.size(); |
---|
| 234 | def idx = 0; |
---|
| 235 | |
---|
| 236 | // Loop through all measurementtokens, and get the right slice from the measurement list |
---|
| 237 | measurementTokens.each { measurementToken -> |
---|
| 238 | def startIndex = idx++ * numSamples; |
---|
| 239 | def stopIndex = startIndex + numSamples - 1; |
---|
| 240 | map[ measurementToken.toString() ] = moduleData[ startIndex..stopIndex ].collect { it.toString() } |
---|
| 241 | } |
---|
[1445] | 242 | |
---|
[1727] | 243 | return map; |
---|
[1445] | 244 | } |
---|
| 245 | |
---|
| 246 | /** |
---|
[1559] | 247 | * Converts column |
---|
[1445] | 248 | * @param columnData multidimensional map containing column data. |
---|
| 249 | * On the top level, the data must be grouped by category. Each key is the |
---|
| 250 | * category title and the values are maps representing the columns. Each |
---|
| 251 | * column also has a title (its key) and a list of values. Columns must be |
---|
| 252 | * equally sized. |
---|
| 253 | * |
---|
| 254 | * For example, consider the following map: |
---|
| 255 | * [Category1: |
---|
| 256 | * [Column1: [1,2,3], Column2: [4,5,6]], |
---|
| 257 | * Category2: |
---|
| 258 | * [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]] |
---|
| 259 | * |
---|
| 260 | * which will be written as: |
---|
| 261 | * |
---|
| 262 | * | Category1 | | Category2 | | | |
---|
| 263 | * | Column1 | Column2 | Column3 | Column4 | Column5 | |
---|
| 264 | * | 1 | 4 | 7 | 10 | 13 | |
---|
| 265 | * | 2 | 5 | 8 | 11 | 14 | |
---|
| 266 | * | 3 | 6 | 9 | 12 | 15 | |
---|
| 267 | * |
---|
[1559] | 268 | * @return row wise data |
---|
[1445] | 269 | */ |
---|
[1559] | 270 | def convertColumnToRowStructure(columnData) { |
---|
[1445] | 271 | |
---|
| 272 | // check if all columns have the dimensionality 2 |
---|
[1559] | 273 | if (columnData.every { it.value.every { it.value instanceof ArrayList } }) { |
---|
[1445] | 274 | |
---|
| 275 | def headers = [[],[]] |
---|
| 276 | |
---|
[1559] | 277 | columnData.each { category -> |
---|
[1445] | 278 | |
---|
[1455] | 279 | if (category.value.size()) { |
---|
[1445] | 280 | |
---|
| 281 | // put category keys into first row separated by null values |
---|
| 282 | // wherever there are > 1 columns per category |
---|
| 283 | headers[0] += [category.key] + [null] * (category.value.size() - 1) |
---|
| 284 | |
---|
| 285 | // put non-category column headers into 2nd row |
---|
| 286 | headers[1] += category.value.collect{it.key} |
---|
| 287 | |
---|
| 288 | } |
---|
| 289 | |
---|
| 290 | } |
---|
| 291 | |
---|
| 292 | def d = [] |
---|
| 293 | |
---|
| 294 | // add all column wise data into 'd' |
---|
[1559] | 295 | columnData.each { it.value.each { d << it.value } } |
---|
[1445] | 296 | |
---|
| 297 | // transpose d into row wise data and combine with header rows |
---|
| 298 | headers + d.transpose() |
---|
| 299 | } |
---|
| 300 | |
---|
| 301 | } |
---|
[1559] | 302 | |
---|
| 303 | /** |
---|
| 304 | * Export column wise data in Excel format to a stream. |
---|
| 305 | * |
---|
| 306 | * @param columnData Multidimensional map containing column data |
---|
| 307 | * @param outputStream Stream to write to |
---|
| 308 | * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output |
---|
| 309 | * @return |
---|
| 310 | */ |
---|
| 311 | def exportColumnWiseDataToExcelFile(columnData, outputStream, useOfficeOpenXML = true) { |
---|
| 312 | |
---|
[1445] | 313 | // transform data into row based structure for easy writing |
---|
| 314 | def rows = convertColumnToRowStructure(columnData) |
---|
| 315 | |
---|
| 316 | if (rows) { |
---|
| 317 | |
---|
| 318 | exportRowWiseDataToExcelFile(rows, outputStream, useOfficeOpenXML) |
---|
| 319 | |
---|
| 320 | } else { |
---|
| 321 | |
---|
| 322 | throw new Exception('Wrong column data format.') |
---|
| 323 | |
---|
| 324 | } |
---|
| 325 | |
---|
| 326 | } |
---|
| 327 | |
---|
| 328 | /** |
---|
| 329 | * Export row wise data in Excel format to a stream |
---|
| 330 | * |
---|
[1559] | 331 | * @param rowData List of lists containing for each row all cell values |
---|
| 332 | * @param outputStream Stream to write to |
---|
| 333 | * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output |
---|
[1445] | 334 | * @return |
---|
| 335 | */ |
---|
| 336 | def exportRowWiseDataToExcelFile(rowData, outputStream, useOfficeOpenXML = true) { |
---|
| 337 | |
---|
| 338 | Workbook wb = useOfficeOpenXML ? new XSSFWorkbook() : new HSSFWorkbook() |
---|
| 339 | Sheet sheet = wb.createSheet() |
---|
| 340 | |
---|
| 341 | // create all rows |
---|
| 342 | rowData.size().times { sheet.createRow it } |
---|
| 343 | |
---|
| 344 | sheet.eachWithIndex { Row row, ri -> |
---|
| 345 | |
---|
| 346 | // create appropriate number of cells for this row |
---|
| 347 | rowData[ri].size().times { row.createCell it } |
---|
| 348 | |
---|
| 349 | row.eachWithIndex { Cell cell, ci -> |
---|
| 350 | |
---|
| 351 | // Numbers and values of type boolean, String, and Date can be |
---|
| 352 | // written as is, other types need converting to String |
---|
| 353 | def value = rowData[ri][ci] |
---|
| 354 | |
---|
| 355 | value = (value instanceof Number | value?.class in [boolean.class, String.class, Date.class]) ? value : value?.toString() |
---|
| 356 | |
---|
| 357 | // write the value (or an empty String if null) to the cell |
---|
| 358 | cell.setCellValue(value ?: '') |
---|
| 359 | |
---|
| 360 | } |
---|
| 361 | |
---|
| 362 | } |
---|
| 363 | |
---|
| 364 | wb.write(outputStream) |
---|
| 365 | outputStream.close() |
---|
| 366 | |
---|
| 367 | } |
---|
| 368 | |
---|
[1559] | 369 | } |
---|