Changeset 1752 for trunk/grails-app/services/dbnp
- Timestamp:
- Apr 11, 2011, 4:02:04 PM (13 years ago)
- Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/grails-app/services/dbnp/studycapturing/AssayService.groovy
r1731 r1752 16 16 import org.apache.poi.xssf.usermodel.XSSFWorkbook 17 17 import org.apache.poi.hssf.usermodel.HSSFWorkbook 18 import org.codehaus.groovy.grails.web.json.JSONObject; 18 19 19 20 class AssayService { 20 21 21 boolean transactional = false 22 def authenticationService 23 def moduleCommunicationService 24 25 /** 26 * Collects the assay field names per category in a map as well as the 27 * module's measurements. 28 * 29 * @param assay the assay for which to collect the fields 30 * @return a map of categories as keys and field names or measurements as 31 * values 32 */ 33 def collectAssayTemplateFields(assay) throws Exception { 34 35 def getUsedTemplateFields = { templateEntities -> 36 37 // gather all unique and non null template fields that haves values 38 templateEntities*.giveFields().flatten().unique().findAll{ field -> 39 40 field && templateEntities.any { it.fieldExists(field.name) && it.getFieldValue(field.name) } 41 42 }.collect{[name: it.name, comment: it.comment]} 43 44 } 45 46 // check whether module is reachable 47 if (!moduleCommunicationService.isModuleReachable(assay.module.url)) { 48 49 throw new Exception('Module is not reachable') 50 51 } 52 53 def samples = assay.samples 54 55 [ 'Subject Data' : getUsedTemplateFields( samples*."parentSubject".unique() ), 56 'Sampling Event Data' : getUsedTemplateFields( samples*."parentEvent".unique() ), 57 'Sample Data' : getUsedTemplateFields( samples ), 58 'Event Group' : [[name: 'name', comment: 'Name of Event Group']], 59 'Module Measurement Data': requestModuleMeasurementNames(assay) 60 ] 61 62 } 63 64 /** 65 * Gathers all assay related data, including measurements from the module, 66 * into 1 hash map containing: Subject Data, Sampling Event Data, Sample 67 * Data, and module specific measurement data. 68 * Data from each of the 4 hash map entries are themselves hash maps 69 * representing a descriptive header (field name) as key and the data as 70 * value. 
71 * 72 * @param assay the assay to collect data for 73 * @fieldMap map with categories as keys and fields as values 74 * @measurementTokens selection of measurementTokens 75 * @return The assay data structure as described above. 76 */ 77 def collectAssayData(assay, fieldMap, measurementTokens) throws Exception { 78 79 def collectFieldValuesForTemplateEntities = { templateFieldNames, templateEntities -> 80 81 // return a hash map with for each field name all values from the 82 // template entity list 83 templateFieldNames.inject([:]) { map, fieldName -> 84 85 map + [(fieldName): templateEntities.collect { 86 87 it?.fieldExists(fieldName) ? it.getFieldValue(fieldName) : '' 88 89 }] 90 91 } 92 93 } 94 95 def getFieldValues = { templateEntities, fieldNames, propertyName = '' -> 96 97 def returnValue 98 99 // if no property name is given, simply collect the fields and 100 // values of the template entities themselves 101 if (propertyName == '') { 102 103 returnValue = collectFieldValuesForTemplateEntities(fieldNames, templateEntities) 104 105 } else { 106 107 // if a property name is given, we'll have to do a bit more work 108 // to ensure efficiency. The reason for this is that for a list 109 // of template entities, the properties referred to by 110 // propertyName can include duplicates. For example, for 10 111 // samples, there may be less than 10 parent subjects. Maybe 112 // there's only 1 parent subject. We don't want to collect field 113 // values for this single subject 10 times ... 114 def fieldValues 115 116 // we'll get the unique list of properties to make sure we're 117 // not getting the field values for identical template entity 118 // properties more then once. 119 def uniqueProperties = templateEntities*."$propertyName".unique() 120 121 fieldValues = collectFieldValuesForTemplateEntities(fieldNames, uniqueProperties) 122 123 // prepare a lookup hashMap to be able to map an entities' 124 // property (e.g. 
a sample's parent subject) to an index value 125 // from the field values list 126 int i = 0 127 def propertyToFieldValueIndexMap = uniqueProperties.inject([:]) { map, item -> map + [(item):i++]} 128 129 // prepare the return value so that it has an entry for field 130 // name. This will be the column name (second header line). 131 returnValue = fieldNames.inject([:]) { map, item -> map + [(item):[]] } 132 133 // finally, fill map the unique field values to the (possibly 134 // not unique) template entity properties. In our example with 135 // 1 unique parent subject, this means copying that subject's 136 // field values to all 10 samples. 137 templateEntities.each{ te -> 138 139 fieldNames.each{ 140 141 returnValue[it] << fieldValues[it][propertyToFieldValueIndexMap[te[propertyName]]] 142 143 } 144 145 } 146 147 } 148 149 returnValue 150 151 } 152 153 // check whether module is reachable 154 if (!moduleCommunicationService.isModuleReachable(assay.module.url)) { 155 156 throw new Exception('Module is not reachable') 157 158 } 159 160 // Find samples and sort by name 22 boolean transactional = false 23 def authenticationService 24 def moduleCommunicationService 25 26 /** 27 * Collects the assay field names per category in a map as well as the 28 * module's measurements. 
29 * 30 * @param assay the assay for which to collect the fields 31 * @return a map of categories as keys and field names or measurements as 32 * values 33 */ 34 def collectAssayTemplateFields(assay) throws Exception { 35 36 def getUsedTemplateFields = { templateEntities -> 37 38 // gather all unique and non null template fields that haves values 39 templateEntities*.giveFields().flatten().unique().findAll{ field -> 40 41 field && templateEntities.any { it && it.fieldExists(field.name) && it.getFieldValue(field.name) } 42 43 }.collect{[name: it.name, comment: it.comment]} 44 45 } 46 47 def samples = assay.samples 48 [ 'Subject Data' : getUsedTemplateFields( samples*."parentSubject".unique() ), 49 'Sampling Event Data' : getUsedTemplateFields( samples*."parentEvent".unique() ), 50 'Sample Data' : getUsedTemplateFields( samples ), 51 'Event Group' : [[name: 'name', comment: 'Name of Event Group']], 52 53 // If module is not reachable, only the field 'module error' is returned, and is filled later on. 54 'Module Measurement Data': moduleCommunicationService.isModuleReachable(assay.module.url) ? requestModuleMeasurementNames(assay) : [ [ name: "Module error" ] ] 55 ] 56 57 } 58 59 /** 60 * Gathers all assay related data, including measurements from the module, 61 * into 1 hash map containing: Subject Data, Sampling Event Data, Sample 62 * Data, and module specific measurement data. 63 * Data from each of the 4 hash map entries are themselves hash maps 64 * representing a descriptive header (field name) as key and the data as 65 * value. 66 * 67 * @param assay the assay to collect data for 68 * @param fieldMap map with categories as keys and fields as values 69 * @param measurementTokens selection of measurementTokens 70 * @return The assay data structure as described above. 
71 */ 72 def collectAssayData(assay, fieldMap, measurementTokens) throws Exception { 73 74 def collectFieldValuesForTemplateEntities = { templateFieldNames, templateEntities -> 75 76 // return a hash map with for each field name all values from the 77 // template entity list 78 templateFieldNames.inject([:]) { map, fieldName -> 79 80 map + [(fieldName): templateEntities.collect { 81 82 it?.fieldExists(fieldName) ? it.getFieldValue(fieldName) : '' 83 84 }] 85 86 } 87 88 } 89 90 def getFieldValues = { templateEntities, fieldNames, propertyName = '' -> 91 92 def returnValue 93 94 // if no property name is given, simply collect the fields and 95 // values of the template entities themselves 96 if (propertyName == '') { 97 98 returnValue = collectFieldValuesForTemplateEntities(fieldNames, templateEntities) 99 100 } else { 101 102 // if a property name is given, we'll have to do a bit more work 103 // to ensure efficiency. The reason for this is that for a list 104 // of template entities, the properties referred to by 105 // propertyName can include duplicates. For example, for 10 106 // samples, there may be less than 10 parent subjects. Maybe 107 // there's only 1 parent subject. We don't want to collect field 108 // values for this single subject 10 times ... 109 def fieldValues 110 111 // we'll get the unique list of properties to make sure we're 112 // not getting the field values for identical template entity 113 // properties more then once. 114 def uniqueProperties = templateEntities*."$propertyName".unique() 115 116 fieldValues = collectFieldValuesForTemplateEntities(fieldNames, uniqueProperties) 117 118 // prepare a lookup hashMap to be able to map an entities' 119 // property (e.g. a sample's parent subject) to an index value 120 // from the field values list 121 int i = 0 122 def propertyToFieldValueIndexMap = uniqueProperties.inject([:]) { map, item -> map + [(item):i++]} 123 124 // prepare the return value so that it has an entry for field 125 // name. 
This will be the column name (second header line). 126 returnValue = fieldNames.inject([:]) { map, item -> map + [(item):[]] } 127 128 // finally, fill map the unique field values to the (possibly 129 // not unique) template entity properties. In our example with 130 // 1 unique parent subject, this means copying that subject's 131 // field values to all 10 samples. 132 templateEntities.each{ te -> 133 134 fieldNames.each{ 135 136 returnValue[it] << fieldValues[it][propertyToFieldValueIndexMap[te[propertyName]]] 137 138 } 139 140 } 141 142 } 143 144 returnValue 145 146 } 147 148 // Find samples and sort by name 161 149 def samples = assay.samples.toList().sort { it.name } 162 150 163 def eventFieldMap = [:] 164 165 // check whether event group data was requested 166 if (fieldMap['Event Group']) { 167 168 def names = samples*.parentEventGroup*.name.flatten() 169 170 // only set name field when there's actual data 171 if (!names.every {!it}) eventFieldMap['name'] = names 172 173 } 174 175 [ 'Subject Data' : getFieldValues(samples, fieldMap['Subject Data']*.name, 'parentSubject'), 176 'Sampling Event Data' : getFieldValues(samples, fieldMap['Sampling Event Data']*.name, 'parentEvent'), 177 'Sample Data' : getFieldValues(samples, fieldMap['Sample Data']*.name), 178 'Event Group' : eventFieldMap, 179 'Module Measurement Data': measurementTokens*.name ? 
requestModuleMeasurements(assay, measurementTokens, samples) : [:] 180 ] 181 } 182 183 /** 184 * Retrieves measurement names from the module through a rest call 185 * 186 * @param consumer the url of the module 187 * @param path path of the rest call to the module 188 * @return 189 */ 190 def requestModuleMeasurementNames(assay) { 191 192 def moduleUrl = assay.module.url 193 194 def path = moduleUrl + "/rest/getMeasurements/query?assayToken=$assay.assayUUID" 195 196 def jsonArray = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path) 197 198 jsonArray*.toString() 199 200 } 201 202 /** 203 * Retrieves module measurement data through a rest call to the module 204 * 205 * @param assay Assay for which the module measurements should be retrieved 206 * @param fields List with the names of the fields to be retrieved. Format: [ [ name: 'measurementName1' ], [ name: 'measurementName2' ] ] 207 * @param samples Samples for which the module 208 * @return 209 */ 210 def requestModuleMeasurements(assay, fields, samples) { 211 212 def moduleUrl = assay.module.url 213 214 def tokenString = '' 215 216 fields.each{ 217 tokenString+="&measurementToken=${it.name.encodeAsURL()}" 218 } 219 220 def path = moduleUrl + "/rest/getMeasurementData/query?assayToken=$assay.assayUUID" + tokenString 221 222 def (sampleTokens, measurementTokens, moduleData) = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path) 223 224 if (!sampleTokens?.size()) return [] 225 226 // Convert the three different maps into a map like: 151 def eventFieldMap = [:] 152 153 // check whether event group data was requested 154 if (fieldMap['Event Group']) { 155 156 def names = samples*.parentEventGroup*.name.flatten() 157 158 // only set name field when there's actual data 159 if (!names.every {!it}) eventFieldMap['name'] = names 160 161 } 162 163 [ 'Subject Data' : getFieldValues(samples, fieldMap['Subject Data']*.name, 'parentSubject'), 164 'Sampling Event Data' : getFieldValues(samples, 
fieldMap['Sampling Event Data']*.name, 'parentEvent'), 165 'Sample Data' : getFieldValues(samples, fieldMap['Sample Data']*.name), 166 'Event Group' : eventFieldMap, 167 168 // If module is not reachable, only the message 'module not reachable' is given for each sample 169 'Module Measurement Data': moduleCommunicationService.isModuleReachable(assay.module.url) ? 170 ( measurementTokens ? requestModuleMeasurements(assay, measurementTokens, samples) : [:] ) : 171 [ "Module error": [ "Module not reachable" ] * samples.size() ] 172 ] 173 } 174 175 /** 176 * Prepend data from study to the data structure 177 * @param assayData Column wise data structure of samples 178 * @param assay Assay object the data should be selected from 179 * @param numValues Number of values for this assay 180 * @return Extended column wise data structure 181 */ 182 def prependStudyData( inputData, Assay assay, numValues ) { 183 if( !assay ) 184 return inputData; 185 186 // Retrieve study data 187 def studyData =[:] 188 assay.parent?.giveFields().each { 189 def value = assay.parent.getFieldValue( it.name ) 190 if( value ) 191 studyData[ it.name ] = [value] * numValues 192 } 193 194 return [ 195 'Study Data': studyData 196 ] + inputData 197 } 198 199 /** 200 * Prepend data from assay to the data structure 201 * @param assayData Column wise data structure of samples 202 * @param assay Assay object the data should be selected from 203 * @param numValues Number of values for this assay 204 * @return Extended column wise data structure 205 */ 206 def prependAssayData( inputData, Assay assay, numValues ) { 207 if( !assay ) 208 return inputData; 209 210 // Retrieve assay data 211 def assayData = [:] 212 assay.giveFields().each { 213 def value = assay.getFieldValue( it.name ) 214 if( value ) 215 assayData[ it.name ] = [value] * numValues 216 } 217 218 return [ 219 'Assay Data': assayData 220 ] + inputData 221 } 222 223 /** 224 * Retrieves measurement names from the module through a rest call 225 * 226 
* @param consumer the url of the module 227 * @param path path of the rest call to the module 228 * @return 229 */ 230 def requestModuleMeasurementNames(assay) { 231 232 def moduleUrl = assay.module.url 233 234 def path = moduleUrl + "/rest/getMeasurements/query?assayToken=$assay.assayUUID" 235 236 def jsonArray = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path) 237 238 jsonArray.collect { 239 if( it == JSONObject.NULL ) 240 return "" 241 else 242 return it.toString() 243 } 244 245 } 246 247 /** 248 * Retrieves module measurement data through a rest call to the module 249 * 250 * @param assay Assay for which the module measurements should be retrieved 251 * @param measurementTokens List with the names of the fields to be retrieved. Format: [ 'measurementName1', 'measurementName2' ] 252 * @param samples Samples for which the module 253 * @return 254 */ 255 def requestModuleMeasurements(assay, inputMeasurementTokens, samples) { 256 257 def moduleUrl = assay.module.url 258 259 def tokenString = '' 260 261 inputMeasurementTokens.each{ 262 tokenString+="&measurementToken=${it.encodeAsURL()}" 263 } 264 265 def path = moduleUrl + "/rest/getMeasurementData/query?assayToken=$assay.assayUUID" + tokenString 266 267 def (sampleTokens, measurementTokens, moduleData) = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path) 268 269 if (!sampleTokens?.size()) return [] 270 271 // Convert the three different maps into a map like: 227 272 // 228 273 // [ "measurement 1": [ value1, value2, value3 ], … … 232 277 def map = [:] 233 278 def numSampleTokens = sampleTokens.size(); 234 279 235 280 measurementTokens.eachWithIndex { measurementToken, measurementIndex -> 236 281 def measurements = []; … … 243 288 def tokenIndex = sampleTokens.indexOf( sample.giveUUID() ); 244 289 def valueIndex = measurementIndex * numSampleTokens + tokenIndex; 245 290 246 291 // If the module data is in the wrong format, show an error in the log file 247 292 // and return a 
null value for this measurement. … … 250 295 measurements << null 251 296 } else { 252 measurements << moduleData[ valueIndex ].toString();297 measurements << ( moduleData[ valueIndex ] == JSONObject.NULL ? "" : moduleData[ valueIndex ].toString() ); 253 298 } 254 299 } else { … … 260 305 261 306 return map; 262 } 263 264 /** 265 * Converts column 266 * @param columnData multidimensional map containing column data. 267 * On the top level, the data must be grouped by category. Each key is the 268 * category title and the values are maps representing the columns. Each 269 * column also has a title (its key) and a list of values. Columns must be 270 * equally sized. 271 * 272 * For example, consider the following map: 273 * [Category1: 274 * [Column1: [1,2,3], Column2: [4,5,6]], 275 * Category2: 276 * [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]] 277 * 278 * which will be written as: 279 * 280 * | Category1 | | Category2 | | | 281 * | Column1 | Column2 | Column3 | Column4 | Column5 | 282 * | 1 | 4 | 7 | 10 | 13 | 283 * | 2 | 5 | 8 | 11 | 14 | 284 * | 3 | 6 | 9 | 12 | 15 | 285 * 286 * @return row wise data 287 */ 288 def convertColumnToRowStructure(columnData) { 289 290 // check if all columns have the dimensionality 2 291 if (columnData.every { it.value.every { it.value instanceof ArrayList } }) { 292 293 def headers = [[],[]] 294 295 columnData.each { category -> 296 297 if (category.value.size()) { 298 299 // put category keys into first row separated by null values 300 // wherever there are > 1 columns per category 301 headers[0] += [category.key] + [null] * (category.value.size() - 1) 302 303 // put non-category column headers into 2nd row 304 headers[1] += category.value.collect{it.key} 305 306 } 307 308 } 309 310 def d = [] 311 312 // add all column wise data into 'd' 313 columnData.each { it.value.each { d << it.value } } 314 315 // transpose d into row wise data and combine with header rows 316 headers + d.transpose() 317 } 318 319 } 320 321 
/** 322 * Export column wise data in Excel format to a stream. 323 * 324 * @param columnData Multidimensional map containing column data 325 * @param outputStream Stream to write to 326 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output 327 * @return 328 */ 329 def exportColumnWiseDataToExcelFile(columnData, outputStream, useOfficeOpenXML = true) { 330 331 // transform data into row based structure for easy writing 332 def rows = convertColumnToRowStructure(columnData) 333 334 if (rows) { 335 336 exportRowWiseDataToExcelFile(rows, outputStream, useOfficeOpenXML) 337 338 } else { 339 340 throw new Exception('Wrong column data format.') 341 342 } 343 344 } 345 346 /** 347 * Export row wise data in Excel format to a stream 348 * 349 * @param rowData List of lists containing for each row all cell values 350 * @param outputStream Stream to write to 351 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output 352 * @return 353 */ 354 def exportRowWiseDataToExcelFile(rowData, outputStream, useOfficeOpenXML = true) { 355 356 Workbook wb = useOfficeOpenXML ? new XSSFWorkbook() : new HSSFWorkbook() 357 Sheet sheet = wb.createSheet() 358 359 // create all rows 360 rowData.size().times { sheet.createRow it } 361 362 sheet.eachWithIndex { Row row, ri -> 363 364 // create appropriate number of cells for this row 365 rowData[ri].size().times { row.createCell it } 366 367 row.eachWithIndex { Cell cell, ci -> 368 369 // Numbers and values of type boolean, String, and Date can be 370 // written as is, other types need converting to String 371 def value = rowData[ri][ci] 372 373 value = (value instanceof Number | value?.class in [boolean.class, String.class, Date.class]) ? 
value : value?.toString() 374 375 // write the value (or an empty String if null) to the cell 376 cell.setCellValue(value ?: '') 377 378 } 379 380 } 381 382 wb.write(outputStream) 383 outputStream.close() 384 385 } 307 } 308 309 /** 310 * Merges the data from multiple studies into a structure that can be exported to an excel file. The format for each assay is 311 * 312 * [Category1: 313 * [Column1: [1,2,3], Column2: [4,5,6]], 314 * Category2: 315 * [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]] 316 * 317 * Where the category describes the category of data that is presented (e.g. subject, sample etc.) and the column names describe 318 * the fields that are present. Each entry in the lists shows the value for that column for an entity. In this case, 3 entities are described. 319 * Each field should give values for all entities, so the length of all value-lists should be the same. 320 * 321 * Example: If the following input is given (2 assays) 322 * 323 * [ 324 * [Category1: 325 * [Column1: [1,2,3], Column2: [4,5,6]], 326 * Category2: 327 * [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]], 328 * [Category1: 329 * [Column1: [16,17], Column6: [18,19]], 330 * Category3: 331 * [Column3: [20,21], Column8: [22,23]]] 332 * ] 333 * 334 * the output will be (5 entries for each column, empty values for fields that don't exist in some assays) 335 * 336 * [ 337 * [Category1: 338 * [Column1: [1,2,3,16,17], Column2: [4,5,6,,], Column6: [,,,18,19]], 339 * Category2: 340 * [Column3: [7,8,9,,], Column4: [10,11,12,,], Column5: [13,14,15,,]], 341 * Category3: 342 * [Column3: [,,,20,21], Column8: [,,,22,23]] 343 * ] 344 * 345 * 346 * @param columnWiseAssayData List with each entry being the column wise data of an assay. The format for each 347 * entry is described above 348 * @return Hashmap Combined assay data, in the same structure as each input entry. Empty values are given as an empty string. 
349 * So for input entries 350 */ 351 def mergeColumnWiseDataOfMultipleStudies(def columnWiseAssayData) { 352 // Compute the number of values that is expected for each assay. This number is 353 // used later on to determine the number of empty fields to add if a field is not present in this 354 // assay 355 def numValues = columnWiseAssayData.collect { assay -> 356 for( cat in assay ) { 357 if( cat ) { 358 for( field in cat.value ) { 359 if( field?.value?.size() > 0 ) { 360 return field.value.size(); 361 } 362 } 363 } 364 } 365 366 return 0; 367 } 368 369 // Merge categories from all assays. Create a list for all categories 370 def categories = columnWiseAssayData*.keySet().toList().flatten().unique(); 371 def mergedColumnWiseData = [:] 372 categories.each { category -> 373 // Only work with this category for all assays 374 def categoryData = columnWiseAssayData*.getAt( category ); 375 376 // Find the different fields in all assays 377 def categoryFields = categoryData.findAll{ it }*.keySet().toList().flatten().unique(); 378 379 // Find data for all assays for these fields. If the fields do not exist, return an empty string 380 def categoryValues = [:] 381 categoryFields.each { field -> 382 categoryValues[ field ] = []; 383 384 // Loop through all assays 385 categoryData.eachWithIndex { assayValues, idx -> 386 if( assayValues && assayValues.containsKey( field ) ) { 387 // Append the values if they exist 388 categoryValues[ field ] += assayValues[ field ]; 389 } else { 390 // Append empty string for each entity if the field doesn't exist 391 categoryValues[ field ] += [""] * numValues[ idx ] 392 } 393 } 394 } 395 396 mergedColumnWiseData[ category ] = categoryValues 397 } 398 399 return mergedColumnWiseData; 400 } 401 402 /** 403 * Converts column 404 * @param columnData multidimensional map containing column data. 405 * On the top level, the data must be grouped by category. Each key is the 406 * category title and the values are maps representing the columns. 
Each 407 * column also has a title (its key) and a list of values. Columns must be 408 * equally sized. 409 * 410 * For example, consider the following map: 411 * [Category1: 412 * [Column1: [1,2,3], Column2: [4,5,6]], 413 * Category2: 414 * [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]] 415 * 416 * which will be written as: 417 * 418 * | Category1 | | Category2 | | | 419 * | Column1 | Column2 | Column3 | Column4 | Column5 | 420 * | 1 | 4 | 7 | 10 | 13 | 421 * | 2 | 5 | 8 | 11 | 14 | 422 * | 3 | 6 | 9 | 12 | 15 | 423 * 424 * @return row wise data 425 */ 426 def convertColumnToRowStructure(columnData) { 427 428 // check if all columns have the dimensionality 2 429 if (columnData.every { it.value.every { it.value instanceof ArrayList } }) { 430 431 def headers = [[],[]] 432 433 columnData.each { category -> 434 435 if (category.value.size()) { 436 437 // put category keys into first row separated by null values 438 // wherever there are > 1 columns per category 439 headers[0] += [category.key] + [null] * (category.value.size() - 1) 440 441 // put non-category column headers into 2nd row 442 headers[1] += category.value.collect{it.key} 443 444 } 445 446 } 447 448 def d = [] 449 450 // add all column wise data into 'd' 451 columnData.each { it.value.each { d << it.value } } 452 453 // transpose d into row wise data and combine with header rows 454 headers + d.transpose() 455 } 456 457 } 458 459 /** 460 * Export column wise data in Excel format to a stream. 
461 * 462 * @param columnData Multidimensional map containing column data 463 * @param outputStream Stream to write to 464 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output 465 * @return 466 */ 467 def exportColumnWiseDataToExcelFile(columnData, outputStream, useOfficeOpenXML = true) { 468 469 // transform data into row based structure for easy writing 470 def rows = convertColumnToRowStructure(columnData) 471 472 if (rows) { 473 474 exportRowWiseDataToExcelFile(rows, outputStream, useOfficeOpenXML) 475 476 } else { 477 478 throw new Exception('Wrong column data format.') 479 480 } 481 482 } 483 484 /** 485 * Export row wise data in Excel format to a stream 486 * 487 * @param rowData List of lists containing for each row all cell values 488 * @param outputStream Stream to write to 489 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output 490 * @return 491 */ 492 def exportRowWiseDataToExcelFile(rowData, outputStream, useOfficeOpenXML = true) { 493 Workbook wb = useOfficeOpenXML ? new XSSFWorkbook() : new HSSFWorkbook() 494 Sheet sheet = wb.createSheet() 495 496 exportRowWiseDataToExcelSheet( rowData, sheet ); 497 498 wb.write(outputStream) 499 outputStream.close() 500 } 501 502 /** 503 * Export row wise data for multiple assays in Excel format (separate sheets) to a stream 504 * 505 * @param rowData List of structures with rowwise data for each assay 506 * @param outputStream Stream to write to 507 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output 508 * @return 509 */ 510 def exportRowWiseDataForMultipleAssaysToExcelFile(assayData, outputStream, useOfficeOpenXML = true) { 511 Workbook wb = useOfficeOpenXML ? 
new XSSFWorkbook() : new HSSFWorkbook() 512 513 assayData.each { rowData -> 514 Sheet sheet = wb.createSheet() 515 516 exportRowWiseDataToExcelSheet( rowData, sheet ); 517 } 518 519 wb.write(outputStream) 520 outputStream.close() 521 } 522 523 /** 524 * Export row wise data in Excel format to a given sheet in an excel workbook 525 * 526 * @param rowData List of lists containing for each row all cell values 527 * @param sheet Excel sheet to append the 528 * @return 529 */ 530 def exportRowWiseDataToExcelSheet(rowData, Sheet sheet) { 531 // create all rows 532 rowData.size().times { sheet.createRow it } 533 534 sheet.eachWithIndex { Row row, ri -> 535 if( rowData[ ri ] ) { 536 // create appropriate number of cells for this row 537 rowData[ri].size().times { row.createCell it } 538 539 row.eachWithIndex { Cell cell, ci -> 540 541 // Numbers and values of type boolean, String, and Date can be 542 // written as is, other types need converting to String 543 def value = rowData[ri][ci] 544 545 value = (value instanceof Number | value?.class in [boolean.class, String.class, Date.class]) ? value : value?.toString() 546 547 // write the value (or an empty String if null) to the cell 548 cell.setCellValue(value ?: '') 549 550 } 551 } 552 } 553 } 386 554 387 555 }
Note: See TracChangeset
for help on using the changeset viewer.