Changeset 1752
- Timestamp: Apr 11, 2011, 4:02:04 PM
- Location: trunk/grails-app
- Files: 4 edited
trunk/grails-app/controllers/dbnp/query/AdvancedQueryController.groovy
r1717 → r1752 (lines 592-613): the export options offered for Study results now pass an explicit 'format' parameter to the SimpleTox exporter, a new "Export as Excel" option is added for Study results, and a new "Assay" case offers Excel export of the selected assays:

	name:"simpletox",
	description: "Export as SimpleTox",
	url: createLink( controller: "exporter", action: "export", params: [ 'format': 'list', 'ids' : ids ] )
	], [
		module: "gscf",
		name:"excel",
		description: "Export as Excel",
		url: createLink( controller: "study", action: "exportToExcel", params: [ 'format': 'list', 'ids' : ids ] )
	]]
	case "Assay":
		def ids = []
		s.filterResults(selectedIds).each {
			ids << it.id
		}

		return [[
			module: "gscf",
			name:"excel",
			description: "Export as Excel",
			url: createLink( controller: "assay", action: "exportToExcel", params: [ 'format': 'list', 'ids' : ids ] )
		]]
	case "Sample":
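For context, a rough sketch (not part of the changeset) of the link such an entry produces. With the default Grails URL mapping, a list-valued 'ids' parameter is typically rendered as a repeated request parameter, which params.list('ids') in the receiving controller turns back into a list of strings; the /gscf context path and the ids are assumed for illustration only.

	// Illustrative only: hypothetical ids 12 and 57 selected in the advanced query screen
	def url = createLink( controller: "assay", action: "exportToExcel",
			params: [ 'format': 'list', 'ids': [12, 57] ] )
	// e.g. /gscf/assay/exportToExcel?format=list&ids=12&ids=57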
trunk/grails-app/controllers/dbnp/studycapturing/AssayController.groovy
r1730 → r1752: the existing actions (index, list, create, save, show, showByToken, edit, update, delete, doExport, errorPage) and most of the excelExportFlow web flow only show up in the diff because of whitespace changes; there are two substantive changes.

First, in the selectFields state of excelExportFlow the selected measurement tokens are no longer wrapped in [name: token] maps; they are simply read as a list of strings:

	def measurementTokens = []

	if (params."cat_4" == 'on') {
		measurementTokens = params.list( "measurementToken" )
	}

	def assayData = assayService.collectAssayData(flow.assay, fieldMapSelection, measurementTokens)

Second, three export actions are added so that one or more assays can be exported to Excel, either with every assay on its own sheet or merged into one list:

	/**
	 * Method to export one or more assays to excel.
	 *
	 * @param params.ids    One or more assay IDs to export
	 * @param params.format "list" in order to export all assays in one big excel sheet
	 *                      "sheets" in order to export every assay on its own sheet (default)
	 */
	def exportToExcel = {
		def format = params.get( 'format', 'sheets' );
		if( format == 'list' ) {
			exportToExcelAsList( params );
		} else {
			exportToExcelAsSheets( params );
		}
	}

	/**
	 * Method to export one or more assays to excel in separate sheets.
	 *
	 * @param params.ids One or more assay IDs to export
	 */
	def exportToExcelAsSheets = {
		def ids = params.list( 'ids' ).findAll { it.isLong() }.collect { Long.valueOf( it ) };

		if( !ids ) {
			flash.errorMessage = "No assay ids given";
			redirect( action: "errorPage" );
			return;
		}

		// Find all assays for the given ids
		def assays = ids.unique().collect { id -> Assay.get( id ) }.findAll { it }

		// Send headers to the browser so the user can download the file
		def filename = 'export.xlsx'
		response.setHeader("Content-disposition", "attachment;filename=\"${filename}\"")
		response.setContentType("application/octet-stream")

		try {
			// Loop through all assays to collect the data
			def rowWiseAssayData = [];

			assays.each { assay ->
				// Determine which fields should be exported for this assay
				def fieldMap = assayService.collectAssayTemplateFields(assay)
				def measurementTokens = fieldMap.remove('Module Measurement Data')

				// Retrieve data for this assay and convert it to rows
				def assayData = assayService.collectAssayData( assay, fieldMap, measurementTokens );
				def rowData = assayService.convertColumnToRowStructure(assayData)

				// Put each assay on another sheet
				rowWiseAssayData << rowData;
			}

			assayService.exportRowWiseDataForMultipleAssaysToExcelFile( rowWiseAssayData, response.getOutputStream() )

			response.outputStream.flush()
		} catch (Exception e) {
			throw e;
		}
	}

	/**
	 * Method to export one or more assays to excel as a single merged list.
	 *
	 * @param params.ids One or more assay IDs to export
	 */
	def exportToExcelAsList = {
		def ids = params.list( 'ids' ).findAll { it.isLong() }.collect { Long.valueOf( it ) };

		if( !ids ) {
			flash.errorMessage = "No assay ids given";
			redirect( action: "errorPage" );
			return;
		}

		// If only 1 assay is asked for, don't bother with merging multiple assays.
		// In that case just use the export method to export one assay per sheet
		if( ids.size() == 1 )
			return exportToExcelAsSheets( params );

		// Find all assays for the given ids
		def assays = ids.unique().collect { id -> Assay.get( id ) }.findAll { it }

		// Send headers to the browser so the user can download the file
		def filename = 'export.xlsx'
		response.setHeader("Content-disposition", "attachment;filename=\"${filename}\"")
		response.setContentType("application/octet-stream")

		try {
			// Loop through all assays to collect the data
			def columnWiseAssayData = [];

			assays.each { assay ->
				// Determine which fields should be exported for this assay
				def fieldMap = assayService.collectAssayTemplateFields(assay)
				def measurementTokens = fieldMap.remove('Module Measurement Data')

				// Retrieve column wise data for this assay
				def assayData = assayService.collectAssayData( assay, fieldMap, measurementTokens );

				// Prepend study and assay data to the list
				assayData = assayService.prependAssayData( assayData, assay, assay.samples?.size() )
				assayData = assayService.prependStudyData( assayData, assay, assay.samples?.size() )

				columnWiseAssayData << assayData;
			}

			// Merge data from all assays and convert it to rows for the excel export
			def mergedColumnWiseData = assayService.mergeColumnWiseDataOfMultipleStudies( columnWiseAssayData );

			def rowData = assayService.convertColumnToRowStructure(mergedColumnWiseData)
			assayService.exportRowWiseDataToExcelFile( rowData, response.getOutputStream() )

			response.outputStream.flush()
		} catch (Exception e) {
			throw e;
		}
	}

The existing errorPage action (render(view: 'excelExport/errorPage')) now also serves as the error page for these export actions.
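All three export actions drive the same AssayService pipeline. A minimal sketch of that pipeline for a single assay, assuming a valid assay id, the injected assayService, and an arbitrary OutputStream named out (all placeholders, not part of the changeset):

	// Collect the exportable fields per category; module measurements are handled separately
	def assay = Assay.get( assayId )
	def fieldMap = assayService.collectAssayTemplateFields( assay )
	def measurementTokens = fieldMap.remove( 'Module Measurement Data' )

	// Gather the column wise data, convert it to rows and write a single-sheet xlsx workbook
	def assayData = assayService.collectAssayData( assay, fieldMap, measurementTokens )
	def rowData = assayService.convertColumnToRowStructure( assayData )
	assayService.exportRowWiseDataToExcelFile( rowData, out )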
trunk/grails-app/controllers/dbnp/studycapturing/StudyController.groovy
r1735 → r1752: one action is added after the existing action that renders a study's assays as JSON (line 385). It exports all data from the given studies to Excel by resolving the study ids to assay ids and redirecting to the assay controller:

	/**
	 * Exports all data from the given studies to excel. This is done using a redirect to the
	 * assay controller.
	 *
	 * @param ids           ids of the studies to export
	 * @param params.format "list" in order to export all assays in one big excel sheet
	 *                      "sheets" in order to export every assay on its own sheet (default)
	 * @see AssayController.exportToExcel
	 */
	def exportToExcel = {
		def ids = params.list( 'ids' ).findAll { it.isLong() }.collect { Long.valueOf( it ) };

		if( !ids ) {
			flash.errorMessage = "No study ids given";
			redirect( controller: "assay", action: "errorPage" );
			return;
		}

		// Find all assay ids for these studies
		def assayIds = ids.collect { id ->
			def study = Study.get( id );
			if( study ) {
				return study.assays.collect { assay -> assay.id }
			} else {
				return []
			}
		}.flatten()

		if( !assayIds ) {
			flash.errorMessage = "No assays found for the given studies";
			redirect( controller: "assay", action: "errorPage" );
			return;
		}

		// Create url to redirect to
		def format = params.get( "format", "sheets" )
		redirect( controller: "assay", action: "exportToExcel", params: [ "format": format, "ids": assayIds ] );
	}
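Keeping the actual Excel generation in AssayController means this action only has to translate study ids into assay ids. A toy illustration (hypothetical data, not part of the changeset) of how the collect/flatten above behaves when a study has no assays or does not exist:

	// Suppose the requested study ids were [3, 8, 99]: study 3 has assays 10 and 11,
	// study 8 has no assays, and study 99 does not exist.
	def idsPerStudy = [ [10, 11], [], [] ]   // what the collect over the study ids yields
	assert idsPerStudy.flatten() == [10, 11] // these assay ids are passed on in the redirect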
trunk/grails-app/services/dbnp/studycapturing/AssayService.groovy
r1731 → r1752: the import block gains

	import org.codehaus.groovy.grails.web.json.JSONObject

and the service is extended so that module failures and JSON null values no longer break the export, and so that data from multiple assays can be merged into one sheet.

collectAssayTemplateFields no longer throws an exception when the assay's module cannot be reached; instead the 'Module Measurement Data' category falls back to a single "Module error" field, which is filled later on. A null check is also added when testing whether any template entity has a value for a field:

	def getUsedTemplateFields = { templateEntities ->
		// gather all unique and non null template fields that have values
		templateEntities*.giveFields().flatten().unique().findAll{ field ->
			field && templateEntities.any { it && it.fieldExists(field.name) && it.getFieldValue(field.name) }
		}.collect{[name: it.name, comment: it.comment]}
	}

	def samples = assay.samples
	[ 'Subject Data'          : getUsedTemplateFields( samples*."parentSubject".unique() ),
	  'Sampling Event Data'   : getUsedTemplateFields( samples*."parentEvent".unique() ),
	  'Sample Data'           : getUsedTemplateFields( samples ),
	  'Event Group'           : [[name: 'name', comment: 'Name of Event Group']],

	  // If module is not reachable, only the field 'module error' is returned, and is filled later on.
	  'Module Measurement Data': moduleCommunicationService.isModuleReachable(assay.module.url) ?
	      requestModuleMeasurementNames(assay) : [ [ name: "Module error" ] ]
	]

collectAssayData keeps its internal closures (collectFieldValuesForTemplateEntities and getFieldValues) unchanged, but likewise drops the up-front reachability exception. When the module cannot be reached, every sample gets the value "Module not reachable" in a "Module error" column; note that the measurement tokens are now plain strings instead of [name: …] maps:

	[ 'Subject Data'          : getFieldValues(samples, fieldMap['Subject Data']*.name, 'parentSubject'),
	  'Sampling Event Data'   : getFieldValues(samples, fieldMap['Sampling Event Data']*.name, 'parentEvent'),
	  'Sample Data'           : getFieldValues(samples, fieldMap['Sample Data']*.name),
	  'Event Group'           : eventFieldMap,

	  // If module is not reachable, only the message 'module not reachable' is given for each sample
	  'Module Measurement Data': moduleCommunicationService.isModuleReachable(assay.module.url) ?
	      ( measurementTokens ? requestModuleMeasurements(assay, measurementTokens, samples) : [:] ) :
	      [ "Module error": [ "Module not reachable" ] * samples.size() ]
	]

Two helper methods are added to prepend study and assay level fields to an assay's column wise data, repeating each value once per sample:

	/**
	 * Prepend data from study to the data structure
	 * @param inputData Column wise data structure of samples
	 * @param assay     Assay object the data should be selected from
	 * @param numValues Number of values for this assay
	 * @return Extended column wise data structure
	 */
	def prependStudyData( inputData, Assay assay, numValues ) {
		if( !assay )
			return inputData;

		// Retrieve study data
		def studyData = [:]
		assay.parent?.giveFields().each {
			def value = assay.parent.getFieldValue( it.name )
			if( value )
				studyData[ it.name ] = [value] * numValues
		}

		return [ 'Study Data': studyData ] + inputData
	}

	/**
	 * Prepend data from assay to the data structure
	 * @param inputData Column wise data structure of samples
	 * @param assay     Assay object the data should be selected from
	 * @param numValues Number of values for this assay
	 * @return Extended column wise data structure
	 */
	def prependAssayData( inputData, Assay assay, numValues ) {
		if( !assay )
			return inputData;

		// Retrieve assay data
		def assayData = [:]
		assay.giveFields().each {
			def value = assay.getFieldValue( it.name )
			if( value )
				assayData[ it.name ] = [value] * numValues
		}

		return [ 'Assay Data': assayData ] + inputData
	}

requestModuleMeasurementNames now maps JSON null values to empty strings instead of the literal string "null":

	def jsonArray = moduleCommunicationService.callModuleRestMethodJSON(moduleUrl, path)

	jsonArray.collect {
		if( it == JSONObject.NULL )
			return ""
		else
			return it.toString()
	}

requestModuleMeasurements takes its measurement tokens as plain strings (the parameter is renamed to inputMeasurementTokens) and likewise converts JSON null values in the module data to empty strings; the rest of the method is unchanged:

	inputMeasurementTokens.each{
		tokenString += "&measurementToken=${it.encodeAsURL()}"
	}

	...

	measurements << ( moduleData[ valueIndex ] == JSONObject.NULL ? "" : moduleData[ valueIndex ].toString() );

A new method merges the column wise data of multiple assays into one structure, padding fields that are missing in an assay with empty strings:

	/**
	 * Merges the data from multiple assays into a structure that can be exported to an excel file.
	 * The format for each assay is
	 *
	 * [Category1:
	 *     [Column1: [1,2,3], Column2: [4,5,6]],
	 *  Category2:
	 *     [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]]
	 *
	 * where the category describes the kind of data presented (e.g. subject, sample) and the
	 * column names describe the fields that are present. Each entry in a value list gives the
	 * value of that column for one entity; here three entities are described. Every field should
	 * give values for all entities, so all value lists must have the same length.
	 *
	 * Example: if the following input is given (2 assays)
	 *
	 * [
	 *   [Category1:
	 *       [Column1: [1,2,3], Column2: [4,5,6]],
	 *    Category2:
	 *       [Column3: [7,8,9], Column4: [10,11,12], Column5: [13,14,15]]],
	 *   [Category1:
	 *       [Column1: [16,17], Column6: [18,19]],
	 *    Category3:
	 *       [Column3: [20,21], Column8: [22,23]]]
	 * ]
	 *
	 * the output will be (5 entries for each column, empty values for fields that don't exist in some assays)
	 *
	 * [Category1:
	 *     [Column1: [1,2,3,16,17], Column2: [4,5,6,,], Column6: [,,,18,19]],
	 *  Category2:
	 *     [Column3: [7,8,9,,], Column4: [10,11,12,,], Column5: [13,14,15,,]],
	 *  Category3:
	 *     [Column3: [,,,20,21], Column8: [,,,22,23]]]
	 *
	 * @param columnWiseAssayData List with each entry being the column wise data of an assay, in
	 *        the format described above
	 * @return Map with the combined assay data, in the same structure as each input entry.
	 *         Empty values are given as an empty string.
	 */
	def mergeColumnWiseDataOfMultipleStudies(def columnWiseAssayData) {
		// Compute the number of values that is expected for each assay. This number is used
		// later on to determine the number of empty fields to add if a field is not present
		// in this assay
		def numValues = columnWiseAssayData.collect { assay ->
			for( cat in assay ) {
				if( cat ) {
					for( field in cat.value ) {
						if( field?.value?.size() > 0 ) {
							return field.value.size();
						}
					}
				}
			}

			return 0;
		}

		// Merge categories from all assays. Create a list for all categories
		def categories = columnWiseAssayData*.keySet().toList().flatten().unique();
		def mergedColumnWiseData = [:]
		categories.each { category ->
			// Only work with this category for all assays
			def categoryData = columnWiseAssayData*.getAt( category );

			// Find the different fields in all assays
			def categoryFields = categoryData.findAll{ it }*.keySet().toList().flatten().unique();

			// Find data for all assays for these fields. If a field does not exist, use an empty string
			def categoryValues = [:]
			categoryFields.each { field ->
				categoryValues[ field ] = [];

				// Loop through all assays
				categoryData.eachWithIndex { assayValues, idx ->
					if( assayValues && assayValues.containsKey( field ) ) {
						// Append the values if they exist
						categoryValues[ field ] += assayValues[ field ];
					} else {
						// Append an empty string for each entity if the field doesn't exist
						categoryValues[ field ] += [""] * numValues[ idx ]
					}
				}
			}

			mergedColumnWiseData[ category ] = categoryValues
		}

		return mergedColumnWiseData;
	}

convertColumnToRowStructure and exportColumnWiseDataToExcelFile are unchanged. The cell writing loop of exportRowWiseDataToExcelFile is moved into a new exportRowWiseDataToExcelSheet helper (which now also skips empty rows), so that a second new method can write multiple assays to separate sheets of one workbook:

	/**
	 * Export row wise data in Excel format to a stream
	 *
	 * @param rowData          List of lists containing for each row all cell values
	 * @param outputStream     Stream to write to
	 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output
	 */
	def exportRowWiseDataToExcelFile(rowData, outputStream, useOfficeOpenXML = true) {
		Workbook wb = useOfficeOpenXML ? new XSSFWorkbook() : new HSSFWorkbook()
		Sheet sheet = wb.createSheet()

		exportRowWiseDataToExcelSheet( rowData, sheet );

		wb.write(outputStream)
		outputStream.close()
	}

	/**
	 * Export row wise data for multiple assays in Excel format (separate sheets) to a stream
	 *
	 * @param assayData        List of structures with row wise data for each assay
	 * @param outputStream     Stream to write to
	 * @param useOfficeOpenXML Flag to specify xlsx (standard) or xls output
	 */
	def exportRowWiseDataForMultipleAssaysToExcelFile(assayData, outputStream, useOfficeOpenXML = true) {
		Workbook wb = useOfficeOpenXML ? new XSSFWorkbook() : new HSSFWorkbook()

		assayData.each { rowData ->
			Sheet sheet = wb.createSheet()

			exportRowWiseDataToExcelSheet( rowData, sheet );
		}

		wb.write(outputStream)
		outputStream.close()
	}

	/**
	 * Export row wise data to a given sheet in an excel workbook
	 *
	 * @param rowData List of lists containing for each row all cell values
	 * @param sheet   Excel sheet to append the rows to
	 */
	def exportRowWiseDataToExcelSheet(rowData, Sheet sheet) {
		// create all rows
		rowData.size().times { sheet.createRow it }

		sheet.eachWithIndex { Row row, ri ->
			if( rowData[ ri ] ) {
				// create appropriate number of cells for this row
				rowData[ri].size().times { row.createCell it }

				row.eachWithIndex { Cell cell, ci ->
					// Numbers and values of type boolean, String, and Date can be
					// written as is, other types need converting to String
					def value = rowData[ri][ci]
					value = (value instanceof Number | value?.class in [boolean.class, String.class, Date.class]) ? value : value?.toString()

					// write the value (or an empty String if null) to the cell
					cell.setCellValue(value ?: '')
				}
			}
		}
	}
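A minimal worked example (hypothetical data, assuming the injected assayService) of how the new merge method and the existing convertColumnToRowStructure compose for two assays that share only one column:

	def a1 = [ Category1: [ Column1: [1, 2, 3], Column2: [4, 5, 6] ] ]
	def a2 = [ Category1: [ Column1: [16, 17],  Column6: [18, 19] ] ]

	def merged = assayService.mergeColumnWiseDataOfMultipleStudies( [a1, a2] )
	// merged == [ Category1: [ Column1: [1, 2, 3, 16, 17],
	//                          Column2: [4, 5, 6, "", ""],
	//                          Column6: ["", "", "", 18, 19] ] ]

	def rows = assayService.convertColumnToRowStructure( merged )
	// rows[0] == ['Category1', null, null]           (category header row)
	// rows[1] == ['Column1', 'Column2', 'Column6']   (column header row)
	// rows[2] == [1, 4, ''], rows[3] == [2, 5, ''], ..., rows[6] == [17, '', 19]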