1 | package nl.tno.massSequencing.files |
---|
2 | |
---|
3 | import org.codehaus.groovy.grails.commons.ConfigurationHolder |
---|
4 | import org.hibernate.SessionFactory |
---|
5 | import grails.converters.*; |
---|
6 | import nl.tno.massSequencing.* |
---|
7 | import nl.tno.massSequencing.auth.User |
---|
8 | |
---|
/**
 * Handles the import wizard for mass sequencing data: parsing uploaded
 * sequence (FASTA), quality (QUAL), classification (taxonomy) and log files,
 * matching them against assay samples and storing them permanently.
 *
 * Long-running work is delegated to workerService, which tracks state in the
 * http session under a unique process id (session.process holds the work data,
 * session.progress the progress indicators), and is executed in a background
 * thread via runAsync.
 */
class ImportController {
	def fileService
	def fastaService
	def importService
	def classificationService
	def sessionFactory
	def workerService

	/**************************************************************************
	 *
	 * Methods for handling uploaded sequence, quality and classification files
	 *
	 *************************************************************************/

	/**
	 * Starts asynchronous parsing of the uploaded files and shows a screen
	 * to indicate that files are being parsed.
	 *
	 * Expects:
	 *   params.entityType     "run" or "assay"
	 *   params.id             id of the run or assay
	 *   params.sequencefiles  names of previously uploaded files
	 */
	def parseUploadedFiles = {
		def entityType = params.entityType
		def entityId = params.id

		// Check whether files are given
		def names = [] + params.list( 'sequencefiles' )

		if( !names ) {
			flash.message = "No files uploaded for processing"
			if( params.entityType && params.id )
				redirect( controller: params.entityType, action: 'show', 'id': params.id )
			else
				redirect( url: "" )

			return
		}

		// Determine the total size of the files in order to be able to show a
		// progress bar. A file that can not be found on disk counts as 0 bytes;
		// the previous unguarded addition threw a NullPointerException because
		// 'long += null' is not valid.
		long filesize = 0
		names.each {
			filesize += fileService.get( it )?.length() ?: 0
		}

		// Create a unique process identifier
		String processId = workerService.initProcess( session, "Parsing files", 1, filesize )

		session.process[ processId ].filenames = names
		session.process[ processId ].entityId = entityId
		session.process[ processId ].entityType = entityType

		// Retrieve worker URLs: where to go when finished, and where to return on cancel
		def finishUrl = createLink( controller: "import", action: 'parseUploadResult', params: [ processId: processId ] ).toString()
		def returnUrl = createLink( controller: entityType, action: "show", id: entityId ).toString()

		def url = workerService.startProcess( session, processId, finishUrl, returnUrl )

		//
		// Initiate work
		//

		/* Parses uploaded files, discards files we can not handle
		 *
		 * [
		 *    success: [
		 *      [filename: 'abc.fasta', type: FASTA, numSequences: 190]
		 *      [filename: 'cde.fasta', type: FASTA, numSequences: 140]
		 *      [filename: 'abc.qual', type: QUAL, numSequences: 190, avgQuality: 38]
		 *      [filename: 'cde.qual', type: QUAL, numSequences: 140, avgQuality: 29]
		 *    ],
		 *    failure: [
		 *      [filename: 'testing.doc', message: 'Type not recognized']
		 *    ]
		 * ]
		 *
		 * The second parameter is a callback function to update progress indicators
		 */

		// Capture the http session object: the controller's implicit 'session'
		// property is not reliably available inside the runAsync thread.
		def httpSession = session
		def onProgress = { progress, total ->
			// Update progress of the current step
			httpSession.progress[ processId ].stepTotal = total
			httpSession.progress[ processId ].stepProgress = progress
		}
		def newStep = { total, description ->
			// Start a new step
			httpSession.progress[ processId ].stepTotal = total
			httpSession.progress[ processId ].stepProgress = 0

			httpSession.progress[ processId ].stepDescription = description
			httpSession.progress[ processId ].stepNum++
		}

		// Capture the logged in user here, because the security / request context
		// is not available in the background thread.
		def loggedInUser = httpSession.user
		runAsync {
			def entity

			// Determine entity and assaysamples
			switch( httpSession.process[ processId ].entityType ) {
				case "run":
					entity = getRun( httpSession.process[ processId ].entityId )
					break
				case "assay":
					entity = getAssay( httpSession.process[ processId ].entityId, loggedInUser )
					break
				default:
					httpSession.progress[ processId ].error = true
					httpSession.progress[ processId ].finished = true
					return
			}

			if( !entity ) {
				httpSession.progress[ processId ].error = true
				httpSession.progress[ processId ].finished = true
				return
			}

			// Only samples the (captured) user may write to are eligible for matching
			def assaySamples = entity.assaySamples.findAll { it.assay.study.canWrite( loggedInUser ) }

			def parsedFiles = importService.parseFiles( names, onProgress, [ progress: 0, total: httpSession.progress[ processId ].stepTotal ], newStep )

			// Determine excel matches from the uploaded files
			parsedFiles.success = fastaService.inferExcelMatches( parsedFiles.success )

			// Match files with samples in the database
			def matchedFiles = fastaService.matchFiles( parsedFiles.success, assaySamples )

			// Sort files on filename
			matchedFiles.sort { a, b -> a.fasta?.originalfilename <=> b.fasta?.originalfilename }

			// Retrieve all files that have not been matched
			def notMatchedFiles = parsedFiles.success.findAll {
				switch( it.type ) {
					case "fasta":
						return !matchedFiles*.fasta*.filename.contains( it.filename )
					case "qual":
						return !matchedFiles*.feasibleQuals.flatten().filename.contains( it.filename )
					case "taxonomy":
						return !matchedFiles*.feasibleClassifications.flatten().filename.contains( it.filename )
				}
				return false
			}

			// Save file matches in session to use them later on
			httpSession.process[ processId ].processedFiles = [ parsed: parsedFiles, matched: matchedFiles, notMatched: notMatchedFiles ]

			// Check whether quality, classification or logfiles have been added
			def types = [ "fasta", "qual", "taxonomy", "logfile" ]
			def typesExist = [:]
			types.each { type -> typesExist[ type ] = parsedFiles.success.any { it.type == type } }

			httpSession.process[ processId ].fileTypes = typesExist

			// Tell the frontend we are finished
			httpSession.progress[ processId ].finished = true
		}

		redirect( url: url )
	}

	/**
	 * Show result of processing uploaded files (step 1).
	 *
	 * Renders the matching screen where the user confirms which parsed files
	 * belong to which assay sample.
	 */
	def parseUploadResult = {
		def processId = params.processId
		// Load the entity (run or assay) this process was started for
		def entity

		switch( session.process[ processId ].entityType ) {
			case "run":
				entity = getRun( session.process[ processId ].entityId )
				break
			case "assay":
				entity = getAssay( session.process[ processId ].entityId )
				break
			default:
				response.setStatus( 404, "No entity found" )
				render ""
				return
		}

		// Check for existence BEFORE dereferencing the entity. The previous
		// version read entity.assaySamples first and threw a
		// NullPointerException instead of returning the intended 404.
		if( !entity ) {
			response.setStatus( 404, flash.error )
			render ""
			return
		}

		def assaySamples = entity.assaySamples.findAll { it.assay.study.canWrite( session.user ) }

		if( !session.process[ processId ].processedFiles ) {
			flash.error = "Processing of files failed. Maybe the session timed out."
			redirect( controller: params.entityType, action: 'show', 'id': params.id )
			return
		}

		// Find matching sequenceData objects for taxonomy files that have not been matched
		def notMatchedFiles = session.process[ processId ].processedFiles.notMatched
		def extraClassifications = notMatchedFiles.findAll { it.type == "taxonomy" }
		extraClassifications.collect {
			// Find all sequence files that have the correct number of sequences and are in the list of assaySamples
			it[ 'feasibleSequenceData' ] = SequenceData.findAllByNumSequences( it.numLines ).findAll { assaySamples.contains( it.sample ) }
			return it
		}

		// Model for the matching screen
		[ entityType: session.process[ processId ].entityType, processId: processId, entity: entity,
			parsedFiles: session.process[ processId ].processedFiles.parsed,
			matchedFiles: session.process[ processId ].processedFiles.matched,
			remainingClassificationFiles: extraClassifications,
			existingTypes: session.process[ processId ].fileTypes,
			selectedRun: params.selectedRun ]
	}

	/**
	 * Returns from the upload wizard without saving the data. The uploaded files are removed.
	 */
	def returnWithoutSaving = {
		def processId = params.processId
		def entityType = session.process[ processId ].entityType
		def entityId = session.process[ processId ].entityId

		// Delete all uploaded files from disk
		session.process[ processId ]?.processedFiles?.parsed?.success?.each {
			fileService.delete( it.filename )
		}

		// Clear process from session
		workerService.clearProcess( session, processId )

		// Redirect to the correct controller
		switch( entityType ) {
			case "run":
			case "assay":
				redirect( controller: entityType, action: "show", id: entityId )
				return
			default:
				response.setStatus( 404, "No entity found" )
				render ""
				return
		}
	}

	/**
	 * Stores the files the user matched/selected and shows a screen with the
	 * progress of saving them.
	 *
	 * Expects:
	 *   params.processId                process id of the finished parse step
	 *   params.file                     map of matched files (numeric keys)
	 *   params.remainingClassification  map of extra classification files (numeric keys)
	 */
	def saveMatchedFiles = {
		def processId = params.processId

		def entityType = session.process[ processId ].entityType
		def entityId = session.process[ processId ].entityId

		session.process[ processId ].matchedFiles = params.file
		session.process[ processId ].matchedRemainingClassification = params.remainingClassification

		// Check for total size of the classification files in order to be able
		// to show a progress bar. The handling of classification files is orders
		// of magnitude bigger than the rest, so we only show progress of those files
		long filesize = 0

		// Loop through all files. Those are the numeric elements in the 'files' array.
		// Null-safe guards: params.file / params.remainingClassification may be absent,
		// and missing files on disk count as 0 bytes instead of throwing an NPE.
		def digitRE = ~/^\d+$/
		params.file?.findAll { it.key.matches( digitRE ) }?.each { file ->
			def filevalue = file.value

			// Check if the file is selected
			if( filevalue.include == "on" ) {
				if( fileService.fileExists( filevalue.fasta ) ) {
					// Also count classification data for this file, if it is present
					if( filevalue.classification ) {
						filesize += fileService.get( filevalue.classification )?.size() ?: 0
					}
				}
			}
		}
		params.remainingClassification?.findAll { it.key.matches( digitRE ) }?.each { file ->
			def filevalue = file.value

			// Check if the file is selected
			if( filevalue.include == "on" ) {
				if( fileService.fileExists( filevalue.filename ) ) {
					filesize += fileService.get( filevalue.filename )?.size() ?: 0
				}
			}
		}

		// Clear old process, but save useful data
		def processInfo = session.process[ processId ]
		workerService.clearProcess( session, processId )

		// Create a new unique process identifier
		processId = workerService.initProcess( session, "Store sequence data and classification", 2, filesize )

		// Restore the saved data (entity info, matched files) under the new process id
		session.process[ processId ] = processInfo

		// Retrieve worker URLs.
		// BUGFIX: the entity id must be passed as the 'id' attribute; it was
		// previously passed as an unnamed positional argument, which createLink
		// ignores, yielding a return URL without an id.
		def finishUrl = createLink( controller: "import", action: 'saveMatchedResult', params: [ processId: processId ] ).toString()
		def returnUrl = createLink( controller: entityType, action: "show", id: entityId ).toString()

		def url = workerService.startProcess( session, processId, finishUrl, returnUrl )

		//
		// Initiate work
		//
		// Check whether files are given
		def files = session.process[ processId ].matchedFiles
		def remainingClassification = session.process[ processId ].matchedRemainingClassification

		if( !files && !remainingClassification ) {
			flash.message = "No files were selected for import."
			redirect( controller: session.process[ processId ].entityType, action: 'show', 'id': session.process[ processId ].entityId )
			return
		}

		// NOTE(review): permanentDir appears unused in this action — verify whether
		// fileService.absolutePath has needed side effects before removing.
		File permanentDir = fileService.absolutePath( ConfigurationHolder.config.massSequencing.fileDir )

		// This closure enables keeping track of the progress
		def httpSession = session
		def onProgress = { progress ->
			// Update progress
			httpSession.progress[ processId ].stepProgress += progress
		}

		// Run the computations asynchronously, since it takes a lot of time
		runAsync {
			// Loop through all FASTA files. Those are the numeric elements in the 'files' array
			def fastaReturn = saveMatchedFastaFiles( files, httpSession.process[ processId ]?.processedFiles, onProgress )
			def classificationReturn = saveRemainingClassificationFiles( remainingClassification, onProgress )

			// Update classification (summary) for updated samples
			def samplesClassified = [] + fastaReturn.samplesClassified + classificationReturn.samplesClassified
			def uniqueSamples = samplesClassified.findAll { it }.unique()

			// Now all classification files have been parsed, start a new step. This might take a while, so
			// the progress should be shown.
			workerService.nextStep( httpSession, processId, "Updating classification statistics in database", uniqueSamples.size() )
			classificationService.updateClassificationForAssaySamples( uniqueSamples, onProgress )

			def returnStructure = [
				numSequenceFiles: fastaReturn.numSequenceFiles,
				numQualFiles: fastaReturn.numQualFiles,
				numClassificationFiles: fastaReturn.numClassificationFiles,
				numLogFiles: fastaReturn.numLogFiles,
				numExtraClassificationFiles: classificationReturn.numExtraClassifications,
				numTotal: fastaReturn.numSequenceFiles + classificationReturn.numExtraClassifications,
				errors: [] + fastaReturn.errors + classificationReturn.errors
			]

			// Remove all uploaded files that have not been moved to permanent storage
			httpSession.process[ processId ]?.processedFiles?.parsed?.success?.each {
				fileService.delete( it.filename )
			}

			httpSession.process[ processId ].result = returnStructure

			// Tell the frontend we are finished
			httpSession.progress[ processId ].finished = true
		}

		redirect( url: url )
	}

	/**
	 * Saves the selected FASTA files (with optional qual/log/classification data)
	 * permanently and attaches them to their assay samples.
	 *
	 * @param files           map from the matching form; numeric keys hold file entries
	 * @param processedFiles  the parse results stored in the session (used for moving files)
	 * @param onProgress      callback to report classification-storage progress
	 * @return map with counters per file type, a list of error messages and the
	 *         (unique) samples for which classification data was stored
	 */
	def saveMatchedFastaFiles( def files, processedFiles, Closure onProgress ) {
		int numSuccessful = 0
		int numQualFiles = 0
		int numClassificationFiles = 0
		int numLogFiles = 0
		def samplesClassified = []
		def errors = []

		def digitRE = ~/^\d+$/
		files.findAll { it.key.matches( digitRE ) }.each { file ->
			def filevalue = file.value

			// Check if the file is selected
			if( filevalue.include == "on" ) {
				if( fileService.fileExists( filevalue.fasta ) ) {
					try {
						// Move the files to permanent storage
						def permanent = fastaService.savePermanent( filevalue.fasta, filevalue.qual, filevalue.logfile, processedFiles )

						// Save the data into the database
						SequenceData sd = new SequenceData()

						sd.sequenceFile = permanent.fasta
						sd.qualityFile = permanent.qual
						sd.logFile = permanent.logfile
						sd.numSequences = permanent.numSequences
						sd.averageQuality = permanent.avgQuality

						def sample = AssaySample.get( filevalue.assaySample )
						if( sample ) {
							sample.addToSequenceData( sd )

							AssaySample.recalculateNumSequences( sample )
						}

						if( !sd.validate() ) {
							errors << "an error occurred while saving " + filevalue.fasta + ": validation of SequenceData failed."
						} else {
							sd.save(flush:true)

							// Also save classification data for this file, if it is present
							if( filevalue.classification ) {
								classificationService.storeClassification( filevalue.classification, sd, onProgress )
								samplesClassified << sample

								numClassificationFiles++
							}

							if( sd.qualityFile )
								numQualFiles++

							if( sd.logFile )
								numLogFiles++

							numSuccessful++
						}
					} catch( Exception e ) {
						e.printStackTrace()
						errors << "an error occurred while saving " + filevalue.fasta + ": " + e.getMessage()
					}
				}
			} else {
				// File doesn't need to be included in the system. Delete it also from disk
				fileService.delete( filevalue.fasta )
			}
		}

		return [ numSequenceFiles: numSuccessful, numQualFiles: numQualFiles, numClassificationFiles: numClassificationFiles, numLogFiles: numLogFiles, errors: errors, samplesClassified: samplesClassified.unique() ]
	}

	/**
	 * Stores classification files that were matched manually to existing
	 * SequenceData records (by id entered on the form).
	 *
	 * @param files       map from the form; numeric keys hold file entries
	 * @param onProgress  callback to report classification-storage progress
	 * @return map with the number of stored classifications, error messages and
	 *         the (unique) samples that were (re)classified
	 */
	def saveRemainingClassificationFiles( def files, Closure onProgress ) {
		def digitRE = ~/^\d+$/
		def errors = []
		def samplesClassified = []
		def numSuccessful = 0

		files.findAll { it.key.matches( digitRE ) }.each { file ->
			def filevalue = file.value

			// Check if the file is selected
			if( filevalue.include == "on" ) {
				if( fileService.fileExists( filevalue.filename ) ) {
					def sequenceDataId = filevalue.sequenceData
					try {
						if( sequenceDataId.toString().isLong() ) {
							// Retrieve sequenceData and sample now, because the session will be cleared during import
							def sequenceData = SequenceData.get( sequenceDataId.toString().toLong() )
							def sample = sequenceData.sample

							if( sequenceData ) {
								classificationService.removeClassificationForSequenceData( sequenceData )
								classificationService.storeClassification( filevalue.filename, sequenceData, onProgress )
								samplesClassified << sample
							}

							numSuccessful++
						} else {
							errors << "a wrong ID is entered for classification file " + filevalue.filename
						}
					} catch( Exception e ) {
						e.printStackTrace()
						errors << "an error occurred while saving " + filevalue.filename + ": " + e.getMessage()
					}
				}
			}

			// The uploaded file is deleted from disk in all cases: classification
			// data has either been copied into the database above, or the file was
			// not selected for import.
			fileService.delete( filevalue.filename )
		}

		return [ numExtraClassifications: numSuccessful, errors: errors, samplesClassified: samplesClassified.unique() ]
	}

	/**
	 * Redirects the user back to the start screen with a message about how things went.
	 *
	 * Builds a human readable summary (flash.message / flash.error) from the
	 * result structure stored by saveMatchedFiles, then clears the process.
	 */
	def saveMatchedResult = {
		def processId = params.processId

		def result = session.process[ processId ].result

		// Return a message to the user
		if( result.numTotal == 0 ) {

			if( result.errors.size() > 0 ) {
				flash.error = "None of the files were imported, because "
				result.errors.each {
					flash.error += "<br />- " + it
				}
			} else {
				flash.message = "None of the files were imported, because none of the files were selected for import."
			}
		} else {
			// Assemble something like:
			// "2 sequence files have been added to the system, with 2 quality files and 1 log file."
			flash.message = ""
			if( result.numSequenceFiles == 1 ) {
				flash.message += result.numSequenceFiles + " sequence file has been added to the system"
			} else if( result.numSequenceFiles > 1 ) {
				flash.message += result.numSequenceFiles + " sequence files have been added to the system"
			}

			if( result.numQualFiles > 0 || result.numClassificationFiles > 0 || result.numLogFiles > 0 ) {
				flash.message += ", with"
			}

			if( result.numQualFiles == 1 ) {
				flash.message += " 1 quality file"
			} else if( result.numQualFiles > 1 ) {
				flash.message += " " + result.numQualFiles + " quality files"
			}

			if( result.numQualFiles > 0 && ( result.numClassificationFiles > 0 || result.numLogFiles > 0 ) ) {
				flash.message += " and"
			}

			if( result.numClassificationFiles == 1 ) {
				flash.message += " 1 classification file"
			} else if( result.numClassificationFiles > 1 ) {
				flash.message += " " + result.numClassificationFiles + " classification files"
			}

			if( ( result.numQualFiles > 0 || result.numClassificationFiles > 0 ) && result.numLogFiles > 0 ) {
				flash.message += " and"
			}

			if( result.numLogFiles == 1 ) {
				flash.message += " 1 log file"
			} else if( result.numLogFiles > 1 ) {
				flash.message += " " + result.numLogFiles + " log files"
			}

			if( flash.message )
				flash.message += "."

			if( result.numExtraClassificationFiles == 1 ) {
				flash.message += result.numExtraClassificationFiles + " additional classification file has been read. "
			} else if( result.numExtraClassificationFiles > 1 ) {
				flash.message += result.numExtraClassificationFiles + " additional classification files have been read. "
			}

			if( result.errors.size() > 0 ) {
				flash.error = "However, " + result.errors.size() + " errors occurred during import: "
				result.errors.each {
					flash.error += "<br />- " + it
				}
			}
		}

		// Determine where to redirect the user to
		def entityType = session.process[ processId ].entityType
		def entityId = session.process[ processId ].entityId

		// Clear session
		workerService.clearProcess( session, processId )

		// Redirect user
		redirect( controller: entityType, action: "show", id: entityId )
	}

	/**
	 * Loads an assay by id, checking read authorization.
	 *
	 * @param assayId       id of the assay (coerced to Long)
	 * @param loggedInUser  user to authorize against; falls back to session.user
	 *                      (pass explicitly from background threads)
	 * @return the Assay, or null (with flash.error set) when the id is invalid,
	 *         the assay does not exist or the user may not read it
	 */
	protected Assay getAssay(def assayId, User loggedInUser = null) {
		// load assay with id specified by param.id
		def assay
		try {
			assay = Assay.get(assayId as Long)
		} catch( Exception e ) {
			flash.error = "Incorrect id given: " + assayId
			return null
		}

		if (!assay) {
			flash.error = "No assay found with id: " + assayId
			return null
		}

		if (!assay.study.canRead( loggedInUser ?: session.user ) ) {
			flash.error = "You don't have the right authorizaton to access assay " + assay.name
			return null
		}

		return assay
	}

	/**
	 * Loads a run by id.
	 *
	 * @param runId  id of the run (coerced to Long)
	 * @return the Run, or null (with flash.error set) when the id is invalid or
	 *         no run exists with that id
	 */
	protected Run getRun(def runId) {
		// load run with id specified by param.id
		def run
		try {
			run = Run.get(runId as Long)
		} catch( Exception e ) {
			flash.error = "Incorrect id given: " + runId
			return null
		}

		if (!run) {
			flash.error = "No run found with id: " + runId
			return null
		}

		return run
	}
}