Changeset 6209


Timestamp:
Apr 13, 2021, 3:35:55 PM (13 months ago)
Author:
Nicklas Nordborg
Message:

References #1295: Registration of specimen handled by external lab

Filling in some missing values in the import.

Renamed the ImportArchive file server to ImportGateway and created a new ImportArchive. The idea is that the ImportGateway is a temporary gateway where data is waiting to be imported. Once imported, the data (JSON and FASTQ files) is copied to the ImportArchive for permanent storage. Secondary analysis then uses the files in the ImportArchive. The ImportFastqJobCreator has been updated to handle the moving of FASTQ files. JSON files are not yet handled, but I think that has to be done by the importer. It will currently generate a lot of error messages if the file is left at the gateway.
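
The gateway-to-archive handover in the generated job script amounts to a per-file check-and-promote step: use the copy in the ImportArchive if it is already there, otherwise copy it from the ImportGateway (removing the gateway copy), then stage a local working copy for the analysis. A minimal shell sketch of that idea, with hypothetical paths and file name (the real script derives these from the import-gateway and import-archive config values, the run's data files folder and the RawFASTQ annotation):

    # Sketch only -- hypothetical locations standing in for the configured paths
    ImportGateway=/path/to/import-gateway             # hypothetical; value of the import-gateway config
    ImportArchive=/path/to/import-archive/run_folder  # hypothetical; import-archive config + run data files folder
    FASTQ=example_R1.fastq.gz                         # hypothetical; one value of the RawFASTQ annotation

    mkdir -p "${ImportArchive}" fastq
    if [ ! -f "${ImportArchive}/${FASTQ}" ]; then
      if [ ! -f "${ImportGateway}/${FASTQ}" ]; then
        echo "Can't find FASTQ file ${FASTQ} in ${ImportGateway} or ${ImportArchive}" 1>&2
        exit 1
      fi
      # Promote the file to permanent storage and clean up the temporary gateway copy
      cp "${ImportGateway}/${FASTQ}" "${ImportArchive}/${FASTQ}"
      rm -f "${ImportGateway}/${FASTQ}"
    fi
    cp "${ImportArchive}/${FASTQ}" fastq              # local working copy for secondary analysis

In the actual generated script the gateway copy is only removed when the job is not running in debug mode.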

Location:
extensions/net.sf.basedb.reggie/trunk
Files:
12 edited

  • extensions/net.sf.basedb.reggie/trunk/config/reggie-config.xml

    r6181 r6209  
    165165     
    166166      <!-- full path to the location where HiSeq/NextSeq data is stored (required) -->
    167       <run-archive>/casa2/run_archive</run-archive>
     167      <run-archive>/casa18/run_archive/scanbprim</run-archive>
    168168      <!-- Alternate paths in search order in case data is not found in the primary -->
    169169      <!-- run archive. Add more entries as needed, but it is important that they -->
     
    171171      <run-archive-2></run-archive-2>
    172172     
    173       <!-- full path to the location where incoming FASTQ files are stored (required) -->
    174       <import-archive>/casa17/cmdimport</import-archive>
     173      <!-- full path to the location where incoming FASTQ files are temporarily -->
     174      <!-- stored until they have been imported (required) -->
     175      <import-gateway>/casa17/cmdimport</import-gateway>
     176     
     177      <!-- full path to the location where incoming FASTQ files are stored -->
     178      <!--  after they have been imported (required) -->
     179      <import-archive>/casa17/project_archive/cmdimport</import-archive>
    175180     
    176181      <!-- Full path to the location where data files should be archived (required) -->
    177182      <!-- The path should include the name of the project -->
    178       <project-archive>/casa4/project_archive/scanb</project-archive>
     183      <project-archive>/casa17/project_archive/scanb</project-archive>
    179184      <!-- Full path to the location where external data files should be archive (optional) -->
    180185      <!-- If not specified, the 'project-archive' path is used -->
    181       <external-archive></external-archive>
     186      <external-archive>/casa17/project_archive/scanb-external</external-archive>
    182187     
    183188      <!-- Full path to the root location where reference genomes are located -->
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/counter/CounterService.java

    r6199 r6209  
    14731473    try
    14741474    {
    1475       importFiles = JsonFile.findJsonFiles(dc, Fileserver.IMPORT_ARCHIVE.load(dc), false);
     1475      importFiles = JsonFile.findJsonFiles(dc, Fileserver.IMPORT_GATEWAY.load(dc), false);
    14761476    }
    14771477    catch (RuntimeException ex)
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/dao/Annotationtype.java

    r6203 r6209  
    13781378
    13791379  /**
     1380    The "RawFASTQ" annotation, used for (merged) derived bioassays that
     1381    use external FASTQ files for import. This annotation should typically
     1382    have 2 values (one for R1 and one for R2).
     1383    @since 4.32
     1384  */
     1385  public static final Annotationtype RAW_FASTQ =
     1386    new Annotationtype("RawFASTQ", Type.STRING, false, Item.DERIVEDBIOASSAY);
     1387 
     1388  /**
    13801389    The "SequencingResult" annotation, used for derived bioassays (SequencingRun).
    13811390    A value of "Successful" typically means that the process can continue with
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/dao/Fileserver.java

    r6177 r6209  
    5050      "File server for exporting released data to other servers.");
    5151 
    52  
    5352  /**
    5453    The file server containing data files coming from external labs
    55     that we should import.
     54    that we should import. This is a temporary location and the
     55    files will be moved to IMPORT_ARCHIVE after the import.
     56    @since 4.32
     57  */
     58  public static final Fileserver IMPORT_GATEWAY =
     59    new Fileserver("ImportGateway", "net.sf.basedb.xfiles.sftp-connection-manager",
     60      "File server containing data from external labs that should be imported. " +
     61      "This is a temporary storage location and the files will be moved to ImportArchive.");
     62
     63  /**
     64    The file server containing data files coming from external labs
     65    after they have been imported.
    5666    @since 4.32
    5767  */
    5868  public static final Fileserver IMPORT_ARCHIVE =
    5969    new Fileserver("ImportArchive", "net.sf.basedb.xfiles.sftp-connection-manager",
    60       "File server containing data from external labs that should be imported.");
    61  
     70      "File server containing data from external labs after they have been imported.");
     71 
     72   
    6273  /**
    6374    Get the file server by name of the static constant defined in this class.
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/grid/ImportFastqJobCreator.java

    r6184 r6209  
    1616import net.sf.basedb.core.FileServer;
    1717import net.sf.basedb.core.FileSetMember;
     18import net.sf.basedb.core.InvalidDataException;
    1819import net.sf.basedb.core.ItemList;
    1920import net.sf.basedb.core.ItemNotFoundException;
     
    3940import net.sf.basedb.reggie.dao.BiomaterialList;
    4041import net.sf.basedb.reggie.dao.Datafiletype;
     42import net.sf.basedb.reggie.dao.DemuxedSequences;
    4143import net.sf.basedb.reggie.dao.Fileserver;
    4244import net.sf.basedb.reggie.dao.Library;
    4345import net.sf.basedb.reggie.dao.MergedSequences;
    4446import net.sf.basedb.reggie.dao.Pipeline;
     47import net.sf.basedb.reggie.dao.SequencingRun;
    4548import net.sf.basedb.reggie.dao.Subtype;
    4649import net.sf.basedb.util.Values;
     
    120123    @return A list with the corresponding jobs in BASE
    121124  */
     125  @SuppressWarnings("unchecked")
    122126  public List<JobDefinition> createFastqImportJobs(DbControl dc, OpenGridCluster cluster, List<MergedSequences> mergedSequences)
    123127  {
     
    134138   
    135139    // Get global options
     140    String importGateway = cfg.getRequiredConfig("import-gateway", null);
    136141    String importArchive = cfg.getRequiredConfig("import-archive", null);
    137142    String projectRoot = cfg.getRequiredConfig("project-archive", null);
     
    174179      ms = MergedSequences.getById(dc, ms.getId()); // Ensure item is loaded in this transaction
    175180      DerivedBioAssay merged = ms.getDerivedBioAssay();
    176      
    177181      String mergeName = ScriptUtil.checkValidFilename(merged.getName());
     182     
     183      List<String> rawFastqNames = (List<String>)Annotationtype.RAW_FASTQ.getAnnotationValues(dc, merged);
     184      if (rawFastqNames == null || rawFastqNames.size() != 2)
     185      {
     186        throw new InvalidDataException("Annotation RawFASTQ on " +
     187            mergeName + " must have two values: " + rawFastqNames);
     188      }
     189     
     190      // Get SequencingRun so that we can get the path to the FASTQ folder.
     191      List<DemuxedSequences> demux = ms.getDemuxedSequences(dc);
     192      if (demux.size() > 1)
     193      {
     194        throw new InvalidDataException(
     195          "More than one demux was found for " + mergeName +
     196          "This wizard can't be used until that is corrected.");
     197      }
     198      SequencingRun sr = SequencingRun.getByDemuxedSequences(dc, demux.get(0));
     199      String rawFastqFolder = ScriptUtil.checkValidPath((String)Annotationtype.DATA_FILES_FOLDER.getAnnotationValue(dc, sr.getItem()), true, true);
     200     
    178201      if (merged.hasFileSet() && merged.getFileSet().hasMember(fastqData))
    179202      {
     
    213236      String R2_name = baseFileName+"_R2.fastq";
    214237      String fragments_name = baseFileName + "_fragmentsize.txt";
    215  
     238     
    216239      ScriptBuilder script = new ScriptBuilder();
    217240      script.comment("Setting up scripting environment and copying script to tmp folder");
     
    219242      script.cmd("export TrimmomaticJAR="+trimmomatic_path);
    220243      script.cmd("export AdapterFile="+trimmomatic_adapterFile);
    221       script.cmd("ImportArchive=" + importArchive);
     244      script.cmd("ImportGateway=" + importGateway);
     245      script.cmd("ImportArchive=" + importArchive + rawFastqFolder);
    222246      script.cmd("ReferenceDir=" + referenceRoot);
    223247      script.cmd("Gidx=${ReferenceDir}/" + demux_bowtieGidx);
     
    245269
    246270      script.comment("Copy FASTQ files to tmp folder");
    247       script.progress(10, "Copying FASTQ files");
    248       // TODO -- we need some other information about which FASTQ files to copy
    249       script.cmd("cp ${ImportArchive}/*.fastq.gz fastq");
    250       script.newLine();
    251 
     271      // Check if FASTQ files exists in ImportArchive location
     272      // If not, we need to copy from ImportGateway
     273      // If they don't exists at all we generate an error
     274      int fileNo = 0;
     275      script.cmd("mkdir -p ${ImportArchive}");
     276      for (String fastqName : rawFastqNames)
     277      {
     278        fileNo++;
     279        // NOTE! Order in the list is not specified so we don't know which is R1 and R2
     280        script.cmd("FASTQ="+ScriptUtil.checkValidFilename(fastqName));
     281        script.progress(5+fileNo*5, "Copying FASTQ files: ${FASTQ}");
     282        script.cmd("if [ ! -f \"${ImportArchive}/${FASTQ}\" ]; then");
     283        script.cmd("  if [ ! -f \"${ImportGateway}/${FASTQ}\" ]; then");
     284        script.cmd("    echo \"Can't find FASTQ file ${FASTQ} in ${ImportGateway} or ${ImportArchive}\" 1>&2");
     285        script.cmd("    exit 1");
     286        script.cmd("   fi");
     287        script.cmd("   cp \"${ImportGateway}/${FASTQ}\" \"${ImportArchive}/${FASTQ}\"");
     288        if (!debug)
     289        {
     290          script.cmd("   rm -f \"${ImportGateway}/${FASTQ}\"");
     291        }
     292        script.cmd("fi");
     293        script.cmd("cp \"${ImportArchive}/${FASTQ}\" fastq");
     294        script.newLine();
     295      }
     296     
    252297      script.comment("Find FASTQ files");
    253       script.cmd("FASTQ1=`find fastq -name \"*_R1.fastq.gz\" -print -quit 2> /dev/null`");
    254       script.cmd("FASTQ2=`find fastq -name \"*_R2.fastq.gz\" -print -quit 2> /dev/null`");
     298      script.cmd("FASTQ1=`find fastq -name \"*_R1*.fastq.gz\" -print -quit 2> /dev/null`");
     299      script.cmd("FASTQ2=`find fastq -name \"*_R2*.fastq.gz\" -print -quit 2> /dev/null`");
    255300      script.newLine();
    256301
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/plugins/FutureSpecimenImporter.java

    r6207 r6209  
    33import java.text.DateFormat;
    44import java.text.SimpleDateFormat;
     5import java.util.Arrays;
     6
    57import org.json.simple.JSONObject;
    68
     
    3436import net.sf.basedb.reggie.plugins.cmd.DemuxInfo;
    3537import net.sf.basedb.reggie.plugins.cmd.DnaInfo;
     38import net.sf.basedb.reggie.plugins.cmd.FastqInfo;
    3639import net.sf.basedb.reggie.plugins.cmd.FlowCellInfo;
    3740import net.sf.basedb.reggie.plugins.cmd.FlowThroughInfo;
     
    106109    DemuxInfo demuxInfo = jsonFile.getDemuxInfo();
    107110    MergeInfo mergeInfo = jsonFile.getMergeInfo();
     111    FastqInfo fastqInfo = jsonFile.getFastqInfo();
    108112
    109113    if (jsonSpecimen == null) return;
     
    135139    DemuxedSequences demux = getOrCreateDemux(dc, seqRun, flowCellInfo, demuxInfo);
    136140   
    137     if (demux == null) return;
    138     MergedSequences merged = createMergedSequences(dc, lib, demux, mergeInfo);
     141    if (demux == null || !mergeInfo.valid || !fastqInfo.valid) return;
     142    MergedSequences merged = createMergedSequences(dc, lib, demux, mergeInfo, fastqInfo);
     143   
    139144  }
    140145 
     
    293298    // creationEvent.setEventDate(rnaInfo.qcDate);
    294299    creationEvent.setEventDate(rnaInfo.qiacubeDate);
     300    creationEvent.setProtocol(null);
     301    creationEvent.setHardware(null);
    295302   
    296303    // TODO -- BA_RIN or CA_RQS
     
    428435      Annotationtype.FLOWCELL_TYPE.setAnnotationValue(dc, flowCell, seqRunInfo.sequencer.flowCellType);
    429436      Annotationtype.CLUSTER_OPERATOR.setAnnotationValue(dc, flowCell, flowCellInfo.operator);
     437      Annotationtype.CLUSTER_START.setAnnotationValue(dc, flowCell, seqRunInfo.startDate);
    430438      Annotationtype.PLATE_PROCESS_RESULT.setAnnotationValue(dc, flowCell, ReactionPlate.PROCESS_SUCCESSFUL);
     439      Annotationtype.SEQUENCING_RESULT.setAnnotationValue(dc, flowCell, SequencingRun.SEQUENCING_SUCCESSFUL);
    431440      dc.saveItem(flowCell);
    432441     
    433442      BioMaterialEvent createEvent = flowCell.getCreationEvent();
     443      createEvent.setEventDate(seqRunInfo.startDate);
    434444      createEvent.setProtocol(null); // To avoid that a 'project default' is used
    435445      createEvent.setHardware(seqRunInfo.sequencer.sequencer);
     
    441451        poolA.setItemSubtype(Subtype.POOLED_LIBRARY_ALIQUOT.get(dc));
    442452        poolA.setName(pool.getNextAliquotName(dc));
    443         poolA.getCreationEvent().setSource(pool.getItem());
     453        BioMaterialEvent poolEvent = poolA.getCreationEvent();
     454        poolEvent.setSource(pool.getItem());
     455        poolEvent.setEventDate(seqRunInfo.startDate);
    444456        createEvent.addSource(poolA).setPosition(laneNo);
    445457        dc.saveItem(poolA);
     
    481493      Annotationtype.SEQUENCING_RESULT.setAnnotationValue(dc, seqRun, SequencingRun.SEQUENCING_SUCCESSFUL);
    482494     
    483       DateFormat df = new SimpleDateFormat("yyyy/yyMMdd");
     495      DateFormat df = new SimpleDateFormat("/yyyy/yyMMdd");
    484496      String dataFilesFolder = df.format(seqRunInfo.startDate)+
    485497        "_"+seqRunInfo.sequencer.serialNo+
    486498        "_"+MD5.leftPad(Integer.toString(seqRunInfo.runNumber), '0', 4)+
    487499        "_"+seqRunInfo.position+flowCellInfo.flowCellId;
    488       addDebugMessage(dataFilesFolder);
     500      addDebugMessage("DataFilesFolder: "+dataFilesFolder);
    489501      Annotationtype.DATA_FILES_FOLDER.setAnnotationValue(dc, seqRun, dataFilesFolder);
    490502      dc.saveItem(seqRun);
     
    515527      demux.setSoftware(demuxInfo.software);
    516528      demux.setProtocol(null);
     529      Annotationtype.ANALYSIS_RESULT.setAnnotationValue(dc, demux, DemuxedSequences.DEMUX_SUCCESSFUL);
    517530      Annotationtype.READ_STRING.setAnnotationValue(dc, demux, demuxInfo.readString);       
    518531      dc.saveItem(demux);
     
    523536  }
    524537 
    525   private MergedSequences createMergedSequences(DbControl dc, Library lib, DemuxedSequences demux, MergeInfo mergeInfo)
     538  private MergedSequences createMergedSequences(DbControl dc, Library lib, DemuxedSequences demux, MergeInfo mergeInfo, FastqInfo fastqInfo)
    526539  {
    527540
     
    531544    Pipeline.RNA_SEQ.setAnnotation(dc, merged);
    532545    merged.setExtract(lib.getItem());
     546    Annotationtype.READS.setAnnotationValue(dc, merged, mergeInfo.reads);
    533547    Annotationtype.PF_READS.setAnnotationValue(dc, merged, mergeInfo.pfReads);
     548    Annotationtype.RAW_FASTQ.setAnnotationValues(dc, merged, Arrays.asList(fastqInfo.R1.name, fastqInfo.R2.name));
     549   
     550    merged.setProtocol(null);
     551    merged.setSoftware(null);
     552    merged.setHardware(null);
     553   
    534554    dc.saveItem(merged);
    535555   
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/plugins/cmd/FlowCellInfo.java

    r6205 r6209  
    5252      }
    5353      size = section.getRequiredEntry("Size", IntValidator.FLOWCELL_SIZE_2_OR_4);
    54       sequencingCycles = section.getRequiredEntry("SequencingCycles", PatternValidator.CMD_ID);
     54      sequencingCycles = section.getRequiredEntry("SequencingCycles", PatternValidator.SEQUENCING_CYCLES);
    5555      operator = section.getRequiredEntry("Operator");
    5656    }
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/plugins/cmd/JsonFile.java

    r6207 r6209  
    1616import net.sf.basedb.opengrid.config.ConnectionInfo;
    1717import net.sf.basedb.opengrid.filetransfer.ByteArrayDownloadTarget;
     18import net.sf.basedb.opengrid.filetransfer.FilePermission;
     19import net.sf.basedb.opengrid.filetransfer.StringUploadSource;
     20import net.sf.basedb.opengrid.filetransfer.UploadSource;
    1821import net.sf.basedb.reggie.grid.ScriptUtil;
    1922import net.sf.basedb.util.error.ThrowableUtil;
     
    7780  private final List<String> debugMessages;
    7881
     82  private String rawData;
    7983  private JSONObject json;
    8084
     
    218222    try
    219223    {
    220       String data = download.getString("UTF-8");
    221       if (data == null || data.length() == 0)
     224      rawData = download.getString("UTF-8");
     225      if (rawData == null || rawData.length() == 0)
    222226      {
    223227        addErrorMessage("Could not parse '"+name+"': File is empty");
    224228        return;
    225229      }
    226       json = (JSONObject)new JSONParser().parse(data);
     230      json = (JSONObject)new JSONParser().parse(rawData);
    227231     
    228232      fastqInfo = new FastqInfo(getRequiredSection("fastq"));
     
    248252  }
    249253 
     254 
     255  public void copyTo(RemoteSession session, String rootDir)
     256  {
     257    if (!rootDir.endsWith("/")) rootDir += "/";
     258    String path = rootDir + name;
     259
     260    UploadSource upload = new StringUploadSource(name, rawData);
     261    try
     262    {
     263      session.executeCmd("mkdir -p " + rootDir, 5);
     264      session.uploadFile(upload, path, FilePermission.USER_RW);
     265    }
     266    catch (Exception ex)
     267    {
     268      addErrorMessage("Could not upload '"+ path + "': " + ex.getMessage());
     269      return;
     270    }
     271  }
     272 
    250273  public JsonSection getRequiredSection(String key)
    251274  {
     
    308331  }
    309332 
    310   public FastqInfo getFastq()
     333  public FastqInfo getFastqInfo()
    311334  {
    312335    return fastqInfo;
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/plugins/cmd/MergeInfo.java

    r6206 r6209  
    1313{
    1414  public Long pfReads;
     15  public Long reads;
    1516 
    1617  public boolean valid;
     
    2122    {
    2223      pfReads = merge.getRequiredEntry("PF_READS", LongValidator.POSITIVE.warnIf(10000000l, null)); // Warn if less than 10M reads
     24      reads = pfReads; //
    2325    }
    2426    valid = merge != null && !merge.hasError();
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/plugins/cmd/PatternValidator.java

    r6203 r6209  
    4444  public static final PatternValidator READ_STRING = new PatternValidator("([0-9]+[TBS]){4,8}", "read string", null, "at least 4 groups of [0-9]+[TBS]");
    4545
     46  /**
     47    4 groups of digits with - separator.
     48  */
     49  public static final PatternValidator SEQUENCING_CYCLES = new PatternValidator("([0-9]+\\-){3}[0-9]+", "sequencing cycles", null, "4 groups of 0-9");
    4650 
    4751  private final Pattern pattern;
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/servlet/FutureSpecimenServlet.java

    r6208 r6209  
    7070        dc = sc.newDbControl();
    7171       
    72         List<JsonFile> files = JsonFile.findJsonFiles(dc, Fileserver.IMPORT_ARCHIVE.load(dc), true);
     72        List<JsonFile> files = JsonFile.findJsonFiles(dc, Fileserver.IMPORT_GATEWAY.load(dc), true);
    7373       
    7474        ItemQuery<Sample> query = Sample.getQuery();
     
    116116        dc = sc.newDbControl();
    117117       
    118         FileServer server = Fileserver.IMPORT_ARCHIVE.load(dc);
    119         FileServer fs = Fileserver.IMPORT_ARCHIVE.load(dc);
     118        FileServer fs = Fileserver.IMPORT_GATEWAY.load(dc);
    120119        String rootPath = ScriptUtil.checkValidPath(fs.getRootPath(), true, false);
    121120        String file = ScriptUtil.checkValidFilename(req.getParameter("file"));
     
    199198        }
    200199       
    201         FileServer fs = Fileserver.IMPORT_ARCHIVE.load(dc);
    202         String rootPath = ScriptUtil.checkValidPath(fs.getRootPath(), true, false);
    203         RemoteSession session = null;
     200        FileServer importGateway = Fileserver.IMPORT_GATEWAY.load(dc);
     201        FileServer importArchive = Fileserver.IMPORT_ARCHIVE.load(dc);
     202       
     203        String gatewayRoot = ScriptUtil.checkValidPath(importGateway.getRootPath(), true, false);
     204        String archiveRoot = ScriptUtil.checkValidPath(importArchive.getRootPath(), true, false);
     205        RemoteSession gatewaySession = null;
     206        RemoteSession archiveSession = null;
    204207        try
    205208        {
    206           RemoteHost host = new RemoteHost(new ConnectionInfo(fs));
    207           session = host.connect(5);
     209          gatewaySession = new RemoteHost(new ConnectionInfo(importGateway)).connect(5);
     210          archiveSession = new RemoteHost(new ConnectionInfo(importArchive)).connect(5);
    208211         
    209212          for (int itemNo = 0; itemNo < jsonItems.size(); itemNo++)
     
    212215            Number itemId = (Number)jsonItem.get("id");
    213216            JsonFile jsonFile = new JsonFile(dc, (String)jsonItem.get("jsonFile"));
    214             jsonFile.downloadAndParse(dc, session, rootPath);
     217            jsonFile.downloadAndParse(dc, gatewaySession, gatewayRoot);
    215218           
    216219            Sample specimen = Sample.getById(dc, itemId.intValue());
     
    220223              FutureSpecimenImporter importer = new FutureSpecimenImporter();
    221224              importer.doImport(dc, specimen, jsonFile);
     225             
     226              if (!jsonFile.hasError())
     227              {
     228                // Add to FASTQ import pipeline
     229                // BiomaterialList.FASTQ_IMPORT_PIPELINE.get(dc).add(merged);
     230                // Copy JSON to Fileserver.IMPORT_ARCHIVE
     231                // jsonFile.copyTo(archiveSession, archiveRoot);
     232                // Remove JSON from Filesever.IMPORT_GATEWAY
     233              }
    222234            }
    223235           
     
    226238              jsonMessages.add("[Error]["+jsonFile.getName()+"] Import failed (see below for more information)");
    227239              jsonMessages.addAll(prefix("[Error]["+jsonFile.getName()+"] ", jsonFile.getErrorMessages()));
     240              dc.close();
    228241            }
    229242            else
    230243            {
     244              dc.commit();
    231245              jsonMessages.add("Imported " + specimen.getName());
    232246            }
     
    235249            jsonMessages.addAll(prefix("[Debug]["+jsonFile.getName()+"] ", jsonFile.getDebugMessages()));
    236250
    237             // TODO -- we should import each file in a separate transaction
    238             // dc.commit()
    239             // dc = sc.newDbControl();
     251            dc = sc.newDbControl();
    240252          }
    241253
     
    243255        finally
    244256        {
    245           OpenGrid.close(session);
     257          OpenGrid.close(gatewaySession);
     258          OpenGrid.close(archiveSession);
    246259        }
    247260      }
  • extensions/net.sf.basedb.reggie/trunk/src/net/sf/basedb/reggie/servlet/InstallServlet.java

    r6203 r6209  
    617617            createIfMissing, effectivePermissionsUse));
    618618        jsonChecks.add(checkAnnotationType(dc, Annotationtype.DATA_FILES_FOLDER, 1, null, createIfMissing, effectivePermissionsUse));
     619        jsonChecks.add(checkAnnotationType(dc, Annotationtype.RAW_FASTQ, 2, null, createIfMissing, effectivePermissionsUse));
    619620        jsonChecks.add(checkAnnotationType(dc, Annotationtype.SEQUENCING_RESULT, 1,
    620621            new ValueOptions(SequencingRun.SEQUENCING_SUCCESSFUL, SequencingRun.SEQUENCING_FIRST_BASE_REPORT_FAILED, SequencingRun.SEQUENCING_FAILED),
     
    10271028            Annotationtype.READS, Annotationtype.PF_READS, Annotationtype.ADAPTER_READS, Annotationtype.PT_READS,
    10281029            Annotationtype.FRAGMENT_SIZE_AVG, Annotationtype.FRAGMENT_SIZE_STDEV,
    1029             Annotationtype.DATA_FILES_FOLDER,
     1030            Annotationtype.DATA_FILES_FOLDER, Annotationtype.RAW_FASTQ,
    10301031            Annotationtype.DO_NOT_USE, Annotationtype.DO_NOT_USE_COMMENT,
    10311032            Annotationtype.AUTO_PROCESSING,
     
    11371138        jsonChecks.add(checkFileServer(dc, Fileserver.EXTERNAL_ARCHIVE, effectivePermissionsUse, createIfMissing));
    11381139        jsonChecks.add(checkFileServer(dc, Fileserver.RELEASE_ARCHIVE, effectivePermissionsUse, createIfMissing));
     1140        jsonChecks.add(checkFileServer(dc, Fileserver.IMPORT_GATEWAY, effectivePermissionsUse, createIfMissing));
    11391141        jsonChecks.add(checkFileServer(dc, Fileserver.IMPORT_ARCHIVE, effectivePermissionsUse, createIfMissing));
    11401142       