Changeset 6077


Timestamp: Aug 3, 2012, 12:53:22 PM
Author: Nicklas Nordborg
Message: Fixes #1706: External program executor and BASE 1 plug-in executor doesn't remove temporary working files
Location: trunk
Files: 4 edited

  • trunk/doc/src/docbook/appendix/incompatible.xml

r5983 → r6077

@@ -35,4 +35,17 @@
   and backwards compatible.
 </para>
+
+<sect1 id="appendix.incompatible.3.2">
+  <title>BASE 3.2 release</title>
+
+  <bridgehead>BASEfile exporter automatically closes the output stream</bridgehead>
+  <para>
+    The implementation of the BASEfile exporter has been changed to
+    automatically close the provided output stream when the export
+    is complete. Clients that need the old behavior should call
+    <code>BaseFileExporter.setAutoCloseWriters(false)</code> before
+    using it.
+  </para>
+</sect1>
 
 <sect1 id="appendix.incompatible.3.1">
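The paragraph added above tells clients how to keep the old behavior. Below is a minimal, self-contained sketch of the two resulting call patterns. The SketchExporter class and its setOutputStream/export methods are illustrative stand-ins, not the real BaseFileExporter API; only setAutoCloseWriters(false) is taken from the text above.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    // Stand-in exporter with the auto-close flag described above (illustration only)
    class SketchExporter
    {
      private OutputStream out;
      private boolean autoCloseWriters = true;   // enabled by default, as in BASE 3.2

      public void setOutputStream(OutputStream out) { this.out = out; }
      public void setAutoCloseWriters(boolean autoClose) { this.autoCloseWriters = autoClose; }

      public void export() throws IOException
      {
        out.write("exported data".getBytes());
        if (autoCloseWriters) out.close();       // exporter closes the stream itself
      }
    }

    public class ExportClientSketch
    {
      public static void main(String[] args) throws IOException
      {
        // Old behavior: the caller keeps control of the stream
        OutputStream myStream = new ByteArrayOutputStream();
        SketchExporter exporter = new SketchExporter();
        exporter.setOutputStream(myStream);
        exporter.setAutoCloseWriters(false);     // opt out of the new default
        exporter.export();
        myStream.close();                        // the caller closes the stream itself
      }
    }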
  • trunk/src/core/net/sf/basedb/util/export/spotdata/BaseFileExporter.java

r5405 → r6077

@@ -69,8 +69,10 @@
 private Map<String, String> parameters;
 private BaseFileWriter out;
+private boolean autoCloseWriters;
 
 protected BaseFileExporter()
 {
   this.parameters = new LinkedHashMap<String, String>();
+  this.autoCloseWriters = true;
 }
 

@@ -87,4 +89,14 @@
 {
   this.out = out;
+}
+
+/**
+  If this option is set then all writers are automatically closed
+  when all data has been writted to them. This setting is enabled by default.
+  @since 3.2
+*/
+public void setAutoCloseWriters(boolean autoClose)
+{
+  this.autoCloseWriters = autoClose;
 }
 

@@ -246,4 +258,11 @@
   exportAssaysSectionData();
   return true;
+}
+
+@Override
+protected void endExport(RuntimeException e)
+{
+  if (out != null && autoCloseWriters) out.close();
+  super.endExport(e);
 }
 // -------------------------------------------
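The endExport(RuntimeException) override added above is a cleanup hook that runs on both the success and the failure path, so closing the writer there guarantees the output stream is released. The sketch below illustrates that pattern with invented class names (ExportTaskSketch, AutoCloseExportSketch); the run() template method is an assumption about how such a hook is typically driven and is not part of this changeset.

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.OutputStream;

    // Minimal sketch of a cleanup hook that always runs, with or without an error
    abstract class ExportTaskSketch
    {
      public final void run()
      {
        RuntimeException error = null;
        try
        {
          doExport();
        }
        catch (RuntimeException e)
        {
          error = e;
          throw e;
        }
        finally
        {
          endExport(error);   // always invoked, mirroring super.endExport(e)
        }
      }

      protected abstract void doExport();
      protected void endExport(RuntimeException e) { }
    }

    public class AutoCloseExportSketch extends ExportTaskSketch
    {
      private final OutputStream out = new ByteArrayOutputStream();
      private boolean autoCloseWriters = true;

      @Override
      protected void doExport()
      {
        try { out.write(1); } catch (IOException e) { throw new RuntimeException(e); }
      }

      @Override
      protected void endExport(RuntimeException e)
      {
        // Same guard as in the change above: close unless the caller opted out
        if (out != null && autoCloseWriters)
        {
          try { out.close(); } catch (IOException io) { /* ignore on cleanup */ }
        }
        super.endExport(e);
      }

      public static void main(String[] args)
      {
        new AutoCloseExportSketch().run();
      }
    }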
  • trunk/src/core/net/sf/basedb/util/importer/spotdata/BaseFileImporter.java

r5689 → r6077

@@ -23,4 +23,5 @@
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.sql.SQLException;
 import java.util.ArrayList;

@@ -52,4 +53,5 @@
 import net.sf.basedb.core.signal.ThreadSignalHandler;
 import net.sf.basedb.util.ChainedProgressReporter;
+import net.sf.basedb.util.FileUtil;
 import net.sf.basedb.util.basefile.BaseFileParser;
 import net.sf.basedb.util.importer.FileWrapper;

@@ -282,45 +284,55 @@
       parser.setProgressReporter(new SimpleAbsoluteProgressReporter(chainedProgress, srcFile.getSize()));
     }
-    FlatFileParser ffp = parser.parse(srcFile.getInputStream(), srcFile.getCharacterSet());
-    int totalLines = ffp.getParsedLines();
-
-    // If the BASEfile didn't have any 'spots' sections we don't
-    // create a child bioassay set
-    if (parser.getSectionCount("spots") == 0) return null;
-
-    // Create the child bioassay set and bioassays
-    boolean useNewDataCube = info.getAssaysHaveParentAssaysMapping()
-      || info.getChildHasDifferentReporterPositionMapping();
-    BioAssaySet child = createChildBioAssaySet(dc, info, t, useNewDataCube);
-    child.setIntensityTransform(transform);
-    createChildBioAssays(dc, child, info, useNewDataCube);
-
-    // Create the position/reporter mapping if needed
-    if (useNewDataCube)
-    {
-      if (chainedProgress != null) chainedProgress.setRange(30, 40);
-      createChildPositionReporterMapping(child, info, chainedProgress);
-      if (!info.getChildHasDifferentReporterPositionMapping() && child.getRawDataType().isStoredInDb())
-      {
-        // If the child and parent has identical position/reporter mapping
-        // we can "calculate" new raw data mappings.
-        if (chainedProgress != null) chainedProgress.setRange(40, 50);
-        createChildRawDataMapping(child, parent, info, chainedProgress);
-      }
-    }
-
-    // Second parser pass: progress=50-100%
-    SecondPassSectionSpotsParser spotParser2 =
-      new SecondPassSectionSpotsParser(dc, info, child, totalLines);
-    BaseFileParser secondParser = new BaseFileParser();
-    secondParser.copyRedefinedColumnNames(parser);
-    secondParser.setSectionParser("spots", spotParser2);
-    if (chainedProgress != null)
-    {
-      chainedProgress.setRange(50, 100);
-      secondParser.setProgressReporter(new SimpleAbsoluteProgressReporter(chainedProgress, totalLines));
-    }
-    secondParser.parse(srcFile.getInputStream(), srcFile.getCharacterSet());
-    return child;
+    InputStream srcIn = srcFile.getInputStream();
+    try
+    {
+      FlatFileParser ffp = parser.parse(srcIn, srcFile.getCharacterSet());
+      int totalLines = ffp.getParsedLines();
+
+      // If the BASEfile didn't have any 'spots' sections we don't
+      // create a child bioassay set
+      if (parser.getSectionCount("spots") == 0) return null;
+
+      // Create the child bioassay set and bioassays
+      boolean useNewDataCube = info.getAssaysHaveParentAssaysMapping()
+        || info.getChildHasDifferentReporterPositionMapping();
+      BioAssaySet child = createChildBioAssaySet(dc, info, t, useNewDataCube);
+      child.setIntensityTransform(transform);
+      createChildBioAssays(dc, child, info, useNewDataCube);
+
+      // Create the position/reporter mapping if needed
+      if (useNewDataCube)
+      {
+        if (chainedProgress != null) chainedProgress.setRange(30, 40);
+        createChildPositionReporterMapping(child, info, chainedProgress);
+        if (!info.getChildHasDifferentReporterPositionMapping() && child.getRawDataType().isStoredInDb())
+        {
+          // If the child and parent has identical position/reporter mapping
+          // we can "calculate" new raw data mappings.
+          if (chainedProgress != null) chainedProgress.setRange(40, 50);
+          createChildRawDataMapping(child, parent, info, chainedProgress);
+        }
+      }
+      FileUtil.close(srcIn);
+
+      // Second parser pass: progress=50-100%
+      SecondPassSectionSpotsParser spotParser2 =
+        new SecondPassSectionSpotsParser(dc, info, child, totalLines);
+      BaseFileParser secondParser = new BaseFileParser();
+      secondParser.copyRedefinedColumnNames(parser);
+      secondParser.setSectionParser("spots", spotParser2);
+      if (chainedProgress != null)
+      {
+        chainedProgress.setRange(50, 100);
+        secondParser.setProgressReporter(new SimpleAbsoluteProgressReporter(chainedProgress, totalLines));
+      }
+      srcIn = srcFile.getInputStream();
+      secondParser.parse(srcIn, srcFile.getCharacterSet());
+      return child;
+    }
+    finally
+    {
+      FileUtil.close(srcIn);
+    }
   }
 
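The importer change above reads the BASEfile in two passes and now closes each InputStream explicitly, once between the passes and once in a finally block. The sketch below shows the same open, parse, close, reopen, close-in-finally pattern with plain java.io; closeQuietly() is a hypothetical stand-in for FileUtil.close(), and the line counting stands in for the real parsing.

    import java.io.BufferedReader;
    import java.io.Closeable;
    import java.io.FileReader;
    import java.io.IOException;

    public class TwoPassReadSketch
    {
      // Stand-in for FileUtil.close(): close quietly, ignoring null and errors
      static void closeQuietly(Closeable in)
      {
        if (in == null) return;
        try { in.close(); } catch (IOException e) { /* ignore on cleanup */ }
      }

      public static void main(String[] args) throws IOException
      {
        String path = args[0];
        BufferedReader in = new BufferedReader(new FileReader(path));
        try
        {
          // First pass: e.g. count the lines
          int lines = 0;
          while (in.readLine() != null) lines++;
          closeQuietly(in);   // release the file before the second pass

          // Second pass: reopen the same file and process it again
          in = new BufferedReader(new FileReader(path));
          String line;
          while ((line = in.readLine()) != null)
          {
            // process 'line' ...
          }
          System.out.println("Lines: " + lines);
        }
        finally
        {
          closeQuietly(in);   // whichever reader is current gets closed
        }
      }
    }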
  • trunk/src/plugins/core/net/sf/basedb/plugins/Base1PluginExecuter.java

r5689 → r6077

@@ -1484,16 +1484,22 @@
         if (!File.exists(dc, d, f.getName()))
         {
+          ThreadSignalHandler.checkInterrupted();
           File newFile = File.getNew(dc, d);
           newFile.setName(f.getName());
           newFile.setMimeTypeAuto(null, null);
           dc.saveItem(newFile);
+          InputStream tmpIn = null;
           try
           {
-            ThreadSignalHandler.checkInterrupted();
-            newFile.upload(new FileInputStream(f), true);
+            tmpIn = new FileInputStream(f);
+            newFile.upload(tmpIn, true);
           }
           catch (FileNotFoundException e)
           {
             continue;
+          }
+          finally
+          {
+            FileUtil.close(tmpIn);
           }
         }
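The fix above keeps a reference to the FileInputStream in tmpIn so that it can always be closed in the finally block, instead of handing an anonymous stream to upload() and leaving it open, which is what kept the temporary working files from being removed. A self-contained sketch of the same pattern, with hypothetical upload() and closeQuietly() helpers:

    import java.io.Closeable;
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.FileNotFoundException;
    import java.io.IOException;
    import java.io.InputStream;

    public class UploadCleanupSketch
    {
      // Stand-in for FileUtil.close(): close quietly, ignoring null and errors
      static void closeQuietly(Closeable c)
      {
        if (c == null) return;
        try { c.close(); } catch (IOException e) { /* ignore on cleanup */ }
      }

      // Stand-in for newFile.upload(...): only consumes the stream
      static void upload(InputStream in) throws IOException
      {
        byte[] buffer = new byte[8192];
        while (in.read(buffer) != -1) { /* send the bytes somewhere */ }
      }

      public static void main(String[] args) throws IOException
      {
        for (String name : args)
        {
          File f = new File(name);
          InputStream tmpIn = null;
          try
          {
            tmpIn = new FileInputStream(f);
            upload(tmpIn);
          }
          catch (FileNotFoundException e)
          {
            continue;              // skip missing files, as in the original loop
          }
          finally
          {
            closeQuietly(tmpIn);   // always runs, also on 'continue'
          }
        }
      }
    }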