Changeset 3653
- Timestamp: Aug 10, 2007, 12:59:24 PM
- Location: branches/webservices
- Files: 2 deleted, 137 edited, 50 copied
branches/webservices/config/dist/extended-properties.xml
r3581 r3653 179 179 type="string" 180 180 length="255" 181 /> 181 > 182 <link 183 regexp=".+" 184 url="callto://{value}" 185 /> 186 </property> 182 187 </class> 183 188 </extended-properties> -
branches/webservices/config/dist/raw-data-types.xml
r3533 r3653 232 232 column="flags" 233 233 type="int" 234 averagemethod="none" 234 235 /> 235 236 <property … … 3168 3169 </intensity-formula> 3169 3170 </raw-data-type> 3171 <raw-data-type 3172 id="illumina" 3173 channels="1" 3174 name="Illumina" 3175 table="RawDataIllumina"> 3176 <property 3177 name="MIN_Signal" 3178 title="MIN_Signal" 3179 description="Minimum intensity of the gene in the group." 3180 column="minSignal" 3181 type="float" 3182 /> 3183 <property 3184 name="AVG_Signal" 3185 title="AVG_Signal" 3186 description="Average intensity of the gene in the group." 3187 column="avgSignal" 3188 type="float" 3189 /> 3190 <property 3191 name="MAX_Signal" 3192 title="MAX_Signal" 3193 description="Maximum intensity of the gene in the group." 3194 column="maxSignal" 3195 type="float" 3196 /> 3197 <property 3198 name="NARRAYS" 3199 title="NARRAYS" 3200 description="Number of arrays in the group." 3201 column="narrays" 3202 type="int" 3203 /> 3204 <property 3205 name="ARRAY_STDEV" 3206 title="ARRAY_STDEV" 3207 description="Standard deviation associated with array-to-array variability within the group (undefined when the group contains a single array)." 3208 column="arrayStdDev" 3209 type="float" 3210 /> 3211 <property 3212 name="BEAD_STDEV" 3213 title="BEAD_STDEV" 3214 description="Average standard deviation associated with bead-to-bead variability for the arrays in the group." 3215 column="beadStdDev" 3216 type="float" 3217 /> 3218 <property 3219 name="Avg_NBEADS" 3220 title="Avg_NBEADS" 3221 description="Average number of beads per bead type representing probes for the gene." 3222 column="avgNBeads" 3223 type="int" 3224 /> 3225 <property 3226 name="Detection" 3227 title="Detection" 3228 description="p-value computed from the background model characterizing chance that the target sequence signal was distinguishable from the negative controls." 3229 column="detection" 3230 type="float" 3231 /> 3232 <intensity-formula 3233 name="avgSignal" 3234 title="Average signal" 3235 description="Average signal as determined by BeadStudio software." 3236 > 3237 <formula 3238 channel="1" 3239 expression="raw('AVG_Signal')" 3240 /> 3241 </intensity-formula> 3242 </raw-data-type> 3170 3243 </raw-data-types> 3171 3244 -
branches/webservices/credits.txt
r3533 r3653 26 26 27 27 28 The current BASE team is (at BASE 2. 3release)28 The current BASE team is (at BASE 2.4 release) 29 29 30 30 Johan Enell … … 41 41 Samuel Andersson 42 42 Micha Bayer 43 Jeremy Davis-Turak 43 44 Johan Enell 44 45 Michael Green -
branches/webservices/data/plugin_configfile.xml
r3133 r3653 881 881 </parameter> 882 882 </configuration> 883 <configuration pluginClassName="net.sf.basedb.plugins.PackedFileExporter"> 884 <configname>Zip archive (.zip)</configname> 885 <description>Compress the selected files/directories and put them in a ZIP file.</description> 886 <parameter> 887 <name>packer</name> 888 <label>Packer class</label> 889 <description>Enter the name of the class that is responsible for packing the files. It must be a class that implements the FilePacker interface.</description> 890 <class>java.lang.String</class> 891 <value>net.sf.basedb.util.zip.ZipFilePacker</value> 892 </parameter> 893 </configuration> 894 <configuration pluginClassName="net.sf.basedb.plugins.PackedFileExporter"> 895 <configname>TAR archive (.tar)</configname> 896 <description>Collect the selected files/directories into a TAR file (not compressed).</description> 897 <parameter> 898 <name>packer</name> 899 <label>Packer class</label> 900 <description>Enter the name of the class that is responsible for packing the files. It must be a class that implements the FilePacker interface.</description> 901 <class>java.lang.String</class> 902 <value>net.sf.basedb.util.zip.TarFilePacker</value> 903 </parameter> 904 </configuration> 905 <configuration pluginClassName="net.sf.basedb.plugins.PackedFileExporter"> 906 <configname>GZipped TAR archive (.tar.gz)</configname> 907 <description>Collect the selected files/directoris into a TAR file and compress it with GZIP.</description> 908 <parameter> 909 <name>packer</name> 910 <label>Packer class</label> 911 <description>Enter the name of the class that is responsible for packing the files. It must be a class that implements the FilePacker interface.</description> 912 <class>java.lang.String</class> 913 <value>net.sf.basedb.util.zip.GzipFilePacker</value> 914 </parameter> 915 </configuration> 916 <configuration pluginClassName="net.sf.basedb.plugins.PackedFileExporter"> 917 <configname>BZipped TAR archive (.tar.bz2)</configname> 918 <description>Collect the selected files/directoris into a TAR file and compress it with BZIP2.</description> 919 <parameter> 920 <name>packer</name> 921 <label>Packer class</label> 922 <description>Enter the name of the class that is responsible for packing the files. It must be a class that implements the FilePacker interface.</description> 923 <class>java.lang.String</class> 924 <value>net.sf.basedb.util.zip.Bzip2FilePacker</value> 925 </parameter> 926 </configuration> 883 927 </configfile> -
branches/webservices/doc/src/docbook/appendix/base.config.xml
r3533 r3653 575 575 </listitem> 576 576 </varlistentry> 577 <varlistentry> 578 <term><property>plugins.autounload</property></term> 579 <listitem> 580 <para> 581 Enable this setting to let BASE detect if a plug-in JAR file is changed 582 and automatically load and use the new code instead of the old code. 583 This setting is useful for plug-in developers since they don't have to 584 restart the webserver each time the plug-in is recompiled. 585 <itemizedlist> 586 <listitem> 587 <simpara> 588 <userinput>true,yes,1</userinput> 589 to enable 590 </simpara> 591 </listitem> 592 <listitem> 593 <simpara> 594 <userinput>false,no,0</userinput> 595 to disable (default if no value is specified) 596 </simpara> 597 </listitem> 598 </itemizedlist> 599 </para> 600 </listitem> 601 </varlistentry> 577 602 </variablelist> 578 603 </simplesect> -
branches/webservices/doc/src/docbook/appendix/extended_properties.xml
r3334 r3653 30 30 <title>extended-properties.xml reference</title> 31 31 32 <para> 33 This document is only available in the old format. 34 See <ulink url="http://base.thep.lu.se/chrome/site/doc/admin/extended-properties.html" 35 >http://base.thep.lu.se/chrome/site/doc/admin/extended-properties.html</ulink>. 36 </para> 37 32 33 <bridgehead>What is extended-properties.xml?</bridgehead> 34 35 <para> 36 The <filename>extended-properties.xml</filename> file is a configuration 37 file for customizing some of the tables in the BASE database. 38 It is located in the <filename><basedir>/www/WEB-INF/classes</filename> 39 directory. Only a limited number of tables support this feature, the most important 40 one is the table for storing reporter information. 41 </para> 42 43 <para> 44 The default <filename>extended-properties.xml</filename> that ships 45 with BASE is biased towards the BASE 1.2 setup for 2-spotted microarray 46 data. If you want your BASE installation to be configured differently we 47 recommend that you do it before the first initialisation of the database. 48 It is possible to change the configuration of an existing BASE installation but it 49 may require manual updates to the database. Follow this procedure: 50 </para> 51 52 <orderedlist> 53 <listitem> 54 <para> 55 Shut down the BASE web server. If you have installed job agents you should shut 56 down them as well. 57 </para> 58 </listitem> 59 60 <listitem> 61 <para> 62 Modify the <filename>extended-properties.xml</filename> file. If you have installed 63 job agents, make sure they all have the same version as the web server. 64 </para> 65 </listitem> 66 67 <listitem> 68 <para> 69 Run the <filename>updatedb.sh</filename> script. New 70 columns will automatically be created, but the script can't delete columns that 71 have been removed, or modify columns that have changed datatype. You will have to 72 do these kind of changes by manually executing SQL against your database. Check your 73 database documentation for information about SQL syntax. 74 </para> 75 76 <tip> 77 <title>Create a parallell installation</title> 78 <para> 79 You can always create a new temporary parallell installation to check 80 what the table generated by installation script looks like. Compare the 81 new table to the existing one and make sure they match. 82 </para> 83 </tip> 84 </listitem> 85 86 <listitem> 87 <para> 88 Start up the BASE web server and any job agents, if any, again. 89 </para> 90 </listitem> 91 92 </orderedlist> 93 94 <tip> 95 <title>Start with few columns</title> 96 <para> 97 It is better to start with too few columns, since it is easier to add 98 more columns than it is to remove columns that are not needed. 99 </para> 100 </tip> 101 102 <bridgehead>Sample extended properties setups</bridgehead> 103 104 <itemizedlist> 105 <listitem> 106 <para> 107 After installing BASE the default <filename>extended-properties.xml</filename> 108 is located in the <filename><basedir>/www/WEB-INF/classes</filename> directory. 109 This setup is biased towards the BASE 1.2 setup for 2-spotted cDNA arrays. 110 If you are migrating from BASE 1.2 you need to use the default setup. 111 </para> 112 </listitem> 113 114 <listitem> 115 <para> 116 A <filename>minimalistic_extended-properties.xml</filename> setup which doesn't 117 define any extra columns at all. This file 118 can be found in the <filename><basedir>/misc/config</filename> directory, 119 and should be used if it is not known what reporter data will be stored in the 120 database. 
The addition of more columns later is straightforward. 121 </para> 122 </listitem> 123 </itemizedlist> 124 125 126 <bridgehead>Format of the extended-properties.xml file</bridgehead> 127 <para> 128 The <filename>extended-properties.xml</filename> is an XML file. 129 The following example will serve as a description of the format: 130 </para> 131 132 <programlisting> 133 <?xml version="1.0" ?> 134 <!DOCTYPE extended-properties SYSTEM "extended-properties.dtd"> 135 <extended-properties> 136 <class name="ReporterData"> 137 <property 138 name="extra1" 139 column="extra1" 140 title="Extra property" 141 type="string" 142 length="255" 143 null="true" 144 update="true" 145 insert="true" 146 averagemethod="max" 147 description="An extra property for all reporters" 148 > 149 <link 150 regexp=".*" 151 url="http://www.myexternaldb.com/find?{value}" 152 /> 153 </property> 154 </class> 155 </extended-properties> 156 </programlisting> 157 158 <para> 159 Each table that can be customized is represented by a <sgmltag class="starttag">class</sgmltag> 160 tag. The value of the <sgmltag>name</sgmltag> attribute is the name of the Java class 161 that handles the information in that table. In the case of reporters 162 the class name is <constant>ReporterData</constant>. 163 </para> 164 165 <para> 166 Each <sgmltag class="starttag">class</sgmltag> tag may contain one or more 167 <sgmltag class="starttag">property</sgmltag> tags, each one describing a single 168 column in the table. The possible attributes of the <sgmltag class="starttag">property</sgmltag> 169 tag are: 170 </para> 171 172 <informaltable frame="all"> 173 <tgroup cols="3" align="left"> 174 <colspec colname="attribute" align="left" /> 175 <colspec colname="required" /> 176 <colspec colname="comment" /> 177 <thead> 178 <row> 179 <entry>Attribute</entry> 180 <entry>Required</entry> 181 <entry>Comment</entry> 182 </row> 183 </thead> 184 <tbody> 185 <row> 186 <entry>name</entry> 187 <entry>yes</entry> 188 <entry> 189 A unique name (within the class) of the extra property. 190 The name must only contain letters, numbers and underscores but the first character 191 can't be a number. The name is used to identify the extra column in the Java code 192 and in the Query API. 193 </entry> 194 </row> 195 <row> 196 <entry>column</entry> 197 <entry>yes</entry> 198 <entry> 199 The name of the database column. This value must be unique within the 200 class. Valid names depends on the database, but it should be safe 201 to follow the same rules as for the <sgmltag>name</sgmltag> attribute. 202 In most cases, it makes sense to use the same value for both the 203 <sgmltag>name</sgmltag> and <sgmltag>column</sgmltag> attributes. 204 </entry> 205 </row> 206 <row> 207 <entry>title</entry> 208 <entry>no</entry> 209 <entry> 210 The title of the extra property as it is displayed in client applications. 211 If not specified the value of the <sgmltag>name</sgmltag> attrbiute is used. 212 </entry> 213 </row> 214 <row> 215 <entry>description</entry> 216 <entry>no</entry> 217 <entry> 218 A longer (but still short!) description of the extra property which can be 219 used in client applications to provide help. 220 </entry> 221 </row> 222 <row> 223 <entry>type</entry> 224 <entry>yes</entry> 225 <entry> 226 The data type of the column. 
Allowed values are: 227 <itemizedlist> 228 <listitem> 229 <simpara>int</simpara> 230 </listitem> 231 <listitem> 232 <simpara>long</simpara> 233 </listitem> 234 <listitem> 235 <simpara>float</simpara> 236 </listitem> 237 <listitem> 238 <simpara>double</simpara> 239 </listitem> 240 <listitem> 241 <simpara>boolean</simpara> 242 </listitem> 243 <listitem> 244 <simpara>string</simpara> 245 </listitem> 246 <listitem> 247 <simpara>date</simpara> 248 </listitem> 249 </itemizedlist> 250 251 <para> 252 Note that the given types are converted into the most appropriate database 253 column type by Hibernate. 254 </para> 255 </entry> 256 </row> 257 <row> 258 <entry>length</entry> 259 <entry>no</entry> 260 <entry> 261 If the column is a string type, this is the maximum length that can 262 be stored in the database. If no value is given, 255 is assumed. 263 </entry> 264 </row> 265 <row> 266 <entry>null</entry> 267 <entry>no</entry> 268 <entry> 269 If the column should allow <constant>null</constant> values or not. 270 Allowed values are <constant>true</constant> (default) and 271 <constant>false</constant>. 272 </entry> 273 </row> 274 <row> 275 <entry>insert</entry> 276 <entry>no</entry> 277 <entry> 278 If valus for this property should be inserted into the database or not. 279 Allowed values are <constant>true</constant> (default) and 280 <constant>false</constant>. 281 </entry> 282 </row> 283 <row> 284 <entry>update</entry> 285 <entry>no</entry> 286 <entry> 287 If valus for this property should be updated in the database or not. 288 Allowed values are <constant>true</constant> (default) and 289 <constant>false</constant>. 290 </entry> 291 </row> 292 <row> 293 <entry>averagable</entry> 294 <entry>no</entry> 295 <entry> 296 <emphasis>This attribute has been deprecated and replaced by the 297 <sgmltag>averagemethod</sgmltag> attribute!</emphasis> 298 299 <para> 300 If it makes sense to calculate the average of a set of values 301 for this property or not. By default, all numerical columns are 302 averagable. For non-numerical columns this attribute is ignored. 303 </para> 304 </entry> 305 </row> 306 <row> 307 <entry>averagemethod</entry> 308 <entry>no</entry> 309 <entry> 310 The method to use when calculating the average of a set of values. 311 This attribute replaces the <sgmltag>averagable</sgmltag> attribute. 312 The following values can be used: 313 314 <itemizedlist> 315 <listitem> 316 <simpara> 317 <constant>none</constant>: average values are not supported 318 (default for non-numerical columns) 319 </simpara> 320 </listitem> 321 <listitem> 322 <simpara> 323 <constant>arithmetic_mean</constant>: calculate the arithmetic mean 324 (default for numerical columns; not supported for non-numerical columns) 325 </simpara> 326 </listitem> 327 <listitem> 328 <simpara> 329 <constant>geometric_mean</constant>: calculate the geometric mean 330 (not supported for non-numerical columns) 331 </simpara> 332 </listitem> 333 <listitem> 334 <simpara> 335 <constant>min</constant>: use the minimum value of the values in the set 336 </simpara> 337 </listitem> 338 <listitem> 339 <simpara> 340 <constant>max</constant>: use the maximum value of the values in the set 341 </simpara> 342 </listitem> 343 </itemizedlist> 344 345 </entry> 346 </row> 347 </tbody> 348 </tgroup> 349 </informaltable> 350 351 <para> 352 Each <sgmltag class="starttag">property</sgmltag> tag may contain zero or more 353 <sgmltag class="starttag">link</sgmltag> tags that can be used by client 354 application to provide clickable links to other databases. 
Each 355 <sgmltag class="starttag">link</sgmltag> has a <sgmltag>regexp</sgmltag> 356 and an <sgmltag>url</sgmltag> attribute. If the regular expression matches 357 the value a link will be created, otherwise not. The order of the 358 <sgmltag class="starttag">link</sgmltag> tags are important, since only 359 the first one that matches is used. The <sgmltag>url</sgmltag> attribute may 360 contain the string <constant>{value}</constant> which will be replaced by the 361 actual value when the link is generated. 362 </para> 363 364 <note> 365 <para> 366 If the link contains the character <constant>&</constant> it must be 367 escaped as <constant>&amp;</constant>. For example, to link to a Unigene 368 entry: 369 </para> 370 <programlisting> 371 <link 372 regexp="\w+\.\d+" 373 url="http://www.ncbi.nlm.nih.gov/entrez/query.fcgi?db=unigene&amp;term={value}[ClusterID]" 374 /> 375 </programlisting> 376 </note> 377 378 38 379 </appendix> 39 380 -
branches/webservices/doc/src/docbook/appendix/incompatible.xml
r3533 r3653 79 79 old behaviour use <code>ch(1) == 'NULL'</code>. 80 80 </para> 81 82 <bridgehead>Parsing strings into numeric values</bridgehead> 83 <para> 84 The <methodname>Type.parseString(String)</methodname> method for 85 <constant>Type.FLOAT</constant> and <constant>Type.DOUBLE</constant> 86 has changed it's behaviour for <constant>NaN</constant> 87 and <constant>Infinity</constant> values. The methods used to 88 return <constant>Float.NaN</constant>, <constant>Float.NEGATIVE_INFINITY</constant>, 89 <constant>Float.POSITIVE_INFINITY</constant> or the corresponding 90 <classname>Double</classname> values. Since databases doesn't like 91 these special values and eventually most values will go into the database, 92 the <methodname>parseString</methodname> method now returns <constant>null</constant> 93 instead. 94 </para> 95 96 <bridgehead>Extended properties and raw data types</bridgehead> 97 <para> 98 We have added validation code to check for invalid values. If you 99 have modified the <filename>extended-properties.xml</filename> 100 or the <filename>raw-data-types.xml</filename> file and they 101 contain invalid values, you may not be able to start BASE until 102 they are fixed. The validation is rather strict and things that may 103 have worked before (because you were lucky or the because the database 104 has been forgiving) may no longer work. Here is an overview of the most 105 important validation rules: 106 </para> 107 108 <itemizedlist> 109 <listitem> 110 <para> 111 Names and identifiers for extended properties and raw data type 112 can only contain letters, digits and underscores. They must not 113 start with a digit. 114 </para> 115 </listitem> 116 <listitem> 117 <para> 118 Names of database tables and columns can only contain letters, 119 digits and underscores. They must not start with a digit. 120 </para> 121 </listitem> 122 <listitem> 123 <para> 124 There mustn't be any duplicate tables, columns, properties, etc. 125 for a given context. For example, no duplicate tables in the 126 database, no duplicate columns in a table, and no duplicate 127 properties for a raw data type. 128 </para> 129 </listitem> 130 </itemizedlist> 131 81 132 </sect1> 82 133 -
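The incompatible change to Type.parseString() described in the section above can be illustrated with a short sketch. This is an illustrative example and not code from the changeset: it assumes the return value can be treated as a plain Object, and the class name ParseStringCheck is invented.

import net.sf.basedb.core.Type;

public class ParseStringCheck
{
  public static void main(String[] args)
  {
    // Before this release these calls returned Float.NaN and Double.POSITIVE_INFINITY;
    // with the new behaviour both are expected to return null instead.
    Object f = Type.FLOAT.parseString("NaN");
    Object d = Type.DOUBLE.parseString("Infinity");
    System.out.println("FLOAT 'NaN' -> " + f);       // expected: null
    System.out.println("DOUBLE 'Infinity' -> " + d); // expected: null
  }
}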
branches/webservices/doc/src/docbook/developerdoc/plugin_developer.xml
r3581 r3653 76 76 <para> 77 77 In the root of your directory, create the build file: 78 <filename>build.xml</filename> 79 .Here is an example that will compile your plug-in and put it in a JAR file.78 <filename>build.xml</filename>. 79 Here is an example that will compile your plug-in and put it in a JAR file. 80 80 </para> 81 81 <example id="plugin_developer.organize.build.file"> … … 383 383 to be able to function as expected. This method may return null 384 384 or an empty collection. In this case the plug-in permission system 385 is n't used and the plug-in always getthe same permissions as the385 is not used and the plug-in always gets the same permissions as the 386 386 logged in user. If permissions are specified the plug-in should 387 list all permissions it require . Permissions that are not listed387 list all permissions it requires. Permissions that are not listed 388 388 are denied. 389 389 </para> … … 518 518 <para> 519 519 The <varname>progress</varname> parameter 520 can be used by a plug-in to report it 's progress back to the core. The520 can be used by a plug-in to report its progress back to the core. The 521 521 core will usually send the progress information to the database, which 522 522 allows users to see exactly how the plug-in is progressing from the web … … 550 550 <example id="net.sf.basedb.core.plugin.Plugin.run"> 551 551 <title> 552 Here is a skeleton that we recommend each plug-in to use in it 's552 Here is a skeleton that we recommend each plug-in to use in its 553 553 implementation of the 554 554 <methodname>run()</methodname> … … 881 881 <listitem> 882 882 <para> 883 Ask the plug-in for parameters that need sto be entered by the user. The883 Ask the plug-in for parameters that need to be entered by the user. The 884 884 <classname>GuiContext</classname> 885 885 parameter is one of the contexts returned by the … … 887 887 method. The command is a string telling the plug-in what command was 888 888 executed. There are two predefined commands but as you will see the 889 plug-in may define it 's own commands. The two predefined commands are889 plug-in may define its own commands. The two predefined commands are 890 890 defined in the 891 891 <classname>net.sf.basedb.core.plugin.Request</classname> … … 1582 1582 The <methodname>Plugin.init()</methodname> method is called. 1583 1583 The <varname>job</varname> parameter contains the job 1584 configuration param ters. The <varname>configuration</varname> parameter1584 configuration parameters. The <varname>configuration</varname> parameter 1585 1585 is <constant>null</constant> if the plug-in does not have any 1586 1586 configuration parameters. … … 1706 1706 object created by your plug-in. 
1707 1707 </para> 1708 <programlisting>// Get session control and it 's ID (required to post to index.jsp)1708 <programlisting>// Get session control and its ID (required to post to index.jsp) 1709 1709 final SessionControl sc = Base.getExistingSessionControl(pageContext, true); 1710 1710 final String ID = sc.getId(); … … 1778 1778 try 1779 1779 { 1780 ffp.nextSection(); 1780 1781 FlatFileParser.LineType result = ffp.parseHeaders(); 1781 return result != FlatFileParser.LineType.UNKNOWN; 1782 if (result == FlatFileParser.LineType.UNKNOWN) 1783 { 1784 return false; 1785 } 1786 else 1787 { 1788 return isImportable(ffp); 1789 } 1782 1790 } 1783 1791 catch (IOException ex) … … 1937 1945 <listitem> 1938 1946 <para> 1939 Implement the <methodname>Plugin.getAbout()</methodname> and 1940 <methodname>Plugin.getMainType()</methodname> methods. See 1947 Implement the <methodname>Plugin.getAbout()</methodname> method. See 1941 1948 <xref linkend="plugin_developer.api.interfaces.plugin" /> for more information. 1942 1949 </para> … … 2054 2061 ffp.setMinDataColumns(12); 2055 2062 return ffp; 2063 } 2064 </programlisting> 2065 </listitem> 2066 </varlistentry> 2067 2068 <varlistentry> 2069 <term> 2070 <methodsynopsis language="java"> 2071 <modifier>protected</modifier> 2072 <type>boolean</type> 2073 <methodname>isImportable</methodname> 2074 <methodparam> 2075 <type>FlatFileParser</type> 2076 <parameter>ffp</parameter> 2077 </methodparam> 2078 <exceptionname>IOException</exceptionname> 2079 </methodsynopsis> 2080 </term> 2081 <listitem> 2082 <para> 2083 This method is called from the <methodname>isImportable(InputStream)</methodname> 2084 method, AFTER <methodname>FlatFileParser.nextSection()</methodname> and 2085 <methodname>FlatFileParser.parseHeaders()</methodname> has been called 2086 a single time and if the <methodname>parseHeaders</methodname> method didn't 2087 stop on an unknown line. The default implementation of this method always returns 2088 TRUE, since obviously some data has been found. A subclass may override this method 2089 if it wants to do more checks, for example, make that a certain header is present 2090 with a certain value. It may also continut parsing the file. Here is a code example from 2091 the <classname>PrintMapFlatFileImporter</classname> which checks if a 2092 <constant>FormatName</constant> header is present and contains either 2093 <constant>TAM</constant> or <constant>MwBr</constant>. 2094 </para> 2095 2096 <programlisting> 2097 /** 2098 Check that the file is a TAM or MwBr file. 2099 @return TRUE if a FormatName header is present and contains "TAM" or "MwBr", FALSE 2100 otherwise 2101 */ 2102 @Override 2103 protected boolean isImportable(FlatFileParser ffp) 2104 { 2105 String formatName = ffp.getHeader("FormatName"); 2106 return formatName != null && 2107 (formatName.contains("TAM") || formatName.contains("MwBr")); 2056 2108 } 2057 2109 </programlisting> … … 2464 2516 <listitem> 2465 2517 <para> 2466 Set the MIME type of the file that is be eing generated.2518 Set the MIME type of the file that is being generated. 2467 2519 </para> 2468 2520 </listitem> … … 2482 2534 <listitem> 2483 2535 <para> 2484 Set a suggested name of the file that is be eing2536 Set a suggested name of the file that is being 2485 2537 generated. 2486 2538 </para> … … 2866 2918 <para> 2867 2919 Now, the typical <methodname>Plugin.run()</methodname> method loads the specfied bioassay set 2868 and it 's spot data. It may do some filtering and recalculation of the spot2920 and its spot data. 
It may do some filtering and recalculation of the spot 2869 2921 intensity value(s). In most cases it will store the result as a child bioassay 2870 2922 set with one bioassay for each bioassay in the parent bioassay set. … … 2938 2990 <para> 2939 2991 This class is an abstract base class. It is a useful 2940 class for most analysis plug-ins to inherit from. It 's main2992 class for most analysis plug-ins to inherit from. Its main 2941 2993 purpose is to define <classname>PluginParameter</classname> 2942 2994 objects that are commonly used in analysis plug-ins. This includes: … … 2954 3006 <listitem> 2955 3007 <para> 3008 The optional restriction of which bioassays to use. 3009 All bioassays in a bioassay set will be used if this 3010 parameter is empty. This is useful when the plugin only 3011 should run on a subset of bioassays in a bioassay set: 3012 <methodname>getSourceBioAssaysParameter()</methodname>, 3013 <methodname>getSourceBioAssays()</methodname> 3014 </para> 3015 </listitem> 3016 <listitem> 3017 <para> 2956 3018 The name and description of the child bioassay set that 2957 3019 is going to be created by the plug-in: … … 2970 3032 </itemizedlist> 2971 3033 3034 </sect2> 3035 3036 <sect2 id="plugin_developer.analyse.filterplugin"> 3037 <title>The AnalysisFilterPlugin interface</title> 3038 3039 <para> 3040 The <interfacename>net.sf.basedb.core.plugin.AnalysisFilterPlugin</interfacename> 3041 is a tagging interface with no methods that all analysis plug-ins that only filters 3042 data should implement. The benefit is that they will be linked from the 3043 <guibutton>Filter bioassay set</guibutton> button and not just 3044 the <guibutton>Run analysis</guibutton> button. They will also get 3045 a different icon in the experiment outline to make filtering 3046 transformations appear different from other transformations. 3047 </para> 3048 3049 <para> 3050 The interface exists purely for making the user interaction better. There is 3051 no harm in not implementing it since the plug-in will always appear in 3052 from the <guibutton>Run analysis</guibutton> button. On the other hand, 3053 it doesn't cost anything to implement the interface since it doesn't 3054 have any methods. 3055 </para> 3056 2972 3057 </sect2> 2973 3058 … … 2995 3080 BASE can authenticate users in two ways. Either it uses the internal 2996 3081 authentiction or the external authentication. With internal 2997 authentication BASE stores logins and passwords in it 's own database.3082 authentication BASE stores logins and passwords in its own database. 2998 3083 With external authentication this is handled by some external 2999 3084 application. Even with external authentication it is possible to … … 3103 3188 interface. Specify the name of the class in the <property>auth.driver</property> 3104 3189 setting in <filename>base.config</filename> and 3105 it 's initialisation parameters in the <property>auth.init</property> setting.3190 its initialisation parameters in the <property>auth.init</property> setting. 3106 3191 </para> 3107 3192 … … 3130 3215 <listitem> 3131 3216 <para> 3132 This method is called just after the object has been created with it 's argument3217 This method is called just after the object has been created with its argument 3133 3218 taken from the <property>auth.init</property> setting in your <filename>base.config</filename> 3134 3219 file. This method is only called once for an instance of the object. 
The syntax and meaning of … … 3207 3292 <exceptionname>AuthenticationException</exceptionname>: 3208 3293 In case there is another problem, such as the authentication service 3209 be eing down. This exception triggers the use of cached passwords3294 being down. This exception triggers the use of cached passwords 3210 3295 if caching has been enabled. 3211 3296 </para> … … 3285 3370 <sect2 id="plugin_developer.other.secondary"> 3286 3371 <title>Secondary file storage plugins</title> 3287 <para> 3288 This documentation is only available in the old format. 3289 See <ulink url="http://base.thep.lu.se/chrome/site/doc/development/plugins/storage/index.html" 3290 >http://base.thep.lu.se/chrome/site/doc/development/plugins/storage/index.html</ulink> 3291 </para> 3372 3373 <sect3 id="plugin_developer.other.secondary.vsprimary"> 3374 <title>Primary vs. secondary storage</title> 3375 <para> 3376 BASE has support for storing files in two locations, the primary storage and 3377 the secondary storage. The primary storage is always disk-based and must be 3378 accessible by the BASE server as a path on the file system. The path to the 3379 primary storage is configured by the <varname>userfiles</varname> setting in the 3380 <filename>base.config</filename> file. The primary storage is internal to 3381 the core. Client applications don't get access to read or manipulate the 3382 files directly from the file system. 3383 </para> 3384 3385 <para> 3386 The secondary storage can be anything that can store files. It could, for 3387 example, be another directory, a remote FTP server, or a tape based archiving 3388 system. A file located in the secondary storage is not accessible by the 3389 core, client applications or plug-ins. The secondary storage can only be accessed 3390 by the secondary storage controller. The core (and client) applications uses 3391 flags on the file items to handle the interaction with the secondary storage. 3392 </para> 3393 3394 <para> 3395 Each file has an <property>action</property> attribute which default's to 3396 <constant>File.Action.NOTHING</constant>. It can take two other values: 3397 </para> 3398 3399 <orderedlist> 3400 <listitem> 3401 <para> 3402 <constant>File.Action.MOVE_TO_SECONDARY</constant> 3403 </para> 3404 </listitem> 3405 <listitem> 3406 <para> 3407 <constant>File.Action.MOVE_TO_PRIMARY</constant> 3408 </para> 3409 </listitem> 3410 </orderedlist> 3411 3412 <para> 3413 All files with the action attribute set to <constant>MOVE_TO_SECONDARY</constant> 3414 should be moved to the secondary storage by the controller, and all files 3415 with the action attribute set to <constant>MOVE_TO_PRIMARY</constant> should be 3416 brought back to primary storage. 3417 </para> 3418 3419 <para> 3420 The moving of files between primary and secondary storage doesn't happen 3421 immediately. It is up to the server administrator to configure how often and 3422 at what times the controller should check for files that should be moved. 3423 This is configured by the <varname>secondary.storage.interval</varname> 3424 and <varname>secondary.storage.time</varname> settings in the 3425 <filename>base.config</filename> file. 
3426 </para> 3427 </sect3> 3428 3429 <sect3 id="plugin_developer.other.secondary.interface"> 3430 <title>The SecondaryStorageController interface</title> 3431 3432 <para> 3433 All you have to do to create a secondary storage controller is to 3434 create a class that implements the 3435 <interfacename>net.sf.basedb.core.SecondaryStorageController</interfacename> 3436 interface. In your <filename>base.config</filename> file you then specify the 3437 class name in the <varname>secondary.storage.driver</varname> setting and its 3438 initialisation parameters in the <varname>secondary.storage.init</varname> setting. 3439 </para> 3440 3441 <para> 3442 Your class must have a public no-argument constructor. 3443 The BASE application will create only one instance of the class for 3444 lifetime of the BASE server. Here are the methods that you must implement: 3445 </para> 3446 3447 <variablelist> 3448 <varlistentry> 3449 <term> 3450 <methodsynopsis language="java"> 3451 <modifier>public</modifier> 3452 <void /> 3453 <methodname>init</methodname> 3454 <methodparam> 3455 <type>String</type> 3456 <parameter>settings</parameter> 3457 </methodparam> 3458 </methodsynopsis> 3459 </term> 3460 <listitem> 3461 <para> 3462 This method is called just after the object has been created with its argument 3463 taken from the <varname>secondary.storage.init</varname> setting in your 3464 <filename>base.config</filename> file. This method is only called once for 3465 an object. 3466 </para> 3467 </listitem> 3468 </varlistentry> 3469 <varlistentry> 3470 <term> 3471 <methodsynopsis language="java"> 3472 <modifier>public</modifier> 3473 <void /> 3474 <methodname>run</methodname> 3475 </methodsynopsis> 3476 </term> 3477 <listitem> 3478 <para> 3479 This method is called whenever the core thinks it is time to do some 3480 management of the secondary storage. How often the <methodname>run()</methodname> 3481 method is called is controlled by the <varname>secondary.storage.interval</varname> 3482 and <varname>secondary.storage.time</varname> settings in the 3483 <filename>base.config</filename> file. 3484 When this method is called the controller should: 3485 </para> 3486 3487 <itemizedlist> 3488 <listitem> 3489 <para> 3490 Move all files which has <constant>action=MOVE_TO_SECONDARY</constant> to 3491 the secondary storage. When the file has been moved call 3492 <methodname>File.setLocation(Location.SECONDARY)</methodname> to tell the 3493 core that the file is now in the secondary storage. You should also call 3494 <methodname>File.setAction(File.Action.NOTHING)</methodname> to reset the 3495 action attribute. 3496 </para> 3497 </listitem> 3498 3499 <listitem> 3500 <para> 3501 Restore all files which has <constant>action=MOVE_TO_PRIMARY</constant>. 3502 The core will set the location attribute automatically, but you should 3503 call <methodname>File.setAction(File.Action.NOTHING)</methodname> to reset 3504 the action attribute. 3505 </para> 3506 </listitem> 3507 3508 <listitem> 3509 <para> 3510 Delete all files from the secondary storage that are not present 3511 in the database with <constant>location=Location.SECONDARY</constant>. 3512 This includes files which has been deleted and files that have been 3513 moved offline or re-uploaded. 3514 </para> 3515 </listitem> 3516 3517 </itemizedlist> 3518 3519 <para> 3520 As a final act the method should send a message to each user owning 3521 files that has been moved from one location to the other. 
The message 3522 should include a list of files that has been moved to the secondary 3523 storage and a list of files moved from the secondary storage and a 3524 list of files that has been deleted due to some of the reasons above. 3525 </para> 3526 </listitem> 3527 </varlistentry> 3528 3529 <varlistentry> 3530 <term> 3531 <methodsynopsis language="java"> 3532 <modifier>public</modifier> 3533 <void /> 3534 <methodname>close()</methodname> 3535 </methodsynopsis> 3536 </term> 3537 <listitem> 3538 <para> 3539 This method is called when the server is closing down. After this the object 3540 is never used again. 3541 </para> 3542 </listitem> 3543 </varlistentry> 3544 </variablelist> 3545 </sect3> 3546 3547 <sect3 id="plugin_developer.other.secondary.settings"> 3548 <title>Configuration settings</title> 3549 3550 <para> 3551 The configuration settings for the secondary storage controller is located in the 3552 <filename>base.config</filename> file. Here is an overview of the settings. 3553 For more information read <xref linkend="appendix.base.config" />. 3554 </para> 3555 3556 <variablelist> 3557 <varlistentry> 3558 <term><property>secondary.storage.driver</property></term> 3559 <listitem> 3560 <para> 3561 The class name of the secondary storage plug-in. 3562 </para> 3563 </listitem> 3564 </varlistentry> 3565 <varlistentry> 3566 <term><property>secondary.storage.init</property></term> 3567 <listitem> 3568 <para> 3569 Initialisation parameters sent to the plug-in by calling the 3570 <methodname>init()</methodname> method. 3571 </para> 3572 </listitem> 3573 </varlistentry> 3574 <varlistentry> 3575 <term><property>secondary.storage.interval</property></term> 3576 <listitem> 3577 <para> 3578 Interval in seconds between each execution of the secondary storage 3579 controller plug-in. 3580 </para> 3581 </listitem> 3582 </varlistentry> 3583 <varlistentry> 3584 <term><property>secondary.storage.time</property></term> 3585 <listitem> 3586 <para> 3587 Time points during the day when the secondary storage controller plugin 3588 should be executed. 3589 </para> 3590 </listitem> 3591 </varlistentry> 3592 </variablelist> 3593 </sect3> 3292 3594 </sect2> 3293 3595 … … 3300 3602 BASE distribution comes with support for ZIP files 3301 3603 (<classname>net.sf.basedb.plugins.ZipFileUnpacker</classname>) 3302 and TAR files (<classname>net.sf.basedb.plugins. ZipFileUnpacker</classname>).3604 and TAR files (<classname>net.sf.basedb.plugins.TarFileUnpacker</classname>). 3303 3605 </para> 3304 3606 <para> … … 3496 3798 3497 3799 </sect2> 3800 3801 <sect2 id="plugin_developer.other.packer"> 3802 <title>File packer plug-ins</title> 3803 3804 <para> 3805 BASE has support for compressing and downloading a set of selected files and/or 3806 directories. This functionality is provided by a plug-in, the 3807 <classname>PackedFileExporter</classname>. This plug-in doesn't do the actual 3808 packing itself. This is delegated to classes implementing the 3809 <interfacename>net.sf.basedb.util.zip.FilePacker</interfacename> interface. 3810 </para> 3811 3812 <para> 3813 BASE ships with a number of packing methods, including ZIP and TAR. To 3814 add support for other methods you have to provide an implementation 3815 of the <interfacename>FilePacker</interfacename> 3816 interface. Then, create a new configuration for the <classname>PackerFileExporter</classname> 3817 and enter the name of your class in the configuration wizard. 
3818 </para> 3819 3820 <para> 3821 The <interfacename>FilePacker</interfacename> interface is not a regular 3822 plug-in interface (ie. it is not a subinterface to 3823 <interfacename>Plugin</interfacename>). This means that you don't have to 3824 mess with configuration or job parameters. Another difference is that your 3825 class must be installed in Tomcat's classpath (ie. in one of the 3826 <filename>WEB-INF/classes</filename> or <filename>WEB-INF/lib</filename> 3827 folders). 3828 </para> 3829 3830 <variablelist> 3831 <title>Methods in the <interfacename>FilePacker</interfacename> interface</title> 3832 <varlistentry> 3833 <term> 3834 <methodsynopsis language="java"> 3835 <modifier>public</modifier> 3836 <type>String</type> 3837 <methodname>getDescription</methodname> 3838 </methodsynopsis> 3839 </term> 3840 <listitem> 3841 <para> 3842 Return a short description the file format that is suitable for use 3843 in dropdown lists in client applications. For example: 3844 <constant>Zip-archive (.zip)</constant> or <constant>TAR-archive (.tar)</constant>. 3845 </para> 3846 </listitem> 3847 </varlistentry> 3848 <varlistentry> 3849 <term> 3850 <methodsynopsis language="java"> 3851 <modifier>public</modifier> 3852 <type>String</type> 3853 <methodname>getFileExtension</methodname> 3854 </methodsynopsis> 3855 </term> 3856 <listitem> 3857 <para> 3858 Return the default file extension of the packed format. The returned 3859 value should not include the dot. For example: 3860 <constant>zip</constant> or <constant>tar</constant>. 3861 </para> 3862 </listitem> 3863 </varlistentry> 3864 <varlistentry> 3865 <term> 3866 <methodsynopsis language="java"> 3867 <modifier>public</modifier> 3868 <type>String</type> 3869 <methodname>getMimeType</methodname> 3870 </methodsynopsis> 3871 </term> 3872 <listitem> 3873 <para> 3874 Return the standard MIME type of the packed file format. 3875 For example: 3876 <constant>application/zip</constant> or <constant>application/x-tar</constant>. 3877 </para> 3878 </listitem> 3879 </varlistentry> 3880 <varlistentry> 3881 <term> 3882 <methodsynopsis language="java"> 3883 <modifier>public</modifier> 3884 <void /> 3885 <methodname>setOutputStream</methodname> 3886 <methodparam> 3887 <type>OutputStream</type> 3888 <parameter>out</parameter> 3889 </methodparam> 3890 <exceptionname>IOException</exceptionname> 3891 </methodsynopsis> 3892 </term> 3893 <listitem> 3894 <para> 3895 Sets the outputstream that the packer should write the packed 3896 files to. 3897 </para> 3898 </listitem> 3899 </varlistentry> 3900 <varlistentry> 3901 <term> 3902 <methodsynopsis language="java"> 3903 <modifier>public</modifier> 3904 <void /> 3905 <methodname>pack</methodname> 3906 <methodparam> 3907 <type>String</type> 3908 <parameter>entryName</parameter> 3909 </methodparam> 3910 <methodparam> 3911 <type>InputStream</type> 3912 <parameter>in</parameter> 3913 </methodparam> 3914 <methodparam> 3915 <type>long</type> 3916 <parameter>size</parameter> 3917 </methodparam> 3918 <methodparam> 3919 <type>long</type> 3920 <parameter>lastModified</parameter> 3921 </methodparam> 3922 <exceptionname>IOException</exceptionname> 3923 </methodsynopsis> 3924 </term> 3925 <listitem> 3926 <para> 3927 Add another file or directory to the packed file. The 3928 <parameter>entryName</parameter> is the name of the new entry, including 3929 path information. The <parameter>in</parameter> is the stream to read 3930 the file data from. 
If <parameter>in</parameter> is <constant>null</constant> 3931 then the entry denotes a directory. The <parameter>size</parameter> parameter 3932 gives the size in bytes of the file (zero for empty files or directories). 3933 The <parameter>lastModified</parameter> 3934 is that time the file was last modified or 0 if not known. 3935 </para> 3936 </listitem> 3937 </varlistentry> 3938 <varlistentry> 3939 <term> 3940 <methodsynopsis language="java"> 3941 <modifier>public</modifier> 3942 <void /> 3943 <methodname>close</methodname> 3944 <exceptionname>IOException</exceptionname> 3945 </methodsynopsis> 3946 </term> 3947 <listitem> 3948 <para> 3949 Finish the packing. The packer should release any resources, flush 3950 all data and close all output streams, including the <varname>out</varname> stream 3951 set in the <methodname>setOutputStream</methodname> method. 3952 </para> 3953 </listitem> 3954 </varlistentry> 3955 3956 </variablelist> 3957 3958 </sect2> 3498 3959 </sect1> 3499 3960 … … 3501 3962 <title>Example plug-ins (with download)</title> 3502 3963 <para> 3503 <para> 3504 This documentation is only available in the old format. 3505 See <ulink url="http://base.thep.lu.se/chrome/site/doc/development/index.html#plugins" 3506 >http://base.thep.lu.se/chrome/site/doc/development/index.html#plugins</ulink> 3507 </para> 3964 We have created some example plug-ins which demonstrates how to 3965 use the plug-in system and how to create an interactive plug-in that 3966 can ask a user for one or more parameters. 3967 3968 <ulink url="../../../download/exampleplugins.tar.gz">Download</ulink> 3969 a tar file with the source and compiled code. It contains the followin 3970 plug-ins: 3508 3971 </para> 3972 3973 <variablelist> 3974 <varlistentry> 3975 <term>ExampleImporter</term> 3976 <listitem> 3977 <para> 3978 An import plug-in that pretends to import samples. It will ask for a file 3979 and if existing samples should be updated or not, but doesn't actually 3980 import anything. 3981 </para> 3982 </listitem> 3983 </varlistentry> 3984 3985 <varlistentry> 3986 <term>ExampleAnalyzer</term> 3987 <listitem> 3988 <para> 3989 An analysis plug-in that asks for a multiplication factor and a cut-off value. 3990 It will create a child bioassay set by multiplying the original intensities 3991 with the given factor and filter out those with original intensities less 3992 than the cut-off. It works for any number of channels and raw data types 3993 </para> 3994 </listitem> 3995 </varlistentry> 3996 </variablelist> 3997 3509 3998 </sect1> 3510 3999 </chapter> -
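The FilePacker method list documented in the section above is enough to sketch a custom packer. The following is a minimal, hypothetical implementation (the class name SimpleZipPacker is invented, and the ZipFilePacker that ships with BASE may be organised differently) that writes a ZIP archive using only the standard java.util.zip classes:

import net.sf.basedb.util.zip.FilePacker;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class SimpleZipPacker
  implements FilePacker
{
  private ZipOutputStream zip;

  public String getDescription()
  {
    return "Simple ZIP archive (.zip)";
  }

  public String getFileExtension()
  {
    return "zip";
  }

  public String getMimeType()
  {
    return "application/zip";
  }

  public void setOutputStream(OutputStream out)
    throws IOException
  {
    // Wrap the stream provided by the PackedFileExporter
    zip = new ZipOutputStream(out);
  }

  public void pack(String entryName, InputStream in, long size, long lastModified)
    throws IOException
  {
    // A null input stream denotes a directory; ZIP marks directories with a trailing slash
    ZipEntry entry = new ZipEntry(in == null ? entryName + "/" : entryName);
    if (lastModified > 0) entry.setTime(lastModified);
    zip.putNextEntry(entry);
    if (in != null)
    {
      byte[] buffer = new byte[8192];
      int numRead;
      while ((numRead = in.read(buffer)) != -1)
      {
        zip.write(buffer, 0, numRead);
      }
    }
    zip.closeEntry();
  }

  public void close()
    throws IOException
  {
    // Flushes all data and closes the underlying output stream, as required by the interface
    zip.close();
  }
}

As described in the section above, such a class only needs to be placed on Tomcat's classpath and entered as the packer class in a PackedFileExporter configuration to become selectable.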
branches/webservices/doc/src/docbook/userdoc/annotations.xml
r3533 r3653 692 692 </sect2> 693 693 694 <sect2 id="annotations.massimport"> 695 <title>Mass annotation import plug-in</title> 696 697 <para> 698 BASE includes a plug-in for importing annotations to multiple items 699 in one go. The plug-in read annotation values from a simple column-based 700 text file. Ususally, a tab is used as the delimiter between columns. 701 The first row should contain the column headers. One column should contain 702 the name or the external ID of the item. The rest of the columns can each be 703 mapped to an annotation type and contains the annotation values. If a column 704 header exactly match the name of an annotation type, the plug-in will automatically 705 create the mapping, otherwise you must do it manually. You don't have to map 706 all columns if you don't want to. 707 </para> 708 709 <para> 710 Each column can only contain a single annotation value for each row. 711 If you have annotation types that accept multiple values you can map 712 two or more columns to the same annotation type, or you can add an 713 extra row only giving the name and the extra annotation value. 714 Here is a simple example of a valid file with comma as column separator: 715 </para> 716 717 <programlisting> 718 # 'Time' and 'Age' are integer types 719 # 'Subtype' is a string enumeration 720 # 'Comment' is a text type that accept multiple values 721 Name,Time (hours),Age (years),Subtype,Comment 722 Sample #1,0,0,alfa,Very good 723 Sample #2,24,0,beta,Not so bad 724 Sample #2,,,,Yet another comment 725 </programlisting> 726 727 <para> 728 The plug-in can be used with or without a configuration. The configuration 729 keeps the regular expressions and other settings used to parse the file. If 730 you often import annotations from the same file format, we recommend that 731 you use a configuration. The mapping from file columns to annotation types 732 is not part of the configuration, it must be done each time the plug-in is used. 733 </para> 734 735 <para> 736 The plug-in can be used from the list view of all annotatable items. 737 Using the plug-in is a three-step wizard: 738 </para> 739 740 <orderedlist> 741 <listitem> 742 <para> 743 Select a file to import from and the regular expressions and other 744 settings used to parse the file. In this step you also select the column 745 that contains the name or external ID the items. If a configuration is used 746 all settings on this page, except the file to import from, already has values. 747 </para> 748 </listitem> 749 750 <listitem> 751 <para> 752 The plug-in will start parsing the file until it finds the column headers. 753 You are asked to select an annotation type for each column. 754 </para> 755 </listitem> 756 757 <listitem> 758 <para> 759 Set error handling options and some other import options. 760 </para> 761 </listitem> 762 763 </orderedlist> 764 765 </sect2> 766 694 767 </sect1> 695 768 -
branches/webservices/doc/src/docbook/userdoc/file_system.xml
r3581 r3653 685 685 </para> 686 686 </sect3> 687 <sect3 id="file_system.handling.actions.zippeddownload"> 688 <title>Download/compress multiple files</title> 689 690 <para> 691 You can download multiple files/directories at the same time. First, 692 from the file browser, select one or more files/directories. Then, click 693 on the &gbExport; button. Select the <userinput>Packed file exporter</userinput> 694 plug-in and choose one of the file formats below it. On the &gbNext; 695 page you can specify other options for the download: 696 </para> 697 698 <itemizedlist> 699 <listitem> 700 <para> 701 <guilabel>Save as</guilabel>: The path to a file on the BASE file system 702 where the selected files and directories should be packed. Leave this 703 field empty to download the files to your own computer. 704 </para> 705 </listitem> 706 <listitem> 707 <para> 708 <guilabel>Overwrite</guilabel>: If you are saving to the BASE file system 709 you may select if it is allowed to overwrite an existing file or not. 710 </para> 711 </listitem> 712 <listitem> 713 <para> 714 <guilabel>Remove files/directories</guilabel>: If you select this option 715 the selected files and directories will be marked as removed. You must still 716 go to the <guilabel>Trashcan</guilabel> and remove the items permanently. 717 </para> 718 </listitem> 719 </itemizedlist> 720 721 </sect3> 687 722 </sect2> 688 723 <sect2 id="file_system.handling.directories"> -
branches/webservices/doc/src/docbook/userdoc/jobs.xml
Property svn:keywords set to Id Date
-
branches/webservices/doc/src/docbook/userdoc/reporters.xml
r3533 r3653 322 322 323 323 <para> 324 Reporters which has been referenced to from r aw data, array324 Reporters which has been referenced to from reporter lists, raw data, array 325 325 designs, plates or any other item cannot be deleted. 326 326 </para> 327 328 <sect3 id="reporter.delete.batch"> 329 <title>Batch deletion</title> 330 <para> 331 A common problem is to delete reporters that has been accidentaly 332 created. The regular web interface is usually no good since it 333 only allows you to select at most 99 reporters at a time. To solve 334 this problem the reporter import plug-in can be used in delete mode. 335 You can use the same file as you used when importing. Just select 336 the <userinput>delete</userinput> option for the <guilabel>mode</guilabel> 337 parameter in the configuration wizard and continue as usual. 338 If the plug-in is used in delete mode from a reporter list it will 339 only remove the reporters from the reporter list. The reporters are not 340 deleted from the database. 341 </para> 342 343 <note> 344 <para> 345 It may be a bit confusing to delete things from an import plug-in. But 346 since plug-ins can only belong to one category and we wanted to re-use 347 existing file format definitions this was our only option. 348 </para> 349 </note> 350 </sect3> 351 327 352 </sect2> 328 353 -
branches/webservices/lib/docbook/module-fop/lib/batik.LICENSE.txt
Property svn:eol-style set to native
Property svn:keywords set to Id Date
-
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/FileTransfer.java
r3533 r3653 154 154 fis.close(); 155 155 return md5.toString(16); 156 } 157 else 158 { 159 log.warn("BASE 1 file doesn't exists: " + file.toString()); 156 160 } 157 161 } -
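The md5.toString(16) call in the FileTransfer diff above suggests that the checksum is held in a BigInteger built from a MessageDigest. A minimal sketch of that pattern, using an invented helper class for illustration only, could look like this:

import java.io.FileInputStream;
import java.io.IOException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Md5Example
{
  public static String md5Hex(String path)
    throws IOException, NoSuchAlgorithmException
  {
    MessageDigest digest = MessageDigest.getInstance("MD5");
    FileInputStream fis = new FileInputStream(path);
    try
    {
      byte[] buffer = new byte[8192];
      int numRead;
      while ((numRead = fis.read(buffer)) != -1)
      {
        digest.update(buffer, 0, numRead);
      }
    }
    finally
    {
      fis.close();
    }
    // Signum 1 gives a non-negative value; toString(16) matches the pattern used in the diff.
    // Note that leading zero bytes are not padded in the resulting hex string.
    return new BigInteger(1, digest.digest()).toString(16);
  }
}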
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/Migrater.java
r3533 r3653 38 38 import java.sql.SQLException; 39 39 import java.util.HashMap; 40 import java.util.HashSet; 40 41 import java.util.Properties; 42 import java.util.Set; 41 43 import java.util.regex.Matcher; 42 44 import java.util.regex.Pattern; … … 52 54 public class Migrater extends Manager 53 55 { 56 57 public static Set<String> supportedBase1Versions = new HashSet<String>(); 58 static 59 { 60 supportedBase1Versions.add("1.2.20"); 61 supportedBase1Versions.add("1.2.21"); 62 } 63 54 64 /** 55 65 Holds properties defined in migrate.properties … … 431 441 String schemaVersion = rs.getString(2); 432 442 ps.close(); 433 if (!s chemaVersion.equals("1.2.20"))443 if (!supportedBase1Versions.contains(schemaVersion)) 434 444 { 435 445 log.error("Migration cannot proceed due to incompatible schema version. Your schema version is " 436 + schemaVersion + " and this migration needs 1.2.20");446 + schemaVersion + " and this migration needs one of: " + supportedBase1Versions); 437 447 stop(); 438 448 } -
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/RawBioAssayDataTransfer.java
r3533 r3653 36 36 37 37 /** 38 This class provides methods for transferring RawBioAssay and RawBioAssayData39 from base 1 to BASE 2. Base 1 files are located in /userfiles/rawdata/40 41 @author Gregory, Martin42 @version 2.043 @base.modified $Date$38 This class provides methods for transferring RawBioAssay and RawBioAssayData 39 from base 1 to BASE 2. Base 1 files are located in /userfiles/rawdata/ 40 41 @author Gregory, Martin 42 @version 2.0 43 @base.modified $Date$ 44 44 */ 45 45 46 public class RawBioAssayDataTransfer extends CommonItemTransfer 46 public class RawBioAssayDataTransfer 47 extends CommonItemTransfer 47 48 { 48 49 /** … … 172 173 values from and size. 173 174 174 @param ps <code>PreparedStatement</code>175 @param batcher RawDataBatcher used in this transfer .175 @param rawBioAssay The raw bio assay to transfer data to 176 @param batcher RawDataBatcher used in this transfer 176 177 @param hasArrayDesign (Currently not used) 178 @param base1RawBioAssayId The ID of the raw bioassay in the BASE 1.2 installation 177 179 @see Transfer#prepareStatementFromFile(String) 178 180 */ -
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/ReporterTransfer.java
r3533 r3653 71 71 72 72 /** 73 Maps BASE 1 reporters that are considered duplicate by BASE 2 after 74 whitespace has been trimmed from the external ID. Only used temporary until the 75 reporterMap is generated. Map contain BASE 1 ID --> Base 1 ID where the value 76 is the "real" reporter that is migrated to BASE 2. 77 */ 78 private Map<Integer, Integer> duplicates = null; 79 80 /** 73 81 Used for inserting reporter into Base 2. 74 82 */ … … 97 105 98 106 int count = getInt("countReporters"); 99 startProgress(count*2, "Reporters"); // First pass creates reporters, second one reads in the new ID:s 107 // First pass creates reporters... 108 startProgress(count, "Reporters"); 100 109 PreparedStatement ps = QUICK_TRANSFER ? prepareStatementFromFile("selectMinimumReporters") 101 110 : prepareStatementFromFile("selectReporters"); 102 111 103 112 externalIdMap = new HashMap<String, Integer>(count); 104 DbControl dc = newDbControl(); 113 duplicates = new HashMap<Integer, Integer>(); 114 DbControl dc = null; 105 115 try 106 116 { 117 dc = newDbControl(); 107 118 batcher = ReporterBatcher.getNew(dc); 108 119 batcher.setBatchSize(getBatchSize()); … … 110 121 batcher.close(); 111 122 dc.commit(); 112 123 } 124 finally 125 { 126 if (dc != null) dc.close(); 127 progress.stop(); 128 } 129 130 // ... second pass reads in the new ID:s 131 progress = null; 132 startProgress(count, "Mapping reporter ID:s"); 133 try 134 { 113 135 reporterMap = new HashMap<Integer, ReporterData>(count); 136 dc = newDbControl(); 114 137 log.info("Mapping reporters in BASE 2..."); 115 mapReporters(0, getBatchSize()); 138 int start = 0; 139 int batchSize = getBatchSize(); 140 141 DataQuery<ReporterData> query = Reporter.getQuery(); 142 query.order(Orders.asc(Hql.property("id"))); 143 query.setMaxResults(batchSize); 144 while (start < count) 145 { 146 query.setFirstResult(start); 147 cleanUpMemory(); 148 mapMigratedReporters(dc, query); 149 start += batchSize; 150 } 116 151 cleanUpMemory(); 152 mapDuplicateReporters(); 117 153 } 118 154 finally … … 120 156 if (dc != null) dc.close(); 121 157 externalIdMap.clear(); 158 duplicates.clear(); 122 159 progress.stop(); 123 160 } … … 152 189 rowCount++; 153 190 ReporterData item = createDataItem(rs); 154 batcher.insert(item);191 if (item != null) batcher.insert(item); 155 192 progress.increase(); 156 193 if (log.isDebugEnabled()) … … 179 216 } 180 217 181 private void mapReporters(int from, int size) 182 { 183 cleanUpMemory(); 184 int rowCount = 0; 185 DbControl dc = newDbControl(); 218 private void mapMigratedReporters(DbControl dc, DataQuery<ReporterData> query) 219 { 186 220 DataResultIterator<ReporterData> result = null; 187 221 try 188 222 { 189 DataQuery<ReporterData> query = Reporter.getQuery();190 query.order(Orders.asc(Hql.property("id")));191 query.setFirstResult(from);192 query.setMaxResults(size);193 223 result = query.iterate(dc); 194 224 while (result.hasNext()) 195 225 { 196 226 ReporterData reporter = result.next(); 197 reporterMap.put(externalIdMap.get(reporter.getExternalId()), new ReporterProxy(reporter));198 r owCount++;227 Integer base1Id = externalIdMap.get(reporter.getExternalId()); 228 reporterMap.put(base1Id, new ReporterProxy(reporter)); 199 229 progress.increase(); 200 230 if (log.isDebugEnabled()) 201 231 { 202 log.debug("Reporter: " + reporter.getExternalId() + " has been mapped"); 232 log.debug("Reporter: " + reporter.getExternalId() + 233 " has been mapped to BASE 1 ID="+base1Id); 203 234 } 204 235 } … … 207 238 { 208 239 if (result != null) result.close(); 
209 if (dc != null) dc.close(); 210 } 211 if (rowCount == size) 212 { 213 mapReporters(from+size, size); 214 } 215 } 216 240 } 241 } 242 243 private void mapDuplicateReporters() 244 { 245 // The map contains base1Id of duplicate -> base1Id of transfered reporter 246 for (Map.Entry<Integer, Integer> entry : duplicates.entrySet()) 247 { 248 Integer duplicateId = entry.getKey(); 249 Integer migratedId = entry.getValue(); 250 ReporterData migratedReporter = reporterMap.get(migratedId); 251 reporterMap.put(duplicateId, migratedReporter); 252 progress.increase(); 253 if (log.isDebugEnabled()) 254 { 255 log.debug("Reporter: " + migratedReporter.getExternalId() + 256 " has been mapped to BASE 1 ID=" + duplicateId); 257 } 258 } 259 } 260 261 217 262 218 263 /** … … 221 266 222 267 @param rs <code>ResultSet</code> containing the data for this item. 223 @return <code>ReporterData</code> 268 @return <code>ReporterData</code> or null if another reporter with same ID has already been created 224 269 @throws BaseException 225 270 @throws SQLException If current ResultSet position is invalid or number … … 230 275 { 231 276 // Populate a new ReporterData object 232 String externalId = rs.getString(2); 233 ReporterData rd = Reporter.getNew(externalId); 234 if (log.isDebugEnabled()) 235 { 236 log.debug("Transfering Reporter BASE1 id(" + rs.getInt(1) + ")\texternal Id(" + externalId + ")"); 237 } 238 externalIdMap.put(externalId, rs.getInt(1)); 239 if (!QUICK_TRANSFER) 240 { 241 String name = rs.getString(3); 242 if (name == null || "".equals(name)) name = externalId; 243 rd.setName(name); 244 rd.setSymbol(rs.getString(4)); 245 int columnIndex = 5; 246 for (String property : stringProperties) 247 { 248 rd.setExtended(property, rs.getString(columnIndex)); 249 columnIndex++; 250 } 251 // Set integer properties, integer properties should be listed 252 // last(after the string properties) in the select statement 253 String[] intProperties = { "length" }; 254 for (String property : intProperties) 255 { 256 rd.setExtended(property, rs.getInt(columnIndex)); 257 columnIndex++; 258 } 259 } 277 String externalId = rs.getString(2).trim(); 278 int base1Id = rs.getInt(1); 279 ReporterData rd = null; 280 if (externalIdMap.containsKey(externalId)) 281 { 282 Integer migratedBase1Id = externalIdMap.get(externalId); 283 log.warn("Duplicate reporter found in BASE 1 after trimming whitespace: id=" + 284 base1Id + "; externald ID=" + externalId + "; Merging this reporter with reporter id="+ 285 migratedBase1Id); 286 duplicates.put(base1Id, migratedBase1Id); 287 } 288 else 289 { 290 externalIdMap.put(externalId, base1Id); 291 if (log.isDebugEnabled()) 292 { 293 log.debug("Transfering Reporter BASE1 id(" + base1Id + ")\texternal Id(" + externalId + ")"); 294 } 295 rd = Reporter.getNew(externalId); 296 if (!QUICK_TRANSFER) 297 { 298 String name = rs.getString(3); 299 if (name == null || "".equals(name)) name = externalId; 300 rd.setName(name); 301 rd.setSymbol(rs.getString(4)); 302 int columnIndex = 5; 303 for (String property : stringProperties) 304 { 305 rd.setExtended(property, rs.getString(columnIndex)); 306 columnIndex++; 307 } 308 // Set integer properties, integer properties should be listed 309 // last(after the string properties) in the select statement 310 String[] intProperties = { "length" }; 311 for (String property : intProperties) 312 { 313 rd.setExtended(property, rs.getInt(columnIndex)); 314 columnIndex++; 315 } 316 } 317 } 260 318 return rd; 261 319 } -
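The change above turns the reporter migration into a two-pass operation: external IDs are trimmed before insertion, and BASE 1 reporters whose trimmed IDs collide are recorded in a duplicates map and later pointed at the reporter that was actually migrated. The following standalone sketch (hypothetical, simplified class and method names; not part of the changeset) illustrates only that bookkeeping:

import java.util.HashMap;
import java.util.Map;

public class DuplicateReporterSketch
{
    // externalIdMap: trimmed external ID -> BASE 1 ID of the reporter that will be migrated
    private final Map<String, Integer> externalIdMap = new HashMap<String, Integer>();
    // duplicates: BASE 1 ID of a duplicate -> BASE 1 ID of the migrated reporter
    private final Map<Integer, Integer> duplicates = new HashMap<Integer, Integer>();

    /**
        Register a BASE 1 reporter. Returns true if it should be inserted into
        BASE 2, false if it collapses to an already registered external ID and
        should only be mapped to the migrated reporter.
    */
    public boolean register(int base1Id, String rawExternalId)
    {
        String externalId = rawExternalId.trim();
        if (externalIdMap.containsKey(externalId))
        {
            duplicates.put(base1Id, externalIdMap.get(externalId));
            return false;
        }
        externalIdMap.put(externalId, base1Id);
        return true;
    }
}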
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/SampleTissueTransfer.java
r3533 r3653 72 72 Recursive run over a SQL query defined by a prepared statement and LIMIT 73 73 values from and size. If this method is called directly from a child 74 transfer, remember to call {@link #startProgress(int )} before you call74 transfer, remember to call {@link #startProgress(int, String)} before you call 75 75 this method 76 76 -
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/Transfer.java
r3533 r3653 269 269 Recursive run over a SQL query defined by a prepared statement and LIMIT 270 270 values from and size. If this method is called directly from a child 271 transfer, remember to call {@link #startProgress(int )} before you call271 transfer, remember to call {@link #startProgress(int, String)} before you call 272 272 this method 273 273 … … 339 339 Run over a SQL query defined by a prepared statement. If this method is 340 340 called directly from a child transfer, remember to call 341 {@link #startProgress(int )} before you call this method341 {@link #startProgress(int, String)} before you call this method 342 342 343 343 @param ps <code>PreparedStatement</code> -
branches/webservices/src/clients/migrate/net/sf/basedb/clients/migrate/WizzzardTransfer.java
r3533 r3653 59 59 public class WizzzardTransfer extends CommonItemTransfer 60 60 { 61 private static final org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger("net.sf.basedb.clients.migrate.WizzzardTransfer"); 62 61 63 /** 62 64 @param userT A finished {@link UserTransfer} 63 65 @param groupT A finished {@link GroupTransfer} 64 66 */ 65 private static final org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger("net.sf.basedb.clients.migrate.WizzzardTransfer");66 67 67 WizzzardTransfer(UserTransfer userT, GroupTransfer groupT) 68 68 { -
branches/webservices/src/core/common-queries.xml
r3581 r3653 1540 1540 </description> 1541 1541 </query> 1542 1543 <query id="GET_PLUGINCONFIGURATION_FOR_PLUGIN_WITH_NAME" type="HQL"> 1544 <sql> 1545 SELECT pc 1546 FROM PluginConfigurationData pc 1547 WHERE pc.pluginDefinition.className = :className 1548 AND pc.name = :name 1549 </sql> 1550 <description> 1551 Load a plugin configuration for a given plug-in and with a given name 1552 </description> 1553 </query> 1554 1542 1555 1543 1556 <query id="GET_EXPERIMENTS_FOR_RAWBIOASSAY" type="HQL"> … … 2688 2701 </query> 2689 2702 2703 <query id="UPDATE_PROPERTY_FILTER" type="SQL"> 2704 <sql> 2705 UPDATE [PropertyFilters] 2706 SET [property] = :newProperty 2707 WHERE [property] = :oldProperty 2708 </sql> 2709 <description> 2710 An SQL query that changes the property for all PropertyFilters 2711 with a given value. 2712 </description> 2713 </query> 2690 2714 2691 2715 -
branches/webservices/src/core/net/sf/basedb/core/AnnotationType.java
r3533 r3653 1094 1094 if (pv == null || !pv.getValues().contains(value)) 1095 1095 { 1096 throw new InvalidDataException("Value '"+value+"' is not among the list of allowed values: "+(pv == null ? "" : pv.getValues())); 1096 throw new DisallowedValueException("Value '"+value+"' is not among the list of allowed values " + 1097 "for annotation type: "+getName()); 1097 1098 } 1098 1099 } -
branches/webservices/src/core/net/sf/basedb/core/Application.java
r3533 r3653 136 136 137 137 /** 138 Allow automatic unload of plug-in JAR files or not. 139 */ 140 private static boolean autoUnloadPlugins; 141 142 /** 138 143 The host name of the server running BASE. 139 144 */ … 388 393 log.info("db.batch-size = " + Config.getString("db.batch-size")); 389 394 395 autoUnloadPlugins = Config.getBoolean("plugins.autounload"); 396 log.info("plugins.autounload = " + autoUnloadPlugins); 397 390 398 // Create a cache for SessionControl objects 391 399 sessionCache = Collections.synchronizedMap(new HashMap<String,SessionControl>()); 392 400 393 401 // Initialise other utility classes 402 HibernateUtil.init1(); 394 403 ExtendedProperties.init(); 395 404 RawDataTypes.init(); 396 405 RawDataUtil.init(); 397 HibernateUtil.init ();406 HibernateUtil.init2(); 398 407 QueryRuntimeFilterFactory.init(); 399 408 PredefinedQuery.init(); … 695 704 696 705 /** 706 If external plug-in JAR files should automatically be unloaded when they are modified. 707 @since 2.4 708 */ 709 static boolean autoUnloadPlugins() 710 { 711 return autoUnloadPlugins; 712 } 713 714 715 /** 697 716 Create a new <code>SessionControl</code> object. If BASE is not 698 717 running it will be started. -
branches/webservices/src/core/net/sf/basedb/core/BioAssaySet.java
r2998 r3653 313 313 if (action == Transactional.Action.CREATE) 314 314 { 315 checkAndCreateTables(); 315 316 countSpotsAndReporters(); 316 317 } … … 1177 1178 } 1178 1179 1180 private void checkAndCreateTables() 1181 { 1182 VirtualDb vdb = getVirtualDb(); 1183 vdb.createTables(VirtualTable.SPOT, VirtualTable.POSITION); 1184 if (getDataCubeFilterNo() != 0) 1185 { 1186 vdb.createTables(VirtualTable.FILTER); 1187 } 1188 } 1189 1179 1190 /** 1180 1191 Called when a bioassayset is about to be deleted. Finds which data cubes, layers, -
branches/webservices/src/core/net/sf/basedb/core/DataCube.java
r3533 r3653 171 171 getData().setBytes(addedBytes); 172 172 getData().setMaxRawMappingsForSpot(countSpotMappings()); 173 addedBytes = 0; 173 174 } 174 175 else if (action == Transactional.Action.UPDATE && addedBytes != 0) … … 190 191 query.setInteger("dataCube", getId()); 191 192 query.executeUpdate(); 193 addedBytes = 0; 192 194 } 193 195 } -
branches/webservices/src/core/net/sf/basedb/core/DbControl.java
r3533 r3653 100 100 /** 101 101 Lists items that are saved only if the parent item is also saved. 102 @see #saveItemIf(BasicItem, BasicItem )102 @see #saveItemIf(BasicItem, BasicItem, boolean) 103 103 */ 104 104 private Map<BasicItem, List<SaveIf>> saveIfQueue; … 356 356 Map.Entry<BasicItem,Transactional.Action> entry = iterator.next(); 357 357 BasicItem item = entry.getKey(); 358 Transactional.Action action = entry.getValue(); 359 if (afterCommitQueue.get(item) == action) 360 { 361 // Don't process the same item twice for the same action 362 continue; 363 } 358 364 BasicData data = item.getData(); 359 Transactional.Action action = entry.getValue();360 365 boolean controlled = item instanceof Controlled; 361 366 boolean transactional = controlled && item instanceof Transactional; … 780 785 item = c.newInstance(constructorParams); 781 786 itemCache.put(data, item); 787 if (item instanceof Controlled) 788 { 789 commitQueue.put(item, Transactional.Action.UPDATE); 790 } 782 791 } 783 792 catch (Exception ex) … 795 804 item.initPermissions(0, 0); 796 805 item.checkPermission(Permission.READ); 797 if (item instanceof Controlled)798 {799 commitQueue.put(item, Transactional.Action.UPDATE);800 }801 806 return item; 802 807 } … 979 984 } 980 985 } 986 } 987 988 /** 989 Reload the item from the database. If the item isn't attached to this 990 DbControl it is automatically reattached first. 991 992 @param item The item to reload 993 @since 2.4 994 */ 995 public void refreshItem(BasicItem item) 996 { 997 if (isClosed()) throw new ConnectionClosedException(); 998 sc.updateLastAccess(); 999 if (!isAttached(item)) reattachItem(item); 1000 HibernateUtil.refresh(hSession, item.getData()); 981 1001 } -
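The new DbControl.refreshItem() shown above reattaches an item if necessary and then reloads its data from the database. A minimal usage sketch (assumes an already open DbControl and a previously loaded item; hypothetical helper class, not part of the changeset):

import net.sf.basedb.core.BasicItem;
import net.sf.basedb.core.DbControl;

public class RefreshExample
{
    /**
        Reload an item so that changes committed by another transaction become
        visible to this DbControl. The item is reattached automatically if needed.
    */
    public static void reload(DbControl dc, BasicItem<?> item)
    {
        dc.refreshItem(item);
    }
}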
branches/webservices/src/core/net/sf/basedb/core/Experiment.java
r2694 r3653 237 237 { 238 238 getData().setBytes(addedBytes); 239 addedBytes = 0; 239 240 } 240 241 else if (action == Transactional.Action.UPDATE && addedBytes != 0) … … 256 257 query.setInteger("experiment", getId()); 257 258 query.executeUpdate(); 259 addedBytes = 0; 258 260 } 259 261 } -
branches/webservices/src/core/net/sf/basedb/core/ExtendedProperties.java
r3581 r3653 24 24 package net.sf.basedb.core; 25 25 26 import net.sf.basedb.core.dbengine.DbEngine; 27 import net.sf.basedb.util.Values; 26 28 import net.sf.basedb.util.XMLUtil; 27 29 30 import java.util.HashSet; 28 31 import java.util.List; 29 32 import java.util.ArrayList; 30 33 import java.util.Map; 31 34 import java.util.HashMap; 35 import java.util.Set; 36 import java.util.regex.PatternSyntaxException; 32 37 import java.net.URL; 33 38 … … 194 199 List<ExtendedProperty> properties = new ArrayList<ExtendedProperty>(); 195 200 List<Element> children = (List<Element>)classElement.getChildren("property"); 201 DbEngine engine = HibernateUtil.getDbEngine(); 202 String className = classElement.getAttributeValue("name"); 203 Set<String> usedNames = new HashSet<String>(); 196 204 for (Element property : children) 197 205 { 198 String name = property.getAttributeValue("name"); 199 String title = property.getAttributeValue("title"); 206 String name = Values.getStringOrNull(property.getAttributeValue("name")); 207 if (!ExtendedProperty.isValidName(name)) 208 { 209 throw new InvalidDataException("Invalid property for class " + 210 className + ": name=" + name); 211 } 212 if (usedNames.contains("name:" + name)) 213 { 214 throw new InvalidDataException("Duplicate property for class " + 215 className + ": name=" + name); 216 } 217 usedNames.add("name:" + name); 218 String title = Values.getStringOrNull(property.getAttributeValue("title")); 200 219 if (title == null) title = name; 220 String column = Values.getStringOrNull(property.getAttributeValue("column")); 221 if (!engine.isValidColumnName(column)) 222 { 223 throw new InvalidDataException("Invalid column for property " + 224 className + "[" + name + "]: column=" + column); 225 } 226 if (usedNames.contains("column:" + column)) 227 { 228 throw new InvalidDataException("Duplicate column for property " + 229 className + "[" + name + "]: column=" + column); 230 } 231 usedNames.add("column:" + column); 232 String description = Values.getStringOrNull(property.getAttributeValue("description")); 233 int length = XMLUtil.getIntAttribute(property, "length", 255); 201 234 Type type = Type.fromValue(property.getAttributeValue("type")); 202 String column = property.getAttributeValue("column");203 String description = property.getAttributeValue("description");204 int length = XMLUtil.getIntAttribute(property, "length", 255);205 235 if (type == Type.STRING && length > 255) type = Type.TEXT; 206 236 boolean nullable = XMLUtil.getBooleanAttribute(property, "null", true); … … 230 260 for (Element link : links) 231 261 { 232 String regexp = link.getAttributeValue("regexp"); 233 String url = link.getAttributeValue("url"); 234 epLinks.add(new ExtendedPropertyLinker(regexp, url)); 262 String regexp = Values.getStringOrNull(link.getAttributeValue("regexp")); 263 String url = Values.getStringOrNull(link.getAttributeValue("url")); 264 if (url == null) 265 { 266 throw new InvalidDataException("Missing url for property link " + 267 className + "[" + name + "]: regexp=" + regexp); 268 } 269 try 270 { 271 epLinks.add(new ExtendedPropertyLinker(regexp, url)); 272 } 273 catch (PatternSyntaxException ex) 274 { 275 throw new InvalidDataException("Invalid regexp for property link " + 276 className + "[" + name + "]: regexp=" + regexp, ex); 277 } 235 278 } 236 279 } -
branches/webservices/src/core/net/sf/basedb/core/ExtendedProperty.java
r3581 r3653 26 26 import java.text.NumberFormat; 27 27 import java.util.List; 28 import java.util.regex.Pattern; 29 28 30 29 31 /** … … 37 39 public class ExtendedProperty 38 40 { 41 42 /** 43 A regexp checking for invalid characters. 44 */ 45 private static final Pattern valid = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*"); 46 47 /** 48 Check that the name only contains a-zA-Z0-9_ and starts with 49 a letter or underscore. 50 @since 2.4 51 */ 52 public static boolean isValidName(String name) 53 { 54 return name == null ? false : valid.matcher(name).matches(); 55 } 56 39 57 private final String name; 40 58 private final Type type; … … 196 214 Numeric properties are parsed with the specified number format. 197 215 @param value The value to parse 198 @param numberFormat The number format, or null to use Float.valueOf orDouble.valueOf216 @param numberFormat The number format, or null to use Double.valueOf 199 217 @return An object 200 218 @throws InvalidDataException If the string cannot be converted to the correct type … … 214 232 parsed. Otherwise an exception is thrown. 215 233 @param value The value to parse 216 @param numberFormat The number format, or null to use Float.valueOf orDouble.valueOf234 @param numberFormat The number format, or null to use Double.valueOf 217 235 @param nullIfException TRUE to return null in case the string can't be parsed, 218 236 FALSE to throw an exception -
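The new ExtendedProperty.isValidName() accepts a name only if it starts with a letter or underscore and continues with letters, digits or underscores. A standalone sketch of the same check (hypothetical class name and sample values; not part of the changeset):

import java.util.regex.Pattern;

public class NameCheckSketch
{
    // Same pattern as the new ExtendedProperty.isValidName()
    private static final Pattern VALID = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*");

    public static boolean isValidName(String name)
    {
        return name == null ? false : VALID.matcher(name).matches();
    }

    public static void main(String[] args)
    {
        System.out.println(isValidName("avgSignal"));  // true
        System.out.println(isValidName("2ndColumn"));  // false; starts with a digit
        System.out.println(isValidName(null));         // false
    }
}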
branches/webservices/src/core/net/sf/basedb/core/Formula.java
r3581 r3653 24 24 package net.sf.basedb.core; 25 25 26 import java.util.ArrayList; 26 27 import java.util.Collections; 27 28 import java.util.HashMap; … … 166 167 */ 167 168 /** 168 @see Type#validate(Formula)169 @see Formula.Type#validate(Formula) 169 170 */ 170 171 @Override … … 351 352 public List<String> getFormulas() 352 353 { 353 return Collections.unmodifiableList( getData().getFormulas());354 return Collections.unmodifiableList(new ArrayList<String>(getData().getFormulas())); 354 355 } 355 356 … … 371 372 /** 372 373 The average method used for the values of this formula. The average 373 is only useful with a {@link Type#COLUMN_EXPRESSION}.374 is only useful with a {@link Formula.Type#COLUMN_EXPRESSION}. 374 375 @version 2.4 375 376 */ … … 378 379 NONE(0, "None") 379 380 { 381 @Override 380 382 public Expression getAverageExpression(Expression e) 381 383 { … … 385 387 GEOMETRIC_MEAN(1, "Geometric mean") 386 388 { 389 @Override 387 390 public Expression getAverageExpression(Expression e) 388 391 { … … 392 395 ARITHMETIC_MEAN(2, "Arithmetic mean") 393 396 { 397 @Override 394 398 public Expression getAverageExpression(Expression e) 395 399 { … … 400 404 MIN(3, "Min") 401 405 { 406 @Override 402 407 public Expression getAverageExpression(Expression e) 403 408 { … … 408 413 MAX(4, "Max") 409 414 { 415 @Override 410 416 public Expression getAverageExpression(Expression e) 411 417 { -
branches/webservices/src/core/net/sf/basedb/core/HibernateUtil.java
r3533 r3653 138 138 139 139 /** 140 Initialise this class. This is done at startup time by the 141 {@link Application#start()} method. Initialising means that we 140 First step of initialising this class. This is done at startup time by the 141 {@link Application#start()} method. In this step we load configuration 142 settings from the 'base.config' and 'hibernate.cfg.xml' files and 143 create the Dialect and DbEngine objects. 144 145 Initialising means that we 142 146 load the configuration from the properties and the xml file, 143 147 read all mapping files, generate additional mappings for the … … 145 149 used by {@link Query} implementation. 146 150 */ 147 static synchronized void init ()151 static synchronized void init1() 148 152 throws BaseException 149 153 { … … 155 159 cfg = new Configuration(); 156 160 setConfigurationProperties(cfg); 161 dialect = Dialect.getDialect(cfg.getProperties()); 162 dbEngine = EngineFactory.createEngine(dialect); 163 } 164 catch (HibernateException ex) 165 { 166 throw new BaseException(ex); 167 } 168 } 169 170 /** 171 Second step of initialising this class. This is done at startup time by the 172 {@link Application#start()} method. In this step we read all mapping files, 173 generate additional mappings for the {@link ExtendableData} items and raw data, 174 and generate filters used by {@link Query} implementation. 175 */ 176 static synchronized void init2() 177 throws BaseException 178 { 179 // Return if we have already been initialised 180 if (isInitialised) return; 181 182 try 183 { 157 184 addStaticMappings(cfg); 158 185 addExtendedPropertiesMappings(cfg); … … 161 188 cfg.configure(); 162 189 sf = cfg.buildSessionFactory(); 163 dialect = Dialect.getDialect(cfg.getProperties());164 dbEngine = EngineFactory.createEngine(dialect);165 190 } 166 191 catch (HibernateException ex) … … 170 195 isInitialised = true; 171 196 } 172 197 173 198 /** 174 199 Unload all settings. … … 1088 1113 1089 1114 /** 1115 Reload the data from the database for a given entity. 1116 @param session The Hibernate session which is connected to the 1117 database 1118 @param data The entity to reload 1119 @since 2.4 1120 */ 1121 static void refresh(Session session, BasicData data) 1122 { 1123 assert session != null : "session == null"; 1124 try 1125 { 1126 session.refresh(data); 1127 } 1128 catch(HibernateException ex) 1129 { 1130 throw new BaseException(ex); 1131 } 1132 } 1133 1134 /** 1090 1135 Checks if an item with the specified ID exists in the database. 1091 1136 @param session The Hibernate session which is connected to the … … 1330 1375 assert session != null : "session == null"; 1331 1376 assert sql != null : "sql == null"; 1377 sql = sql.replace('[', getDialect().openQuote()).replace(']', getDialect().closeQuote()); 1332 1378 try 1333 1379 { … … 1348 1394 assert session != null : "session == null"; 1349 1395 assert sql != null : "sql == null"; 1396 sql = sql.replace('[', getDialect().openQuote()).replace(']', getDialect().closeQuote()); 1350 1397 try 1351 1398 { -
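Predefined SQL queries, such as UPDATE_PROPERTY_FILTER in common-queries.xml above, may now write table and column names inside square brackets; the brackets are replaced with the quote characters of the current database dialect before the query is created. A standalone sketch of the substitution (hypothetical class name and quote characters; not part of the changeset):

public class QuoteSketch
{
    // Mirrors the bracket handling added to HibernateUtil: '[' and ']' are
    // replaced with the dialect's open and close quote characters.
    public static String quote(String sql, char openQuote, char closeQuote)
    {
        return sql.replace('[', openQuote).replace(']', closeQuote);
    }

    public static void main(String[] args)
    {
        String sql = "UPDATE [PropertyFilters] SET [property] = :newProperty WHERE [property] = :oldProperty";
        System.out.println(quote(sql, '`', '`'));  // MySQL-style quoting
        System.out.println(quote(sql, '"', '"'));  // ANSI-style quoting
    }
}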
branches/webservices/src/core/net/sf/basedb/core/Install.java
r3581 r3653 31 31 import net.sf.basedb.core.data.HardwareTypeData; 32 32 import net.sf.basedb.core.data.MimeTypeData; 33 import net.sf.basedb.core.data.PluginConfigurationData; 33 34 import net.sf.basedb.core.data.ProtocolTypeData; 34 35 import net.sf.basedb.core.data.QuotaData; … … 101 102 method. 102 103 */ 103 public static final int NEW_SCHEMA_VERSION = Integer.valueOf( 37).intValue();104 public static final int NEW_SCHEMA_VERSION = Integer.valueOf(40).intValue(); 104 105 105 106 public static synchronized void createTables(boolean update, final ProgressReporter progress) … … 151 152 required for BASE to work properly. Items that already exists 152 153 will not be modified. 153 154 154 155 @param progress An object implementing the {@link ProgressReporter} 155 156 interface … … 166 167 Application.start(false, false, false); 167 168 session = HibernateUtil.newSession(); 168 169 169 170 // SchemaVersion 170 171 schemaVersion = createSchemaVersion(update ? 1 : NEW_SCHEMA_VERSION); 171 172 172 173 // QuotaTypes 173 174 if (progress != null) progress.display((int)(1*progress_factor), "--Creating quota types..."); … … 176 177 createQuotaType(QuotaType.RAW_DATA, "Raw data", "Quota for raw data", false); 177 178 createQuotaType(QuotaType.EXPERIMENT, "Experiment", "Quota for experiment", false); 178 179 179 180 // Quota 180 181 if (progress != null) progress.display((int)(2*progress_factor), "--Creating quota..."); … … 186 187 QuotaData quota1000 = createQuota(null, "1 GB total", "1 GB total quota.", 1024*1024*1024); 187 188 QuotaData quota0 = createQuota(null, "No quota", "May not consume any disk space at all", 0); 188 189 189 190 // Roles 190 191 if (progress != null) progress.display((int)(3*progress_factor), "--Creating roles..."); … … 195 196 RoleData roleGuest = createRole(Role.GUEST, "Guest", false, "For users wanting to have a peek at BASE. 
Has very limited access to create new things."); 196 197 RoleData roleJobAgent = createRole(Role.JOBAGENT, "Job agent", false, "This role is given to job agents and allows them to read and execute jobs."); 197 198 198 199 // Users 199 200 if (progress != null) progress.display((int)(4*progress_factor), "--Creating users..."); … … 207 208 // createUser(0, "demo", "demo", "Demo", "This account can be used for demonstration purposes.", Role.GUEST); 208 209 // createUser(0, "power", "power", "Power user", "This account has power user privileges.", Role.POWER_USER); 209 210 210 211 // Now that we have a root user let's create a session 211 212 sessionControl = Application.newSessionControl( null, null, null ); 212 213 sessionControl.login(rootLogin, rootPassword, "InitDBSessionId", false); 213 214 214 215 if (progress != null) progress.display((int)(5*progress_factor), "--Creating groups..."); 215 216 GroupData groupEveryone = createGroup(Group.EVERYONE, "Everyone", false, "Everyone are members of this group."); 216 217 217 218 if (progress != null) progress.display((int)(6*progress_factor), "--Creating keys..."); 218 219 int PERMISSION_ALL = Permission.grant(Permission.READ, Permission.USE, Permission.WRITE, … … 228 229 administrators_all.put(roleAdmin, PERMISSION_ALL); 229 230 administrators_all.put(roleSuper, PERMISSION_READ); 230 231 231 232 // Administrators -> WRITE 232 233 HashMap<RoleData, Integer> administrators_write = new HashMap<RoleData, Integer>(); 233 234 administrators_write.put(roleAdmin, PERMISSION_WRITE); 234 235 administrators_write.put(roleSuper, PERMISSION_READ); 235 236 236 237 // Administrators -> READ 237 238 HashMap<RoleData, Integer> administrators_read = new HashMap<RoleData, Integer>(); 238 239 administrators_read.put(roleAdmin, PERMISSION_READ); 239 240 administrators_read.put(roleSuper, PERMISSION_READ); 240 241 241 242 // Power users -> CREATE; Administrators -> ALL 242 243 HashMap<RoleData, Integer> power_users_create = new HashMap<RoleData, Integer>(); … … 244 245 power_users_create.put(rolePower, PERMISSION_CREATE); 245 246 power_users_create.put(roleSuper, PERMISSION_READ); 246 247 247 248 // Power users & Users -> CREATE; Administrators -> ALL 248 249 HashMap<RoleData, Integer> users_create = new HashMap<RoleData, Integer>(); … … 251 252 users_create.put(roleUser, PERMISSION_CREATE); 252 253 users_create.put(roleSuper, PERMISSION_READ); 253 254 254 255 // Power users & Users & Guests -> CREATE; Administrators -> ALL 255 256 HashMap<RoleData, Integer> guests_create = new HashMap<RoleData, Integer>(); … … 259 260 guests_create.put(roleGuest, PERMISSION_CREATE); 260 261 guests_create.put(roleSuper, PERMISSION_READ); 261 262 262 263 // All -> CREATE; Administrators -> ALL 263 264 HashMap<RoleData, Integer> all_create = new HashMap<RoleData, Integer>(); … … 275 276 all_use_administrators_write.put(roleGuest, PERMISSION_USE); 276 277 all_use_administrators_write.put(roleSuper, PERMISSION_USE); 277 278 278 279 // Users & Guests & Powers users -> USE; Administrators -> ALL 279 280 HashMap<RoleData, Integer> guests_use_administrators_all = new HashMap<RoleData, Integer>(); … … 283 284 guests_use_administrators_all.put(roleGuest, PERMISSION_USE); 284 285 guests_use_administrators_all.put(roleSuper, PERMISSION_READ); 285 286 286 287 // Users & Guests -> USE; Powers users & Administrators -> ALL 287 288 HashMap<RoleData, Integer> guests_use_power_users_all = new HashMap<RoleData, Integer>(); … … 304 305 createRoleKey(Item.USER, "Users", "Gives access to users.", 
administrators_all); 305 306 createRoleKey(Item.PROJECT, "Projects", "Gives access to projects.", users_create); 306 307 307 308 // Files, directories and quota 308 309 createRoleKey(Item.DIRECTORY, "Directories", "Gives access to directories", guests_create); … … 312 313 createRoleKey(Item.QUOTA, "Quota", "Gives access to quota", administrators_all); 313 314 createRoleKey(Item.DISKUSAGE, "Disc usage", "Gives access disc usage", administrators_read); 314 315 315 316 // Protocols, hardware, software 316 317 createRoleKey(Item.PROTOCOLTYPE, "Protocol types", "Gives access to protocol types", guests_use_power_users_all); … … 320 321 createRoleKey(Item.SOFTWARETYPE, "Software types", "Gives access to software types", all_use_administrators_write); 321 322 createRoleKey(Item.SOFTWARE, "Software", "Gives access to software", power_users_create); 322 323 323 324 // Annotations 324 325 createRoleKey(Item.ANNOTATIONTYPE, "Annotation types", "Gives access to annotation types", power_users_create); … … 331 332 createRoleKey(Item.EXTRACT, "Extracts", "Gives access to extracts", users_create); 332 333 createRoleKey(Item.LABELEDEXTRACT, "Labeled extracts", "Gives access to labeled extracts", users_create); 333 334 334 335 // Reporters 335 336 createRoleKey(Item.REPORTERTYPE, "Reporter types", "Gives access to reporter types", guests_use_power_users_all); 336 337 createRoleKey(Item.REPORTER, "Reporters", "Gives access to reporter", guests_use_power_users_all); 337 338 createRoleKey(Item.REPORTERLIST, "Reporter lists", "Gives access to reporter lists", users_create); 338 339 339 340 // Array LIMS - plates 340 341 createRoleKey(Item.PLATEGEOMETRY, "Plate geometries", "Gives access to plate geometries", guests_use_administrators_all); … … 342 343 createRoleKey(Item.PLATE, "Plates", "Gives access to plates", power_users_create); 343 344 createRoleKey(Item.PLATEMAPPING, "Plate mappings", "Gives access to plate mappings", power_users_create); 344 345 345 346 // Array LIMS - arrays 346 347 createRoleKey(Item.ARRAYDESIGN, "Array design", "Gives access to array designs", power_users_create); 347 348 createRoleKey(Item.ARRAYBATCH, "Array batches", "Gives access to array batches", power_users_create); 348 349 createRoleKey(Item.ARRAYSLIDE, "Array slides", "Gives access to array slides", power_users_create); 349 350 350 351 // Hybridization, raw data and experiments 351 352 createRoleKey(Item.HYBRIDIZATION, "Hybridizations", "Gives access to hybridizations", users_create); … … 355 356 createRoleKey(Item.FORMULA, "Formulas", "Gives access to formulas", users_create); 356 357 createRoleKey(Item.EXTRAVALUETYPE, "Extra value types", "Gives access to extra value types", guests_use_power_users_all); 357 358 358 359 // Plugins, jobs 359 360 createRoleKey(Item.PLUGINTYPE, "Plugin types", "Gives access to plugin types", administrators_all); … … 362 363 createRoleKey(Item.JOB, "Jobs", "Gives access to jobs", guests_create); 363 364 createRoleKey(Item.JOBAGENT, "Job agents", "Gives access to job agents", administrators_all); 364 365 365 366 // Misc. 
366 367 createRoleKey(Item.CLIENT, "Client applications", "Gives access to client applications", administrators_all); … … 370 371 createRoleKey(Item.SETTING, "Settings", "Gives access to settings", administrators_all); 371 372 createRoleKey(Item.MESSAGE, "Messages", "Gives access to messages", users_create); 372 373 373 374 // Permissions for job agents are only added for new installations and 374 375 // updates below schema version 13 … … 395 396 ItemKeyData keyEveryoneUse = createItemKey("EVERYONE=USE", null, everyoneUse); 396 397 ItemKeyData keyJobAgentUse = createItemKey("JOBAGENT=USE", jobAgentUse, null); 397 398 398 399 // ProtocolTypes 399 400 if (progress != null) progress.display((int)(7*progress_factor), "--Creating protocol types..."); … … 406 407 createProtocolType(ProtocolType.SCANNING, "Scanning", "Protocols used for scanning microarray images."); 407 408 createProtocolType(ProtocolType.FEATURE_EXTRACTION, "Feature extraction", "Protocols used for extracting features from microarray images."); 408 409 409 410 // HardwareTypes 410 411 if (progress != null) progress.display((int)(8*progress_factor), "--Creating hardware types..."); … … 412 413 HardwareTypeData robot = createHardwareType(HardwareType.PRINT_ROBOT, "Print robot", "Spot the probe on the array."); 413 414 HardwareTypeData hybStation = createHardwareType(HardwareType.HYBRIDIZATION_STATION, "Hybridization station", "Automate the hybridization of microarrays on slides."); 414 415 415 416 // Hardware 416 417 if (progress != null) progress.display((int)(9*progress_factor), "--Creating hardware..."); … … 425 426 createHardware("OmniGrid", "100", "", robot, rootUser, keyEveryoneUse); 426 427 createHardware("Qarray", "Max", "", robot, rootUser, keyEveryoneUse); 427 428 428 429 // SoftwareTypes 429 430 if (progress != null) progress.display((int)(10*progress_factor), "--Creating software types..."); 430 431 SoftwareTypeData feature = createSoftwareType(SoftwareType.FEATURE_EXTRACTION, "Feature extraction", "Extracts features from microarray images."); 431 432 432 433 // Software 433 434 if (progress != null) progress.display((int)(11*progress_factor), "--Creating software..."); … … 442 443 createSoftware("ScanArray", null, null, feature, rootUser, keyEveryoneUse); 443 444 createSoftware("Affymetrix GeneChip Software", "1.2", null, feature, rootUser, keyEveryoneUse); 444 445 createSoftware("Illumina BeadStudio", null, null, feature, rootUser, keyEveryoneUse); 446 445 447 // Directory 446 448 if (progress != null) progress.display((int)(12*progress_factor), "--Creating directories..."); … … 451 453 createDirectory(rootUser, templateDirectory, null, "raw data", "Place your raw data files here", null); 452 454 createDirectory(rootUser, templateDirectory, null, "projects", "Create a subdirectory for project-related files in this directory.", null); 453 455 454 456 // FileTypes 455 457 if (progress != null) progress.display((int)(13*progress_factor), "--Creating file types..."); … … 463 465 FileTypeData imageType = createFileType(FileType.IMAGE, "Image", "Image files"); 464 466 createFileType(FileType.SPOT_IMAGES, "Spot images", "A zip file containing generated spot images in png format."); 465 467 466 468 // MimeTypes 467 469 if (progress != null) progress.display((int)(14*progress_factor), "--Creating MIME types..."); … … 505 507 createMimeType("application/x-gtar", "Gzipped archive", "gtar", null); 506 508 createMimeType("application/x-gzip", "Gzip compressed file", "gz", null); 507 509 508 510 // Microarray/BASE-specific mime 
types 509 511 createMimeType("text/plain", "GenePix Results file", "gpr", rawDataType); … … 517 519 createMimeType("text/plain", "Biorobotics TAM format", "tam", printMapType); 518 520 createMimeType("text/plain", "Molecularware MWBR format", "mvbr", printMapType); 519 521 520 522 // Plate geometries 521 523 if (progress != null) progress.display((int)(15*progress_factor), "--Creating plate geometries..."); … … 523 525 PlateGeometryData plate384 = createPlateGeometry("384-well (16 x 24)", "384-wells, 16 rows, 24 columns", 16, 24); 524 526 PlateGeometryData plate1536 = createPlateGeometry("1536-well (32 x 48)", "1536-wells, 32 rows, 48 columns", 32, 48); 525 527 526 528 // Plate mappings 527 529 if (progress != null) progress.display((int)(16*progress_factor), "--Creating plate mappings..."); … … 536 538 createPlateMapping("1536 --> 1536", "Maps 1:1 between 1536-well (16 x 24) plates", 537 539 plate1536, 1, plate1536, 1, null, rootUser, keyEveryoneUse); 538 539 540 541 540 542 // Labels 541 543 if (progress != null) progress.display((int)(17*progress_factor), "--Creating labels..."); … … 556 558 createAnnotationType("Comment", "", Type.STRING, 1, null, 557 559 wells, null, false, false, rootUser, keyEveryoneUse ); 558 560 559 561 // Clients 560 562 if (progress != null) progress.display((int)(19*progress_factor), "--Creating clients..."); … … 562 564 createClient(rootUser, "net.sf.basedb.clients.migration", "Migration tool", "This client is used to migrate BASE 1.2.x data to BASE 2", null); 563 565 createClient(rootUser, "net.sf.basedb.clients.jobagent", "Job agent client", "This client is used by all job agents", keyJobAgentUse); 564 566 565 567 // News 566 568 if (progress != null) progress.display((int)(20*progress_factor), "--Creating news..."); 567 569 createNews("BASE 2 Server installed", "Welcome to your new BASE 2 server."); 568 570 569 571 // Plugins 570 572 if (progress != null) progress.display((int)(21*progress_factor), "--Creating plugin definitions..."); … … 581 583 "file system first. For this to work the plugin must also be given 'immediate exection' rights", 582 584 "net.sf.basedb.core.plugin.ImmediateDownloadExporter", null); 583 585 createPluginType("Analysis filter plug-in", "Analysis plug-ins that are pure filter plug-ins should " + 586 "implement this interface. 
If they do they are linked with the 'Filter bioassayset' button and" + 587 "not just the 'Run analysis plug-in' button.", 588 "net.sf.basedb.core.plugin.AnalysisFilterPlugin", null); 589 584 590 createPluginDefinition("net.sf.basedb.plugins.Base1PluginExecuter", null, null, true, null, false); 585 591 createPluginDefinition("net.sf.basedb.plugins.BioAssaySetExporter", null, keyEveryoneUse, true, null, true); … … 606 612 createPluginDefinition("net.sf.basedb.plugins.CdfFileReporterImporter", null, null, true, null, false); 607 613 createPluginDefinition("net.sf.basedb.plugins.PackedFileExporter", null, keyEveryoneUse, true, null, true); 614 createPluginDefinition("net.sf.basedb.plugins.AnnotationFlatFileImporter", null, keyEveryoneUse, true, null, false); 615 createPluginDefinition("net.sf.basedb.plugins.IlluminaRawDataImporter", null, keyEveryoneUse, true, null, false); 608 616 609 617 // Plugin configurations 610 if (!update) 611 { 612 if (progress != null) progress.display((int)(22*progress_factor), "--Creating example plugin configurations..."); 613 createPluginConfigurations("/plugin_configfile.xml", update); 614 } 615 618 if (progress != null) progress.display((int)(22*progress_factor), "--Creating example plugin configurations..."); 619 createPluginConfigurations("/plugin_configfile.xml", update); 620 createPluginConfigurations("/illumina.configurations.xml", update); 621 622 616 623 // Formulas 617 624 ColoringData NO_COLORING = new ColoringData(false, false, null, null, null); … … 630 637 "Swap channel 1 and 2", "Swaps the intensities in channel 1 and 2", null, 2, 631 638 new String[] { "ch(2)", "ch(1)" }, NO_COLORING, rootUser, keyEveryoneUse); 632 639 633 640 // Intensity expression formulas defined by raw data types 634 641 int maxChannels = 0; … … 655 662 null, channel, new String[] { expression }, NO_COLORING, rootUser, keyEveryoneUse); 656 663 } 657 664 658 665 if (progress != null) progress.display((int)(24*progress_factor), "--Creating job agents..."); 659 666 // TODO … … 1918 1925 { 1919 1926 int version = schemaVersion.getSchemaVersion(); 1920 pd.loadPluginInformation(jarPath, className, version < 20);1927 pd.loadPluginInformation(jarPath, className, true); 1921 1928 if (version < 21) pd.setAllowImmediateExecution(allowImmediateExecution); 1922 1929 dc.commit(); … … 1955 1962 DbControl dc = null; 1956 1963 1957 // Only get configurations from file if it's an installation of BASE2 and never if it's an update. 
1958 if (!update) 1959 { 1960 try 1961 { 1962 dc = sessionControl.newDbControl(); 1963 URL dtdURL = Install.class.getResource("/net/sf/basedb/core/dtd/plugin-configuration-file.dtd"); 1964 try 1965 { 1966 dc = sessionControl.newDbControl(); 1967 1968 org.hibernate.Query query = null; 1969 if (update) 1970 { 1971 // If we are updating, we must check if a configuration with a 1972 // given name already exists 1973 query = HibernateUtil.getPredefinedQuery(dc.getHibernateSession(), 1974 "GET_PLUGINCONFIGURATION_FOR_PLUGIN_WITH_NAME"); 1975 /* 1976 SELECT pc 1977 FROM PluginConfigurationData pc 1978 WHERE pc.pluginDefinition.className = :className 1979 AND pc.name = :name 1980 */ 1981 } 1982 1983 URL dtdURL = Install.class.getResource("/net/sf/basedb/core/dtd/plugin-configuration-file.dtd"); 1984 URL fileURL = Install.class.getResource(filePath); 1985 Document doc = XMLUtil.getValidatedXml(fileURL, dtdURL); 1986 Element rootElement = doc.getRootElement(); 1987 1988 List configurations = rootElement.getChildren(); 1989 1990 for (Object obj : configurations) 1991 { 1992 Element configuration = (Element)obj; 1993 String name = configuration.getChildText("configname"); 1994 String description = configuration.getChildText("description"); 1995 String pluginClassName = configuration.getAttributeValue("pluginClassName"); 1964 1996 1965 Document doc = XMLUtil.getValidatedXML(Install.class.getResourceAsStream(filePath), dtdURL); 1966 Element rootElement = doc.getRootElement(); 1997 if (update) 1998 { 1999 // Check if it already exists 2000 query.setString("className", pluginClassName); 2001 query.setString("name", name); 2002 PluginConfigurationData config = HibernateUtil.loadData(PluginConfigurationData.class, query); 2003 2004 if (config != null) 2005 { 2006 log.info("createPluginConfigurations: EXISTS [plugin="+pluginClassName+ 2007 "; name=" + name + "]"); 2008 continue; 2009 // Continue with next configuration 2010 } 2011 } 1967 2012 1968 List configurations = rootElement.getChildren(); 1969 1970 for (Object obj : configurations) 2013 try 1971 2014 { 1972 Element configuration = (Element)obj; 1973 String name = configuration.getChildText("configname"); 1974 String description = configuration.getChildText("description"); 1975 String pluginClassName = configuration.getAttributeValue("pluginClassName"); 2015 PluginDefinition pluginDefinition = PluginDefinition.getByClassName(dc, pluginClassName); 2016 2017 // Create a new plugin configurationj 2018 PluginConfiguration pluginConfig = PluginConfiguration.getNew(dc, pluginDefinition); 2019 pluginConfig.setItemKey(pluginDefinition.getItemKey()); 2020 pluginConfig.setProjectKey(pluginDefinition.getProjectKey()); 2021 dc.saveItem(pluginConfig); 2022 pluginConfig.setName(name); 2023 pluginConfig.setDescription(description); 1976 2024 1977 try 1978 { 1979 PluginDefinition pluginDefinition = PluginDefinition.getByClassName(dc, pluginClassName); 1980 1981 // Create a new plugin configurationj 1982 PluginConfiguration pluginConfig = PluginConfiguration.getNew(dc, pluginDefinition); 1983 dc.saveItem(pluginConfig); 1984 pluginConfig.setName(name); 1985 pluginConfig.setDescription(description); 2025 // Get the confiuration parameters from file 2026 List parameters = configuration.getChildren("parameter"); 2027 for (Object elementObj : parameters) 2028 { 2029 Element parameter = (Element)elementObj; 2030 String parametername = parameter.getChildText("name"); 2031 String cl = parameter.getChildText("class"); 2032 String label = parameter.getChildText("label").length() 
> 0 ? parameter.getChildText("label") : name; 2033 Class clazz = null; 2034 List<Object> values = new ArrayList<Object>() ; 1986 2035 1987 // Get the confiuration parameters from file 1988 List parameters = configuration.getChildren("parameter"); 1989 for (Object elementObj : parameters) 2036 if (cl.length() > 0) 1990 2037 { 1991 Element parameter = (Element)elementObj; 1992 String parametername = parameter.getChildText("name"); 1993 String cl = parameter.getChildText("class"); 1994 String label = parameter.getChildText("label").length() > 0 ? parameter.getChildText("label") : name; 1995 Class clazz = null; 1996 List<Object> values = new ArrayList<Object>() ; 1997 1998 if (cl.length() > 0) 1999 { 2000 clazz = Class.forName(cl); 2001 List children = parameter.getChildren("value"); 2002 // Get the parameter values 2003 for (Object ch : children) 2004 { 2005 String text = ((Element)ch).getText(); 2006 if (clazz.equals(Boolean.class)) values.add(new Boolean(text)); 2007 else if (clazz.equals(Date.class)) values.add(DateUtil.parseString(text)); 2008 else if (clazz.equals(Double.class)) values.add(new Double(text)); 2009 else if (clazz.equals(Float.class)) values.add(new Float(text)); 2010 else if (clazz.equals(Long.class)) values.add(new Long(text)); 2011 else if (clazz.equals(String.class)) values.add(text); 2012 } 2013 // Get the parametertype 2014 ParameterType pType; 2015 if (clazz.equals(Boolean.class)) pType = new BooleanParameterType(); 2016 else if (clazz.equals(Date.class)) pType = new DateParameterType(); 2017 else if (clazz.equals(Double.class)) pType = new DoubleParameterType(); 2018 else if (clazz.equals(Float.class)) pType = new FloatParameterType(); 2019 else if (clazz.equals(Integer.class)) pType = new IntegerParameterType(); 2020 else if (clazz.equals(Long.class)) pType = new LongParameterType(); 2021 else if (clazz.equals(String.class)) pType = new StringParameterType(); 2022 else pType = null; 2023 pluginConfig.setParameterValues(parametername,label, "", pType, values); 2024 } 2025 } 2026 } 2027 catch(Throwable ex) 2028 { 2029 log.error("createPluginConfiguration: FAILED[Configuration: " + name, ex); 2030 } 2038 clazz = Class.forName(cl); 2039 List children = parameter.getChildren("value"); 2040 // Get the parameter values 2041 for (Object ch : children) 2042 { 2043 String text = ((Element)ch).getText(); 2044 if (clazz.equals(Boolean.class)) values.add(new Boolean(text)); 2045 else if (clazz.equals(Date.class)) values.add(DateUtil.parseString(text)); 2046 else if (clazz.equals(Double.class)) values.add(new Double(text)); 2047 else if (clazz.equals(Float.class)) values.add(new Float(text)); 2048 else if (clazz.equals(Long.class)) values.add(new Long(text)); 2049 else if (clazz.equals(String.class)) values.add(text); 2050 } 2051 // Get the parametertype 2052 ParameterType pType; 2053 if (clazz.equals(Boolean.class)) pType = new BooleanParameterType(); 2054 else if (clazz.equals(Date.class)) pType = new DateParameterType(); 2055 else if (clazz.equals(Double.class)) pType = new DoubleParameterType(); 2056 else if (clazz.equals(Float.class)) pType = new FloatParameterType(); 2057 else if (clazz.equals(Integer.class)) pType = new IntegerParameterType(); 2058 else if (clazz.equals(Long.class)) pType = new LongParameterType(); 2059 else if (clazz.equals(String.class)) pType = new StringParameterType(); 2060 else pType = null; 2061 pluginConfig.setParameterValues(parametername,label, "", pType, values); 2062 } 2063 } 2031 2064 } 2032 dc.commit(); 2033 } 2034 catch(Throwable ex) 2035 { 
2036 throw new BaseException(ex); 2037 } 2038 finally 2039 { 2040 if (dc != null) dc.close(); 2041 } 2065 catch(Throwable ex) 2066 { 2067 log.error("createPluginConfiguration: FAILED[Configuration: " + name, ex); 2068 } 2069 } 2070 dc.commit(); 2071 } 2072 catch(Throwable ex) 2073 { 2074 throw new BaseException(ex); 2075 } 2076 finally 2077 { 2078 if (dc != null) dc.close(); 2042 2079 } 2043 2080 } -
branches/webservices/src/core/net/sf/basedb/core/Item.java
r3533 r3653 623 623 Get a value indicating the deletion order. Items with a low value 624 624 should be deleted before items with a high value, to minimize the risk 625 that the {@lin BasicItem#isUsed()} method returns true.625 that the {@link BasicItem#isUsed()} method returns true. 626 626 @return The deletion order value 627 627 @since 2.4 -
branches/webservices/src/core/net/sf/basedb/core/ItemContext.java
r3533 r3653 165 165 private final Item itemType; 166 166 private final String subContext; 167 private int contextId; 167 168 private int itemId; 168 169 private Set<Integer> selected; … … 218 219 this.subContext = context.getSubContext(); 219 220 this.name = context.getName(); 221 this.contextId = context.getId(); 220 222 this.itemId = context.getItemId(); 221 223 this.rowsPerPage = context.getRowsPerPage(); … … 295 297 } 296 298 299 /** 300 Get the database ID of this context. Can be used to load contexts 301 @return The database ID of the context, or 0 if it hasn't been saved to 302 the database 303 @since 2.4 304 */ 305 public int getContextId() 306 { 307 return contextId; 308 } 309 297 310 /** 298 311 Get the ID of the current item in this context. -
branches/webservices/src/core/net/sf/basedb/core/ItemInUseException.java
r2304 r3653 48 48 super("The item "+what+" is used by another item."); 49 49 } 50 51 /** 52 Creates a new <code>ItemInUseException</code> with a cause. The error 53 message produced will look like: 54 <code>Permission denied. The item User[ID=325] is used by another item.</code> 55 56 @param what A description of the item that is in use, for 57 example User[Id=325] 58 @param cause The cause of the error 59 @since 2.4 60 */ 61 public ItemInUseException(String what, Throwable cause) 62 { 63 super("The item "+what+" is used by another item.", cause); 64 } 65 50 66 } -
branches/webservices/src/core/net/sf/basedb/core/Job.java
r3533 r3653 25 25 26 26 import net.sf.basedb.core.Transactional.Action; 27 import net.sf.basedb.core.data.ExperimentData; 27 28 import net.sf.basedb.core.data.JobData; 28 29 import net.sf.basedb.core.data.MessageData; … … 283 284 } 284 285 } 286 /** 287 Grant read permission if the logged in user has read permission to the 288 experiment the job belongs to. 289 */ 290 @Override 291 void initPermissions(int granted, int denied) 292 { 293 ExperimentData parent = getData().getExperiment(); 294 if (parent != null) 295 { 296 int parentPermission = 0; 297 if (parent.getId() != 0) 298 { 299 parentPermission = getSessionControl().getAllPermissions(parent); 300 } 301 else 302 { 303 BasicItem<?> basicParent = getDbControl().getItem(BasicItem.class, parent); 304 parentPermission = Permission.grant(basicParent.getPermissions()); 305 } 306 if (Permission.hasPermission(parentPermission, Permission.READ)) 307 { 308 granted |= Permission.grant(Permission.READ); 309 } 310 } 311 super.initPermissions(granted, denied); 312 } 285 313 // ------------------------------------------- 286 314 … … 351 379 getData().setPluginConfiguration(pluginConfiguration.getData()); 352 380 getData().setParameterVersion(pluginConfiguration.getParameterVersion()); 381 } 382 } 383 384 /** 385 Get the experiment this job is a part of. 386 @return The <code>Experiment</code> item, or null if this job 387 isn't part of an experiment 388 @since 2.4 389 */ 390 public Experiment getExperiment() 391 { 392 return getDbControl().getItem(Experiment.class, getData().getExperiment()); 393 } 394 /** 395 Set the experiment this job is part of. Once an experiment has been set, it 396 can't be changed. 397 @since 2.4 398 */ 399 void setExperiment(Experiment experiment) 400 { 401 ExperimentData current = getData().getExperiment(); 402 ExperimentData next = experiment == null ? null : experiment.getData(); 403 if (current == null) 404 { 405 getData().setExperiment(next); 406 } 407 else if (!current.equals(next)) 408 { 409 throw new PermissionDeniedException("A job can only be part of one experiment: " + this); 353 410 } 354 411 } -
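With the new Job.getExperiment() a client can find out which experiment, if any, a job belongs to. A hedged usage sketch (assumes the standard getName() accessor on Experiment; hypothetical helper class, not part of the changeset):

import net.sf.basedb.core.Experiment;
import net.sf.basedb.core.Job;

public class JobExperimentExample
{
    /** Describe the experiment a job is part of, or note that it has none. */
    public static String describe(Job job)
    {
        Experiment experiment = job.getExperiment();
        return experiment == null ?
            "The job is not part of an experiment" :
            "The job belongs to experiment: " + experiment.getName();
    }
}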
branches/webservices/src/core/net/sf/basedb/core/PluginDefinition.java
r3533 r3653 973 973 { 974 974 if (unloadBefore) JarClassLoader.unload(jarPath); 975 ClassLoader cl = JarClassLoader.getInstance(jarPath );975 ClassLoader cl = JarClassLoader.getInstance(jarPath, Application.autoUnloadPlugins()); 976 976 c = cl.loadClass(className); 977 977 } -
branches/webservices/src/core/net/sf/basedb/core/PluginRequest.java
r2981 r3653 146 146 } 147 147 148 /** 149 Get the values that are currently stored in the job or plugin configuration 150 related to this request. This method will first check the job and then the 151 plugin configuration. 152 @param name The name of the parameter 153 @return The parameter values, or null if no parameter with the given name 154 exists 155 */ 148 156 public List<?> getCurrentParameterValues(String name) 149 157 { … … 153 161 values = jobParameters.getValues(name); 154 162 } 155 else if (configurationParameters != null)163 if (values == null && configurationParameters != null) 156 164 { 157 165 values = configurationParameters.getValues(name); … … 161 169 162 170 171 /** 172 Get the values that are currently stored in the plugin configuration 173 related to this request. 174 @param name The name of the parameter 175 @return The parameter values, or null if no parameter with the given name 176 exists or if this request isn't associated with a plugin configuration 177 */ 163 178 public List<?> getCurrentConfigurationParameterValues(String name) 164 179 { … … 171 186 } 172 187 188 /** 189 Get the values that are currently stored in the job related to this 190 request. 191 @param name The name of the parameter 192 @return The parameter values, or null if no parameter with the given name 193 exists or if this request isn't associated with a job 194 */ 173 195 public List<?> getCurrentJobParameterValues(String name) 174 196 { -
branches/webservices/src/core/net/sf/basedb/core/PluginType.java
r3533 r3653 269 269 { 270 270 JarClassLoader.unload(jarPath); 271 ClassLoader cl = JarClassLoader.getInstance(jarPath );271 ClassLoader cl = JarClassLoader.getInstance(jarPath, Application.autoUnloadPlugins()); 272 272 c = cl.loadClass(interfaceName); 273 273 } -
branches/webservices/src/core/net/sf/basedb/core/Project.java
r3533 r3653 363 363 @param firstItem The index of the first item to return (0-based) 364 364 @param maxItems The maximum number of items to return, or 0 to return all items 365 @param permission sThe permission the logged in user must have on the item365 @param permission The permission the logged in user must have on the item 366 366 @param include Options for which items that should be included/excluded from the 367 367 result, or null to include all non-removed items -
branches/webservices/src/core/net/sf/basedb/core/RawDataTypes.java
r3581 r3653 24 24 package net.sf.basedb.core; 25 25 26 import net.sf.basedb.core.dbengine.DbEngine; 27 import net.sf.basedb.util.Values; 26 28 import net.sf.basedb.util.XMLUtil; 27 29 30 import java.util.HashSet; 28 31 import java.util.List; 29 32 import java.util.ArrayList; 30 33 import java.util.Map; 34 import java.util.Set; 31 35 import java.util.TreeMap; 32 36 import java.net.URL; … … 141 145 { 142 146 List<Element> rawDataTypeTags = dom.getRootElement().getChildren("raw-data-type"); 147 DbEngine engine = HibernateUtil.getDbEngine(); 148 Set<String> usedNames = new HashSet<String>(); 143 149 for (Element el : rawDataTypeTags) 144 150 { 145 String id = el.getAttributeValue("id");146 String name = el.getAttributeValue("name");147 String description = el.getAttributeValue("description");151 String id = Values.getStringOrNull(el.getAttributeValue("id")); 152 String name = Values.getStringOrNull(el.getAttributeValue("name")); 153 String description = Values.getStringOrNull(el.getAttributeValue("description")); 148 154 String storage = el.getAttributeValue("storage"); 149 String table = el.getAttributeValue("table");155 String table = Values.getStringOrNull(el.getAttributeValue("table")); 150 156 int channels = XMLUtil.getIntAttribute(el, "channels", 2); 151 List<RawDataProperty> properties = loadProperties(el); 157 158 if (!ExtendedProperty.isValidName(id)) 159 { 160 throw new InvalidDataException("Invalid id for raw data type: " + id); 161 } 162 if (usedNames.contains("name:" + name)) 163 { 164 throw new InvalidDataException("Duplicate name for raw data type " + 165 id +": name=" + name); 166 } 167 usedNames.add("name:" + name); 168 if ("database".equals(storage)) 169 { 170 if (!engine.isValidTableName(table)) 171 { 172 throw new InvalidDataException("Invalid table for raw data type " + 173 id +": table=" + table); 174 } 175 if (usedNames.contains("table:" + table)) 176 { 177 throw new InvalidDataException("Duplicate table for raw data type " + 178 id +": table=" + table); 179 } 180 usedNames.add("table:" + table); 181 } 182 if (channels <= 0) 183 { 184 throw new InvalidDataException("Number of channels must be > 0 for raw data type "+ 185 id + ": channels=" + channels); 186 } 187 List<RawDataProperty> properties = loadProperties(el, channels); 152 188 List<IntensityFormula> formulas = loadIntensityFormulas(el, channels); 153 189 RawDataType rdt = new RawDataType(id, name, description, channels, storage, table, properties, formulas); … … 161 197 */ 162 198 @SuppressWarnings({"unchecked"}) 163 private static List<RawDataProperty> loadProperties(Element rawDataTypeElement )199 private static List<RawDataProperty> loadProperties(Element rawDataTypeElement, int channels) 164 200 { 165 201 List<RawDataProperty> properties = new ArrayList<RawDataProperty>(); 166 202 List<Element> children = rawDataTypeElement.getChildren("property"); 203 String rawDataType = rawDataTypeElement.getAttributeValue("id"); 204 DbEngine engine = HibernateUtil.getDbEngine(); 205 Set<String> usedNames = new HashSet<String>(); 167 206 for (Element property : children) 168 207 { 169 String name = property.getAttributeValue("name"); 170 String title = property.getAttributeValue("title"); 208 String name = Values.getStringOrNull(property.getAttributeValue("name")); 209 if (!ExtendedProperty.isValidName(name)) 210 { 211 throw new InvalidDataException("Invalid property for raw data type " + 212 rawDataType + ": name=" + name); 213 } 214 if (usedNames.contains("name:" + name)) 215 { 216 throw new 
InvalidDataException("Duplicate property for raw data type " + 217 rawDataType + ": name=" + name); 218 } 219 usedNames.add("name:" + name); 220 String title = Values.getStringOrNull(property.getAttributeValue("title")); 171 221 if (title == null) title = name; 172 String column = property.getAttributeValue("column"); 173 String description = property.getAttributeValue("description"); 222 String column = Values.getStringOrNull(property.getAttributeValue("column")); 223 if (!engine.isValidColumnName(column)) 224 { 225 throw new InvalidDataException("Invalid column for property " + 226 rawDataType + "[" + name + "]: column=" + column); 227 } 228 if (usedNames.contains("column:" + column)) 229 { 230 throw new InvalidDataException("Duplicate column for property " + 231 rawDataType + "[" + name + "]: column=" + column); 232 } 233 usedNames.add("column:" + column); 234 String description = Values.getStringOrNull(property.getAttributeValue("description")); 174 235 Type type = Type.fromValue(property.getAttributeValue("type")); 175 236 int length = XMLUtil.getIntAttribute(property, "length", 255); … … 192 253 } 193 254 int channel = XMLUtil.getIntAttribute(property, "channel", 0); 255 if (channel < 0 || channel > channels) 256 { 257 throw new InvalidDataException("Channel for property " + rawDataType+ "[" + name + "]" 258 + " must be >= 0 and < " + channels + ": channel=" + channel); 259 } 194 260 properties.add(new RawDataProperty(name, title, description, column, type, length, nullable, averageMethod, channel)); 195 261 } … … 206 272 List<IntensityFormula> formulas = new ArrayList<IntensityFormula>(); 207 273 List<Element> children = rawDataTypeElement.getChildren("intensity-formula"); 274 String rawDataType = rawDataTypeElement.getAttributeValue("id"); 275 Set<String> usedNames = new HashSet<String>(); 208 276 for (Element formula : children) 209 277 { 210 String name = formula.getAttributeValue("name"); 211 String title = formula.getAttributeValue("title"); 278 String name = Values.getStringOrNull(formula.getAttributeValue("name")); 279 if (!ExtendedProperty.isValidName(name)) 280 { 281 throw new InvalidDataException("Invalid intensity formula for raw data type " + 282 rawDataType + ": name=" + name); 283 } 284 if (usedNames.contains("name:" + name)) 285 { 286 throw new InvalidDataException("Duplicate intensity formula for raw data type " + 287 rawDataType + ": name=" + name); 288 } 289 usedNames.add("name:" + name); 290 String title = Values.getStringOrNull(formula.getAttributeValue("title")); 212 291 if (title == null) title = name; 213 String description = formula.getAttributeValue("description");292 String description = Values.getStringOrNull(formula.getAttributeValue("description")); 214 293 String[] expressions = new String[channels]; 215 294 for (Element expression : (List<Element>)formula.getChildren("formula")) 216 295 { 217 296 int channel = XMLUtil.getIntAttribute(expression, "channel", 0); 297 String exp = Values.getStringOrNull(expression.getAttributeValue("expression")); 218 298 if (channel <= 0 || channel > channels) 219 299 { 220 throw new BaseException("Invalid channel number for expression: "+expression); 300 throw new InvalidDataException("Channel for intensity formula " + 301 rawDataType+ "[" + name + "]" 302 + " must be > 0 and < " + channels + ": channel=" + channel); 221 303 } 222 304 if (expressions[channel-1] != null) 223 305 { 224 throw new BaseException("Expression for channel "+channel+" has already been defined: " +expression); 225 } 226 expressions[channel-1] = 
expression.getAttributeValue("expression"); 306 throw new InvalidDataException("Duplicate expression for intensity formula " + 307 rawDataType + "[" + name + "]: channel=" + channel); 308 } 309 if (exp == null) 310 { 311 throw new InvalidDataException("Missing expression for formula " + 312 rawDataType + "[" + name + "]: channel=" + channel); 313 } 314 expressions[channel-1] = exp; 227 315 } 228 316 formulas.add(new IntensityFormula(name, title, description, expressions)); -
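The new validation above rejects duplicate names and duplicate database tables by recording both in one set with a "name:"/"table:" prefix. A minimal, stand-alone sketch of that prefixed-key check (JDK only; the class name and sample ids below are made up for illustration):

    import java.util.HashSet;
    import java.util.Set;

    public class DuplicateCheckDemo
    {
        public static void main(String[] args)
        {
            // One set covers both namespaces by prefixing the keys,
            // mirroring the "name:"/"table:" convention used by the loader
            Set<String> usedNames = new HashSet<String>();
            String[][] rawDataTypes = {
                { "genepix", "RawDataGenePix" },
                { "illumina", "RawDataIllumina" },
                { "genepix2", "RawDataGenePix" }    // same table as "genepix"
            };
            for (String[] rdt : rawDataTypes)
            {
                if (!usedNames.add("name:" + rdt[0]))
                {
                    throw new IllegalArgumentException("Duplicate name for raw data type: " + rdt[0]);
                }
                if (!usedNames.add("table:" + rdt[1]))
                {
                    throw new IllegalArgumentException("Duplicate table for raw data type " +
                        rdt[0] + ": table=" + rdt[1]);
                }
            }
        }
    }

Set.add() returns false when the element is already present, so the single call stands in for the contains()/add() pair used in the loader; the third entry above triggers the duplicate-table exception.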
branches/webservices/src/core/net/sf/basedb/core/Transformation.java
r2998 r3653 151 151 } 152 152 153 // Keep reference to job so we can call Job.setExperiment at end of transaction 154 private Job job; 155 153 156 /** 154 157 Creates a new experiment item from the given data. … 215 218 */ 216 219 /** 217 Delete the product bioassaysets. 220 Delete the product bioassaysets when deleting this transaction. 221 Set the experiment for the job when creating this transaction. 218 222 */ 219 223 void onBeforeCommit(Transactional.Action action) … 224 228 { 225 229 deleteProducts(); 230 } 231 else if (action == Transactional.Action.CREATE) 232 { 233 if (job != null) 234 { 235 getDbControl().refreshItem(job); 236 job.setExperiment(getExperiment()); 237 } 226 238 } 227 239 } … 358 370 private void setJob(Job job) 359 371 { 372 this.job = job; 360 373 getData().setJob(job == null ? null : job.getData()); 361 374 } -
branches/webservices/src/core/net/sf/basedb/core/Type.java
r3533 r3653 70 70 { 71 71 if (value == null) return null; 72 return new Integer( Float.valueOf(value).intValue());72 return new Integer(Double.valueOf(value).intValue()); 73 73 } 74 74 public Number convertNumber(Number num) … … 94 94 throws InvalidDataException 95 95 { 96 return new Long(Float.valueOf(value).intValue()); 96 if (value == null) return null; 97 return new Long(Double.valueOf(value).intValue()); 97 98 } 98 99 public Number convertNumber(Number num) … … 115 116 return value == null ? 0 : 4; 116 117 } 118 public Object parseString(String value) 119 throws InvalidDataException 120 { 121 if (value == null) return null; 122 Float f = Float.valueOf(value); 123 if (f.isNaN() || f.isInfinite()) f = null; 124 return f; 125 } 117 126 public Number convertNumber(Number num) 118 127 { … … 133 142 { 134 143 return value == null ? 0 : 8; 144 } 145 public Object parseString(String value) 146 throws InvalidDataException 147 { 148 if (value == null) return null; 149 Double d = Double.valueOf(value); 150 if (d.isNaN() || d.isInfinite()) d = null; 151 return d; 135 152 } 136 153 public Number convertNumber(Number num) -
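A small stand-alone illustration of why the string parsing above switched from Float.valueOf to Double.valueOf and why NaN/infinite values are turned into null (the demo class is hypothetical, not the BASE Type class):

    public class ParseDemo
    {
        public static void main(String[] args)
        {
            // float has only about 7 significant digits, so large integer
            // values lose precision when parsed via Float.valueOf
            String value = "16777217";
            System.out.println(Float.valueOf(value).intValue());   // 16777216
            System.out.println(Double.valueOf(value).intValue());  // 16777217

            // NaN and infinity are not storable as ordinary numeric values,
            // so the parser maps them to null instead of passing them on
            Float f = Float.valueOf("NaN");
            if (f.isNaN() || f.isInfinite()) f = null;
            System.out.println(f);                                  // null
        }
    }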
branches/webservices/src/core/net/sf/basedb/core/Update.java
r3581 r3653 31 31 import java.util.Date; 32 32 import java.util.HashMap; 33 import java.util.HashSet; 33 34 import java.util.List; 34 35 import java.util.Map; 36 import java.util.Set; 35 37 36 38 import org.hibernate.mapping.Table; 37 39 38 40 import net.sf.basedb.core.data.DataCubeData; 41 import net.sf.basedb.core.data.ExperimentData; 39 42 import net.sf.basedb.core.data.FileData; 40 43 import net.sf.basedb.core.data.FormulaData; 44 import net.sf.basedb.core.data.JobData; 41 45 import net.sf.basedb.core.data.MeasuredBioMaterialData; 42 46 import net.sf.basedb.core.data.PlateData; 43 47 import net.sf.basedb.core.data.PlateMappingData; 48 import net.sf.basedb.core.data.PluginConfigurationData; 49 import net.sf.basedb.core.data.PluginDefinitionData; 44 50 import net.sf.basedb.core.data.SchemaVersionData; 51 import net.sf.basedb.core.data.TransformationData; 45 52 import net.sf.basedb.core.dbengine.DbEngine; 46 53 import net.sf.basedb.core.dbengine.TableInfo; … … 392 399 <td> 393 400 <ul> 394 <li>Added {@link net.sf.basedb.core.data.JobData#get DeleteJobWhenFinished()}.395 </ul> 396 The update sets the <code> deleteJobWhenFinished</code> value to false for all existing jobs.401 <li>Added {@link net.sf.basedb.core.data.JobData#getRemoveJobWhenFinished()}. 402 </ul> 403 The update sets the <code>removeJobWhenFinished</code> value to false for all existing jobs. 397 404 </td> 398 405 </tr> … … 439 446 </td> 440 447 </tr> 448 449 <tr> 450 <td>38</td> 451 <td> 452 <ul> 453 <li>Added {@link net.sf.basedb.core.data.JobData#getExperiment()}. 454 </ul> 455 The update sets the experiment for jobs that belong to a single 456 transformation. 457 </td> 458 </tr> 459 460 <tr> 461 <td>39</td> 462 <td> 463 Remove AnyToAny links created by older versions of the 464 Base1PluginExecuter plug-in. 465 </td> 466 </tr> 467 468 <tr> 469 <td>40</td> 470 <td> 471 Change invalid property filter on 'guiContexts.itemType' to 472 '$ctx.itemType'. 
473 </td> 474 </tr> 441 475 442 476 </table> … … 617 651 } 618 652 619 if (schemaVersion < 36) 620 { 621 if (progress != null) progress.display((int)(35*progress_factor), "--Updating schema version: " + schemaVersion + " -> 36..."); 622 schemaVersion = setSchemaVersionInTransaction(session, 36); 623 } 624 625 653 // Schemaversion 36-37 only updates the version number 626 654 if (schemaVersion < 37) 627 655 { … … 630 658 } 631 659 660 if (schemaVersion < 38) 661 { 662 if (progress != null) progress.display((int)(37*progress_factor), "--Updating schema version: " + schemaVersion + " -> 38..."); 663 schemaVersion = updateToSchemaVersion38(session); 664 } 665 666 if (schemaVersion < 39) 667 { 668 if (progress != null) progress.display((int)(38*progress_factor), "--Updating schema version: " + schemaVersion + " -> 39..."); 669 schemaVersion = updateToSchemaVersion39(session); 670 } 671 672 if (schemaVersion < 40) 673 { 674 if (progress != null) progress.display((int)(39*progress_factor), "--Updating schema version: " + schemaVersion + " -> 40..."); 675 schemaVersion = updateToSchemaVersion40(session); 676 } 677 632 678 /* 633 if (schemaVersion < 38)634 { 635 if (progress != null) progress.display((int)( 37*progress_factor), "--Updating schema version: " + schemaVersion + " -> 38...");636 schemaVersion = setSchemaVersionInTransaction(session, 38);679 if (schemaVersion < 41) 680 { 681 if (progress != null) progress.display((int)(40*progress_factor), "--Updating schema version: " + schemaVersion + " -> 41..."); 682 schemaVersion = setSchemaVersionInTransaction(session, 41); 637 683 - or - 638 schemaVersion = updateToSchemaVersion 38(session);684 schemaVersion = updateToSchemaVersion41(session); 639 685 } 640 686 ... etc... … … 1386 1432 return schemaVersion; 1387 1433 } 1434 1435 /** 1436 Set the experiment for jobs that belong to a single transformation. 
1437 @return The new schema version (=38) 1438 */ 1439 private static int updateToSchemaVersion38(org.hibernate.Session session) 1440 { 1441 final int schemaVersion = 38; 1442 org.hibernate.Transaction tx = null; 1443 try 1444 { 1445 tx = HibernateUtil.newTransaction(session); 1446 1447 // Load all transformations 1448 org.hibernate.Query query = HibernateUtil.createQuery(session, 1449 "SELECT t FROM TransformationData t"); 1450 1451 List<TransformationData> transformations = HibernateUtil.loadList(TransformationData.class, query); 1452 Map<JobData, Set<ExperimentData>> experiments = new HashMap<JobData, Set<ExperimentData>>(); 1453 for (TransformationData transformation : transformations) 1454 { 1455 JobData job = transformation.getJob(); 1456 if (job != null) 1457 { 1458 ExperimentData experiment = transformation.getExperiment(); 1459 if (!experiments.containsKey(job)) 1460 { 1461 experiments.put(job, new HashSet<ExperimentData>()); 1462 } 1463 experiments.get(job).add(experiment); 1464 } 1465 } 1466 1467 for (Map.Entry<JobData, Set<ExperimentData>> entry : experiments.entrySet()) 1468 { 1469 JobData job = entry.getKey(); 1470 Set<ExperimentData> jobExperiments = entry.getValue(); 1471 if (jobExperiments.size() == 1) 1472 { 1473 job.setExperiment(jobExperiments.iterator().next()); 1474 } 1475 } 1476 1477 // Update the shcema version number 1478 setSchemaVersion(session, schemaVersion); 1479 1480 // Commit the changes 1481 HibernateUtil.commit(tx); 1482 log.info("updateToSchemaVersion38: OK"); 1483 } 1484 catch (BaseException ex) 1485 { 1486 if (tx != null) HibernateUtil.rollback(tx); 1487 log.error("updateToSchemaVersion38: FAILED", ex); 1488 throw ex; 1489 } 1490 return schemaVersion; 1491 } 1492 /** 1493 Remove AnyToAny links created by older versions of Base1PluginExecuter 1494 @return The new schema version (=39) 1495 */ 1496 private static int updateToSchemaVersion39(org.hibernate.Session session) 1497 { 1498 final int schemaVersion = 39; 1499 org.hibernate.Transaction tx = null; 1500 try 1501 { 1502 tx = HibernateUtil.newTransaction(session); 1503 1504 // Load Base1PluginExecuter 1505 org.hibernate.Query query = HibernateUtil.getPredefinedQuery(session, 1506 "GET_PLUGINDEFINITION_FOR_CLASSNAME"); 1507 /* 1508 SELECT pd 1509 FROM PluginDefinitionData pd 1510 WHERE pd.className = :className 1511 */ 1512 query.setString("className", "net.sf.basedb.plugins.Base1PluginExecuter"); 1513 PluginDefinitionData base1Executer = HibernateUtil.loadData(PluginDefinitionData.class, query); 1514 1515 // Load all plugin configurations for net.sf.basedb.plugins.Base1PluginExecuter 1516 query = HibernateUtil.getPredefinedQuery(session, 1517 "GET_PLUGINCONFIGURATIONS_FOR_PLUGINDEFINITION", "pc"); 1518 /* 1519 SELECT {1} 1520 FROM PluginConfigurationData pc 1521 WHERE pc.pluginDefinition = :pluginDefinition 1522 */ 1523 query.setEntity("pluginDefinition", base1Executer); 1524 List<PluginConfigurationData> configurations = 1525 HibernateUtil.loadList(PluginConfigurationData.class, query); 1526 1527 // Delete AnyToAny for each configuration 1528 query = HibernateUtil.getPredefinedQuery(session, "DELETE_ALL_ANYTOANY_FROM"); 1529 /* 1530 DELETE FROM AnyToAnyData ata 1531 WHERE ata.fromId = :fromId AND ata.fromType = :fromType 1532 */ 1533 query.setInteger("fromType", Item.PLUGINCONFIGURATION.getValue()); 1534 for (PluginConfigurationData pc : configurations) 1535 { 1536 query.setInteger("fromId", pc.getId()); 1537 HibernateUtil.executeUpdate(query); 1538 } 1539 1540 // Update the schema version number 1541 
setSchemaVersion(session, schemaVersion); 1542 1543 // Commit the changes 1544 HibernateUtil.commit(tx); 1545 log.info("updateToSchemaVersion39: OK"); 1546 } 1547 catch (BaseException ex) 1548 { 1549 if (tx != null) HibernateUtil.rollback(tx); 1550 log.error("updateToSchemaVersion39: FAILED", ex); 1551 throw ex; 1552 } 1553 return schemaVersion; 1554 } 1555 1556 /** 1557 Update property filter on 'guiContexts.itemType' to '$ctx.itemType' 1558 @return The new schema version (=40) 1559 */ 1560 private static int updateToSchemaVersion40(org.hibernate.Session session) 1561 { 1562 final int schemaVersion = 40; 1563 org.hibernate.Transaction tx = null; 1564 try 1565 { 1566 tx = HibernateUtil.newTransaction(session); 1567 1568 // Change property values 1569 org.hibernate.Query query = HibernateUtil.getPredefinedSQLQuery(session, 1570 "UPDATE_PROPERTY_FILTER"); 1571 /* 1572 UPDATE PropertyFilters pf 1573 SET pf.property = :newProperty 1574 WHERE pf.property = :oldProperty 1575 */ 1576 query.setString("oldProperty", "guiContexts.itemType"); 1577 query.setString("newProperty", "$ctx.itemType"); 1578 HibernateUtil.executeUpdate(query); 1579 1580 // Update the schema version number 1581 setSchemaVersion(session, schemaVersion); 1582 1583 // Commit the changes 1584 HibernateUtil.commit(tx); 1585 log.info("updateToSchemaVersion40: OK"); 1586 } 1587 catch (BaseException ex) 1588 { 1589 if (tx != null) HibernateUtil.rollback(tx); 1590 log.error("updateToSchemaVersion40: FAILED", ex); 1591 throw ex; 1592 } 1593 return schemaVersion; 1594 } 1595 1388 1596 1389 1597 /** -
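The schema version 38 update assigns an experiment to a job only when all of the job's transformations point to the same experiment. A stripped-down sketch of that grouping rule, with plain strings standing in for JobData and ExperimentData:

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class SingleExperimentDemo
    {
        public static void main(String[] args)
        {
            // transformation -> (job, experiment) pairs; job2 is ambiguous
            String[][] transformations = {
                { "job1", "expA" },
                { "job2", "expA" },
                { "job2", "expB" }
            };
            Map<String, Set<String>> experiments = new HashMap<String, Set<String>>();
            for (String[] t : transformations)
            {
                if (!experiments.containsKey(t[0]))
                {
                    experiments.put(t[0], new HashSet<String>());
                }
                experiments.get(t[0]).add(t[1]);
            }
            for (Map.Entry<String, Set<String>> entry : experiments.entrySet())
            {
                // Only assign when the experiment is unambiguous
                if (entry.getValue().size() == 1)
                {
                    System.out.println(entry.getKey() + " -> " + entry.getValue().iterator().next());
                }
            }
            // prints: job1 -> expA
        }
    }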
branches/webservices/src/core/net/sf/basedb/core/data/ExperimentData.java
r2962 r3653 313 313 this.transformations = transformations; 314 314 } 315 316 private Set<JobData> jobs; 317 /** 318 This is the inverse end. 319 @see JobData#getExperiment() 320 @since 2.4 321 @hibernate.set lazy="true" inverse="true" cascade="delete" 322 @hibernate.collection-key column="`experiment_id`" 323 @hibernate.collection-one-to-many class="net.sf.basedb.core.data.JobData" 324 */ 325 Set<JobData> getJobs() 326 { 327 return jobs; 328 } 329 void setJobs(Set<JobData> jobs) 330 { 331 this.jobs = jobs; 332 } 315 333 316 334 private Set<AnnotationTypeData> experimentalFactors; -
branches/webservices/src/core/net/sf/basedb/core/data/ExtendableData.java
r3581 r3653 68 68 <td> 69 69 The property name of the extended property. 70 See {@link net.sf.basedb.core.ExtendedProperty#getName()}. 70 See {@link net.sf.basedb.core.ExtendedProperty#getName()}. The name 71 must only contain letters, numbers and underscores but the first character 72 can't be a number. The name must be unique within the class. 71 73 </td> 72 74 </tr> … … 89 91 <td> 90 92 The database column name of the extended property. This value 91 must of course be unique for each class. 92 See {@link net.sf.basedb.core.ExtendedProperty#getColumn()}. 93 must be unique for each class. The value is validated by the 94 {@link net.sf.basedb.core.dbengine.DbEngine#isValidColumnName(String)} 95 which normally means it must follow the same rules as the <code>name</code> 96 attribute. See {@link net.sf.basedb.core.ExtendedProperty#getColumn()}. 93 97 </td> 94 98 </tr> -
branches/webservices/src/core/net/sf/basedb/core/data/JobData.java
r3533 r3653 128 128 } 129 129 130 private ExperimentData experiment; 131 132 /** 133 The experiment this job is a part of. 134 @since 2.4 135 @hibernate.many-to-one column="`experiment_id`" not-null="false" outer-join="false" 136 */ 137 public ExperimentData getExperiment() 138 { 139 return experiment; 140 } 141 public void setExperiment(ExperimentData experiment) 142 { 143 this.experiment = experiment; 144 } 145 130 146 private int parameterVersion; 131 147 /** -
branches/webservices/src/core/net/sf/basedb/core/dbengine/AbstractDbEngine.java
r2812 r3653 24 24 package net.sf.basedb.core.dbengine; 25 25 26 import java.util.regex.Pattern; 27 26 28 /** 27 29 An abstract superclass with default implementations for most {@link DbEngine} … … 36 38 { 37 39 40 /** 41 A regexp checking for invalid characters. 42 */ 43 private static final Pattern valid = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*"); 44 38 45 /** 39 46 Create AbstractDbEngine. … … 121 128 return "EXP("+ value + ")"; 122 129 } 130 /** 131 Checks that the name only contains the following characters: a-zA-Z0-9_ 132 It must also start with a letter or underscore. 133 @since 2.4 134 */ 135 public boolean isValidTableName(String tableName) 136 { 137 return isValidName(tableName); 138 } 139 140 /** 141 Checks that the name only contains the following characters: a-zA-Z0-9_ 142 It must also start with a letter or underscore. 143 */ 144 public boolean isValidColumnName(String columnName) 145 { 146 return isValidName(columnName); 147 } 123 148 // ------------------------------------------- 124 149 150 /** 151 Check that the name only contains a-zA-Z0-9_ and starts with 152 a letter or underscore. 153 @since 2.4 154 */ 155 protected boolean isValidName(String name) 156 { 157 return name == null ? false : valid.matcher(name).matches(); 158 } 159 160 125 161 } -
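The table/column validation added above reduces to a single regular expression. A self-contained sketch of the same check with a few sample names (the demo class is hypothetical):

    import java.util.regex.Pattern;

    public class NameCheckDemo
    {
        // Same pattern as AbstractDbEngine: a letter or underscore
        // followed by any number of letters, digits or underscores
        private static final Pattern VALID = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*");

        static boolean isValidName(String name)
        {
            return name == null ? false : VALID.matcher(name).matches();
        }

        public static void main(String[] args)
        {
            System.out.println(isValidName("RawDataIllumina"));  // true
            System.out.println(isValidName("avg_signal"));       // true
            System.out.println(isValidName("2ndChannel"));       // false - starts with a digit
            System.out.println(isValidName("avg signal"));       // false - contains a space
            System.out.println(isValidName(null));               // false
        }
    }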
branches/webservices/src/core/net/sf/basedb/core/dbengine/DbEngine.java
r3533 r3653 239 239 public String exp(String value); 240 240 241 /** 242 Check if a given string is valid to be used as a table name in the 243 current database. 244 @param tableName The string to check 245 @return TRUE if the name is valid, FALSE if not 246 @since 2.4 247 */ 248 public boolean isValidTableName(String tableName); 249 250 /** 251 Check if a given string is valid to be used as a column name in the 252 current database. 253 @param columnName The string to check 254 @return TRUE if the name is valid, FALSE if not 255 @since 2.4 256 */ 257 public boolean isValidColumnName(String columnName); 241 258 } -
branches/webservices/src/core/net/sf/basedb/core/dtd/extended-properties.dtd
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/dtd/helpfile.dtd
- Property svn:eol-style set to native
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/dtd/plugin-configuration-file.dtd
- Property svn:eol-style set to native
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/dtd/predefined-queries.dtd
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/dtd/presets.dtd
- Property svn:eol-style set to native
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/dtd/raw-data-types.dtd
- Property svn:keyword set to Id Date
-
branches/webservices/src/core/net/sf/basedb/core/plugin/AbstractAnalysisPlugin.java
r3581 r3653 31 31 import net.sf.basedb.core.BioAssaySet; 32 32 import net.sf.basedb.core.DbControl; 33 import net.sf.basedb.core.DynamicSpotQuery; 33 34 import net.sf.basedb.core.Experiment; 34 35 import net.sf.basedb.core.Item; … 40 41 import net.sf.basedb.core.StringParameterType; 41 42 import net.sf.basedb.core.Transformation; 43 import net.sf.basedb.core.VirtualColumn; 44 import net.sf.basedb.core.query.Dynamic; 45 import net.sf.basedb.core.query.Expression; 46 import net.sf.basedb.core.query.Expressions; 47 import net.sf.basedb.core.query.Restrictions; 42 48 43 49 /** … 111 117 The bioAssays subset parameter. 112 118 To use when working on subset of bioassays in a bioAssaySet 113 @see #getSourceBioAssays SubSetParameter(String, String, List<BioAssay>) 119 @see #getSourceBioAssaysParameter(String, String) 114 120 */ 115 121 private PluginParameter<BioAssay> sourceSubSetParameter = null; … 211 217 } 212 218 213 protected static final String SOURCE_BIOASSAYS = " source_subset"; 219 protected static final String SOURCE_BIOASSAYS = "bioAssays"; 214 220 215 221 /** … 217 223 for the plugin. 218 224 @param label The parameter's label. Uses a default value if null. 219 *@param description Description of the parameter. Uses a default value if null. 225 @param description Description of the parameter. Uses a default value if null. 220 226 @return A plugin parameter 221 227 @since 2.4 … 414 420 } 415 421 416 422 /** 423 Restricts a query to only include values from the listed bioassays 424 @param query The query to set restriction on 425 @param bioAssays A list with bioassays the restriction should include 426 @return A DynamicSpotQuery object. The query parameter is restricted if bioAssays holds item(s), 427 otherwise it is returned unchanged 428 @since 2.4 429 */ 430 protected DynamicSpotQuery restrictSource(DynamicSpotQuery query, List<BioAssay> bioAssays) 431 { 432 if ( (bioAssays != null) && !bioAssays.isEmpty() ) 433 { 434 Expression[] selectedColumns = new Expression[bioAssays.size()]; 435 for (int i=0; i<selectedColumns.length; i++) 436 { 437 BioAssay ba = bioAssays.get(i); 438 selectedColumns[i] = Expressions.integer(ba.getDataCubeColumnNo()); 439 } 440 query.restrict(Restrictions.in( 441 Dynamic.column(VirtualColumn.COLUMN), 442 selectedColumns)); 443 } 444 return query; 445 } 417 446 } -
branches/webservices/src/core/net/sf/basedb/core/plugin/AbstractExporterPlugin.java
r3133 r3653 74 74 75 75 protected static final BooleanParameterType overwriteType = 76 new BooleanParameterType( false, false);76 new BooleanParameterType(null, false); 77 77 78 78 /** -
branches/webservices/src/core/net/sf/basedb/core/plugin/ExportOutputStream.java
r2981 r3653 88 88 out.write(b); 89 89 } 90 @Override 90 91 public void write(int b) 91 92 throws IOException -
branches/webservices/src/core/net/sf/basedb/util/FileUtil.java
r3533 r3653 86 86 totalBytes += bytes; 87 87 out.write(buffer, 0, bytes); 88 } 89 } 90 return totalBytes; 91 } 92 93 /** 94 Read from the input stream until the end is reached. 95 96 @param in The <code>InputStream</code> to read from 97 @return The number of bytes read 98 @throws IOException This exception is thrown if there is an error 99 @since 2.4 100 */ 101 public static long read(InputStream in) 102 throws IOException 103 { 104 int bytes = 0; 105 long totalBytes = 0; 106 byte[] buffer = new byte[BUFFER_SIZE]; 107 108 while (bytes != -1) // -1 = end of stream 109 { 110 bytes = in.read(buffer, 0, buffer.length); 111 if (bytes > 0) 112 { 113 totalBytes += bytes; 88 114 } 89 115 } -
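The new FileUtil.read(InputStream) simply drains a stream and reports how many bytes passed by. A JDK-only sketch of the same loop, runnable against an in-memory stream (the demo class is invented for the example):

    import java.io.ByteArrayInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class DrainDemo
    {
        // Read until end-of-stream and return the number of bytes seen
        static long drain(InputStream in)
            throws IOException
        {
            byte[] buffer = new byte[4096];
            long totalBytes = 0;
            int bytes;
            while ((bytes = in.read(buffer, 0, buffer.length)) != -1)
            {
                totalBytes += bytes;
            }
            return totalBytes;
        }

        public static void main(String[] args)
            throws IOException
        {
            InputStream in = new ByteArrayInputStream(new byte[10000]);
            System.out.println(drain(in));   // 10000
        }
    }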
branches/webservices/src/core/net/sf/basedb/util/InputStreamSplitter.java
r2689 r3653 43 43 { 44 44 45 private int numRead; 46 45 47 /** 46 48 The source input stream to read from. … … 54 56 55 57 /** 56 Since the various read methods call each other we must keep 57 track of this so only the first called method copies the data. 58 */ 59 private boolean doCopy = true; 60 61 /** 62 Create a new input stream splitter. 58 If the {@link #skip(long)} method should copy 59 to the output streams or no. 60 */ 61 private final boolean copySkipped; 62 63 /** 64 If the remainder of the input stream should be copied 65 if it is closed before the end. 66 */ 67 private final boolean readToEnd; 68 69 /** 70 Create a new input stream splitter. Skipped bytes are not copied 71 and the remainder of the stream is not copied if closed. 63 72 @param in The input stream to read from 64 73 @param copyTo The output streams to copy data to 65 74 */ 66 75 public InputStreamSplitter(InputStream in, OutputStream... copyTo) 76 { 77 this(in, false, false, copyTo); 78 } 79 80 /** 81 Create a new input stream splitter. Skipped bytes are not copied 82 and the remainder of the stream is not copied if closed. 83 @param in The input stream to read from 84 @param copyTo The output streams to copy data to 85 @since 2.4 86 */ 87 public InputStreamSplitter(InputStream in, boolean copySkipped, boolean readToEnd, OutputStream... copyTo) 67 88 { 68 89 if (in == null) throw new NullPointerException("in"); … … 70 91 this.in = in; 71 92 this.copyTo = copyTo; 72 } 93 this.copySkipped = copySkipped; 94 this.readToEnd = readToEnd; 95 } 96 73 97 /* 74 98 From the InputStream class … … 85 109 throws IOException 86 110 { 111 if (readToEnd) 112 { 113 FileUtil.read(this); 114 } 87 115 in.close(); 116 for (OutputStream out : copyTo) 117 { 118 out.flush(); 119 } 88 120 } 89 121 @Override 90 122 public synchronized void mark(int readlimit) 91 { 92 in.mark(readlimit); 93 } 123 {} 94 124 @Override 95 125 public boolean markSupported() 96 126 { 97 return in.markSupported(); 98 } 127 return false; 128 } 129 @Override 99 130 public int read() 100 131 throws IOException 101 132 { 102 boolean oldDoCopy = doCopy;103 doCopy = false;104 133 int result = in.read(); 105 doCopy = oldDoCopy; 106 if (doCopy && result != -1) copy(result); 134 if (result >= 0) 135 { 136 ++numRead; 137 copy(result); 138 } 107 139 return result; 108 140 } … … 111 143 throws IOException 112 144 { 113 boolean oldDoCopy = doCopy;114 doCopy = false;115 145 int result = in.read(b, off, len); 116 doCopy = oldDoCopy; 117 if (doCopy && result > 0) copy(b, off, result); 146 if (result > 0) 147 { 148 numRead += result; 149 copy(b, off, result); 150 } 118 151 return result; 119 152 } … … 128 161 throws IOException 129 162 { 130 in.reset();163 throw new IOException("reset/mark is not supported"); 131 164 } 132 165 @Override … … 134 167 throws IOException 135 168 { 136 boolean oldDoCopy = doCopy; 137 doCopy = false; 138 long result = in.skip(n); 139 doCopy = oldDoCopy; 169 long result = 0; 170 if (copySkipped) 171 { 172 // This will call the read(byte[], int, int) method 173 result = super.skip(n); 174 } 175 else 176 { 177 in.skip(n); 178 } 140 179 return result; 141 180 } … … 143 182 144 183 /** 184 Get the number of bytes that has been read or skipped 185 from the input stream. 186 @since 2.4 187 */ 188 public long getNumRead() 189 { 190 return numRead; 191 } 192 193 /** 145 194 Write data to all output streams. 146 195 */ -
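The splitter mirrors everything read from the source stream to each registered output stream and counts the bytes. A minimal tee loop showing the same idea with plain JDK streams (this is a sketch of the behaviour, not the BASE class itself):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class TeeDemo
    {
        public static void main(String[] args)
            throws IOException
        {
            InputStream in = new ByteArrayInputStream("hello splitter".getBytes("UTF-8"));
            ByteArrayOutputStream copy1 = new ByteArrayOutputStream();
            ByteArrayOutputStream copy2 = new ByteArrayOutputStream();

            byte[] buffer = new byte[1024];
            long numRead = 0;
            int bytes;
            while ((bytes = in.read(buffer)) != -1)
            {
                numRead += bytes;
                copy1.write(buffer, 0, bytes);   // every byte read from the source...
                copy2.write(buffer, 0, bytes);   // ...is mirrored to all outputs
            }
            System.out.println(numRead);                   // 14
            System.out.println(copy1.toString("UTF-8"));   // hello splitter
        }
    }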
branches/webservices/src/core/net/sf/basedb/util/JarClassLoader.java
r3533 r3653 68 68 A map of all loaded class loaders. 69 69 */ 70 private static final HashMap<String, ClassLoader> classLoaders = new HashMap<String, ClassLoader>(); 70 private static final HashMap<String, JarClassLoader> classLoaders = 71 new HashMap<String, JarClassLoader>(); 71 72 72 73 /** … 82 83 throws IOException 83 84 { 84 ClassLoader cl = classLoaders.get(jarPath); 85 return getInstance(jarPath, false); 86 } 87 88 /** 89 Get a class loader for the specified JAR file, optionally unloading the old 90 one if the JAR file has been modified. A new class loader is created if no 91 class loader exists or if <code>autoUnload</code> is <code>true</code> and the JAR file 92 has changed since the existing class loader was created. 93 94 @param jarPath The path to a JAR file 95 @param autoUnload If TRUE the old class loader will automatically be unloaded if the 96 JAR file has been modified (if the timestamp and/or size is different) 97 @return A class loader 98 @throws IOException If the jar file can't be loaded 99 @since 2.4 100 */ 101 public static final ClassLoader getInstance(String jarPath, boolean autoUnload) 102 throws IOException 103 { 104 JarClassLoader cl = classLoaders.get(jarPath); 105 if (cl != null && autoUnload) 106 { 107 // Check if JAR file exists and has same timestamp and size 108 File jarFile = new File(jarPath); 109 boolean isSame = jarFile.exists() && jarFile.lastModified() == cl.jarTimeStamp && jarFile.length() == cl.jarSize; 110 if (!isSame) cl = null; 111 } 85 112 if (cl == null) 86 113 { … 93 120 return cl; 94 121 } 95 122 123 96 124 /** 97 125 Unload the class loader for the given JAR file. The class loader will … 134 162 135 163 /** 164 The timestamp of the JAR file. 165 */ 166 private final long jarTimeStamp; 167 168 /** 169 The size of the JAR file. 170 */ 171 private final long jarSize; 172 173 174 /** 136 175 Create a new JAR file class loader. 137 176 @param jarPath The path to the JAR file … … 145 184 mainJarFile = new File(jarPath); 146 185 if (!mainJarFile.exists()) throw new InvalidDataException("JAR file not found: " + jarPath); 186 this.jarTimeStamp = mainJarFile.lastModified(); 187 this.jarSize = mainJarFile.length(); 147 188 classPath = new HashMap<String, File>(); 148 189 loadJarFile(mainJarFile, true); -
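The auto-unload check above compares the cached loader's recorded timestamp and size against the JAR file on disk. A stand-alone sketch of that staleness test (the path below is hypothetical):

    import java.io.File;

    public class StaleJarCheck
    {
        // A cached loader is stale when the file is gone or when its
        // timestamp or size no longer matches the values recorded at load time
        static boolean isStale(File jar, long cachedTimeStamp, long cachedSize)
        {
            return !(jar.exists()
                && jar.lastModified() == cachedTimeStamp
                && jar.length() == cachedSize);
        }

        public static void main(String[] args)
        {
            File jar = new File("plugins/myplugin.jar");   // hypothetical path
            long timeStamp = jar.lastModified();
            long size = jar.length();
            // false right after the values were recorded (if the file exists),
            // true once the JAR is replaced or removed
            System.out.println(isStale(jar, timeStamp, size));
        }
    }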
branches/webservices/src/core/net/sf/basedb/util/RemovableUtil.java
r3133 r3653 167 167 Sets the removed-flag for all subitems to a parent 168 168 @param dc The <code>DbControl</code> which will be used for database access 169 @param parentParent of the subitems that shall be removed 169 @param item Parent of the subitems that shall be removed 170 170 @param removed True if the subitems should be flagged removed. 171 171 False if the subitems should be restored -
branches/webservices/src/core/net/sf/basedb/util/Values.java
r3194 r3653 65 65 if (value != null) 66 66 { 67 try { return Float.valueOf(value).intValue(); }67 try { return Double.valueOf(value).intValue(); } 68 68 catch (Throwable t) {} 69 69 } … … 75 75 if (value != null) 76 76 { 77 try { return Float.valueOf(value).intValue(); }77 try { return Double.valueOf(value).intValue(); } 78 78 catch (Throwable t) {} 79 79 } -
branches/webservices/src/core/net/sf/basedb/util/XMLUtil.java
r2304 r3653 91 91 92 92 /** 93 Validate an <code>InputStream</code> against a DTD file 94 and return it as a <code>Document</code> 95 96 @param is The inputstream to the XML 97 @param dtdFile The URL to the DTD used for validation 98 @return A <code>Document</code> object 93 @deprecated Use {@link #getValidatedXML(InputStream, URL, String)} instead 99 94 */ 100 95 public static Document getValidatedXML(InputStream is, URL dtdFile) 101 96 throws JDOMException, IOException 102 97 { 103 SAXBuilder sax = new SAXBuilder(xmlParserClass, true); 104 Validator validator = new Validator(null, dtdFile); 105 // The entity resolver lets us find the correct DTD for validating 106 sax.setEntityResolver(validator); 107 // So we can throw our own exception messages if there is a parse error 108 sax.setErrorHandler(validator); 109 Document dom = sax.build(is); 110 is.close(); 111 return dom; 98 return getValidatedXML(is, dtdFile, null); 112 99 } 113 100 … … 127 114 { 128 115 InputStream is = xmlFile.openStream(); 129 Document xmlDoc = getValidatedXML(is, dtdFile );116 Document xmlDoc = getValidatedXML(is, dtdFile, xmlFile.toString()); 130 117 return xmlDoc; 131 118 } … … 157 144 158 145 /** 146 Validate an <code>InputStream</code> against a DTD file 147 and return it as a <code>Document</code> 148 149 @param is The inputstream to the XML 150 @param dtdFile The URL to the DTD used for validation 151 @param filename The filename of the original XML data, used for error reporting only; 152 use null if the filename is not known 153 @return A <code>Document</code> object 154 @since 2.4 155 */ 156 public static Document getValidatedXML(InputStream is, URL dtdFile, String filename) 157 throws JDOMException, IOException 158 { 159 SAXBuilder sax = new SAXBuilder(xmlParserClass, true); 160 Validator validator = new Validator(filename, dtdFile); 161 // The entity resolver lets us find the correct DTD for validating 162 sax.setEntityResolver(validator); 163 // So we can throw our own exception messages if there is a parse error 164 sax.setErrorHandler(validator); 165 Document dom = sax.build(is); 166 is.close(); 167 return dom; 168 } 169 170 171 /** 159 172 Get the value of an attribute as an integer. If the attribute is missing or 160 173 doesn't contain an integer, the default value is returned. -
branches/webservices/src/core/net/sf/basedb/util/jep/Jep.java
r3533 r3653 24 24 package net.sf.basedb.util.jep; 25 25 26 import java.util.Collection; 26 27 import java.util.Vector; 27 28 … … 34 35 import net.sf.basedb.core.query.Restriction; 35 36 import net.sf.basedb.util.Enumeration; 37 import net.sf.basedb.util.jep.convert.ConverterFactory; 38 import net.sf.basedb.util.jep.convert.JepConversionFunction; 36 39 37 40 import org.nfunk.jep.JEP; 38 41 import org.nfunk.jep.Node; 39 import org.nfunk.jep.ASTFunNode;40 42 import org.nfunk.jep.ASTVarNode; 41 43 import org.nfunk.jep.ASTConstant; 42 import org.nfunk.jep.OperatorSet;43 import org.nfunk.jep.Operator;44 import org.nfunk.jep.function.Abs;45 import org.nfunk.jep.function.Exp;46 import org.nfunk.jep.function.PostfixMathCommandI;47 import org.nfunk.jep.function.SquareRoot;48 import org.nfunk.jep.function.Subtract;49 import org.nfunk.jep.function.Add;50 import org.nfunk.jep.function.Divide;51 import org.nfunk.jep.function.Multiply;52 import org.nfunk.jep.function.UMinus;53 import org.nfunk.jep.function.Logarithm;54 import org.nfunk.jep.function.NaturalLogarithm;55 import org.nfunk.jep.function.Comparative;56 import org.nfunk.jep.function.Logical;57 import org.nfunk.jep.function.Not;58 44 59 45 /** … … 86 72 87 73 private static Enumeration<String, String> functions; 74 private static Enumeration<String, String> operators; 88 75 89 76 /** … … 125 112 126 113 /** 127 Get a list of functions supported by the {@link #jepToExpression(JEP)} 128 converter. 114 Get a list of built-in functions supported by the {@link #jepToExpression(JEP)} 115 converter. More functions can be registered when creating the JEP 116 expression (see {@link #newJep(String, JepFunction[])}). 117 129 118 @return An enumeration with the name of the function in the key 130 119 and a description in the value part … … 135 124 { 136 125 functions = new Enumeration<String, String>(); 137 functions.add("log", "10-based logarithm"); 126 // Built-in converters 127 Collection<JepConversionFunction<?>> converters = ConverterFactory.getConverters(); 128 for (JepConversionFunction converter : converters) 129 { 130 if (converter.isFunction()) 131 { 132 functions.add(converter.getName(), converter.getDescription()); 133 } 134 } 135 // Other standard functions that we always add; see newJep() method 138 136 functions.add("log2", "2-based logarithm"); 139 functions.add("ln", "Natural logarithm"); 140 functions.add("exp", "Exponential"); 141 functions.add("sqrt", "Square root"); 142 functions.add("abs", "Absolute value"); 137 functions.sortKeys(); 143 138 functions.lock(); 144 139 } 145 140 return functions; 146 141 } 142 143 /** 144 Get a list of built-in operators supported by the {@link #jepToExpression(JEP)} 145 and {@link #jepToRestriction(JEP)} converter. It is not possible to 146 register more operators. 
147 148 @return An enumeration with the symbol of the operator in the key 149 and a description in the value part 150 @since 2.4 151 */ 152 public static Enumeration<String, String> getOperators() 153 { 154 if (operators == null) 155 { 156 operators = new Enumeration<String, String>(); 157 // Built-in converters 158 Collection<JepConversionFunction<?>> converters = ConverterFactory.getConverters(); 159 for (JepConversionFunction converter : converters) 160 { 161 if (converter.isOperator()) 162 { 163 operators.add(converter.getName(), converter.getDescription()); 164 } 165 } 166 operators.sortKeys(); 167 operators.lock(); 168 } 169 return operators; 170 } 171 147 172 148 173 /** … … 228 253 throws BaseException 229 254 { 230 return nodeToRestriction(jep.getTopNode() , jep.getOperatorSet());255 return nodeToRestriction(jep.getTopNode()); 231 256 } 232 257 … … 245 270 Convert a node with it's children to an expression. 246 271 */ 272 @SuppressWarnings("unchecked") 247 273 public static Expression nodeToExpression(Node node) 248 274 throws BaseException 249 275 { 250 int numChildren = node.jjtGetNumChildren(); 251 if (node instanceof ASTFunNode) 252 { 253 ASTFunNode funNode = (ASTFunNode)node; 254 PostfixMathCommandI function = funNode.getPFMC(); 255 if (function instanceof Add) 256 { 257 if (numChildren != 2) 258 { 259 throw new BaseException("Invalid number of expressions for add function: " + numChildren); 260 } 261 return Expressions.add( 262 nodeToExpression(node.jjtGetChild(0)), 263 nodeToExpression(node.jjtGetChild(1)) 264 ); 265 } 266 else if (function instanceof Subtract) 267 { 268 if (numChildren != 2) 269 { 270 throw new BaseException("Invalid number of expressions for subtract function: " + numChildren); 271 } 272 return Expressions.subtract( 273 nodeToExpression(node.jjtGetChild(0)), 274 nodeToExpression(node.jjtGetChild(1)) 275 ); 276 } 277 else if (function instanceof Multiply) 278 { 279 if (numChildren != 2) 280 { 281 throw new BaseException("Invalid number of expressions for multiply function: " + numChildren); 282 } 283 return Expressions.multiply( 284 nodeToExpression(node.jjtGetChild(0)), 285 nodeToExpression(node.jjtGetChild(1)) 286 ); 287 } 288 else if (function instanceof Divide) 289 { 290 if (numChildren != 2) 291 { 292 throw new BaseException("Invalid number of expressions for divide function: " + numChildren); 293 } 294 return Expressions.divide( 295 nodeToExpression(node.jjtGetChild(0)), 296 nodeToExpression(node.jjtGetChild(1)) 297 ); 298 } 299 else if (function instanceof UMinus) 300 { 301 if (numChildren != 1) 302 { 303 throw new BaseException("Invalid number of expressions for negate function: " + numChildren); 304 } 305 return Expressions.negate(nodeToExpression(node.jjtGetChild(0))); 306 } 307 else if (function instanceof Logarithm) 308 { 309 if (numChildren != 1) 310 { 311 throw new BaseException("Invalid number of expressions for 'log10' function: " + numChildren); 312 } 313 return Expressions.log10(nodeToExpression(node.jjtGetChild(0))); 314 } 315 else if (function instanceof NaturalLogarithm) 316 { 317 if (numChildren != 1) 318 { 319 throw new BaseException("Invalid number of expressions for 'ln' function: " + numChildren); 320 } 321 return Expressions.ln(nodeToExpression(node.jjtGetChild(0))); 322 } 323 else if (function instanceof SquareRoot) 324 { 325 if (numChildren != 1) 326 { 327 throw new BaseException("Invalid number of expressions for 'sqrt' function: " + numChildren); 328 } 329 return Expressions.sqrt(nodeToExpression(node.jjtGetChild(0))); 330 } 
331 else if (function instanceof Abs) 332 { 333 if (numChildren != 1) 334 { 335 throw new BaseException("Invalid number of expressions for 'sqrt' function: " + numChildren); 336 } 337 return Expressions.abs(nodeToExpression(node.jjtGetChild(0))); 338 } 339 else if (function instanceof Exp) 340 { 341 if (numChildren != 1) 342 { 343 throw new BaseException("Invalid number of expressions for 'sqrt' function: " + numChildren); 344 } 345 return Expressions.exp(nodeToExpression(node.jjtGetChild(0))); 346 } 347 else if (function instanceof JepExpressionFunction) 348 { 349 return ((JepExpressionFunction)function).toExpression(node); 350 } 351 else 352 { 353 throw new BaseException("Unsupported JEP function: " + node); 354 } 355 } 356 else if (node instanceof ASTVarNode) 357 { 358 ASTVarNode varNode = (ASTVarNode)node; 359 String name = varNode.getName(); 360 if ("NULL".equalsIgnoreCase(name)) name = null; 361 return name == null ? null : Expressions.parameter(name); 362 } 363 else if (node instanceof ASTConstant) 364 { 365 ASTConstant constNode = (ASTConstant)node; 366 Object value = constNode.getValue(); 367 if (value instanceof Integer) 368 { 369 return Expressions.integer((Integer)value); 370 } 371 else if (value instanceof Float) 372 { 373 return Expressions.aFloat((Float)value); 374 } 375 else if (value instanceof Double) 376 { 377 return Expressions.aFloat(((Double)value).floatValue()); 378 } 379 else if (value instanceof String) 380 { 381 return Expressions.string((String)value); 382 } 383 else 384 { 385 throw new BaseException("Unsupported type of constant: " + value.getClass().getName()); 386 } 276 JepConversionFunction converter = ConverterFactory.getConverterByClass(node); 277 Expression e = null; 278 if (converter != null) 279 { 280 e = converter.toExpression(node); 387 281 } 388 282 else 389 283 { 390 throw new BaseException("Unknown JEP node: " + node); 391 } 392 } 393 394 private static Restriction nodeToRestriction(Node node, OperatorSet opSet) 395 throws BaseException 396 { 397 int numChildren = node.jjtGetNumChildren(); 398 if (node instanceof ASTFunNode) 399 { 400 ASTFunNode funNode = (ASTFunNode)node; 401 PostfixMathCommandI function = funNode.getPFMC(); 402 if (function instanceof Comparative || function instanceof Logical) 403 { 404 Operator op = funNode.getOperator(); 405 if (numChildren != 2) 406 { 407 throw new BaseException("Invalid number of expressions for '"+ op.getSymbol()+"' function: " + numChildren); 408 } 409 if (op == opSet.getEQ()) 410 { 411 // A == B 412 return Restrictions.eq( 413 nodeToExpression(node.jjtGetChild(0)), 414 nodeToExpression(node.jjtGetChild(1)) 415 ); 416 } 417 else if (op == opSet.getNE()) 418 { 419 // A <> B 420 return Restrictions.neq( 421 nodeToExpression(node.jjtGetChild(0)), 422 nodeToExpression(node.jjtGetChild(1)) 423 ); 424 } 425 else if (op == opSet.getLT()) 426 { 427 // A < B 428 return Restrictions.lt( 429 nodeToExpression(node.jjtGetChild(0)), 430 nodeToExpression(node.jjtGetChild(1)) 431 ); 432 } 433 else if (op == opSet.getLE()) 434 { 435 // A <= B 436 return Restrictions.lteq( 437 nodeToExpression(node.jjtGetChild(0)), 438 nodeToExpression(node.jjtGetChild(1)) 439 ); 440 } 441 else if (op == opSet.getGT()) 442 { 443 // A > B 444 return Restrictions.gt( 445 nodeToExpression(node.jjtGetChild(0)), 446 nodeToExpression(node.jjtGetChild(1)) 447 ); 448 } 449 else if (op == opSet.getGE()) 450 { 451 // A >= B 452 return Restrictions.gteq( 453 nodeToExpression(node.jjtGetChild(0)), 454 nodeToExpression(node.jjtGetChild(1)) 455 ); 456 } 
457 else if (op == opSet.getAnd()) 458 { 459 // A AND B 460 return Restrictions.and( 461 nodeToRestriction(node.jjtGetChild(0), opSet), 462 nodeToRestriction(node.jjtGetChild(1), opSet) 463 ); 464 } 465 else if (op == opSet.getOr()) 466 { 467 // A OR B 468 return Restrictions.or( 469 nodeToRestriction(node.jjtGetChild(0), opSet), 470 nodeToRestriction(node.jjtGetChild(1), opSet) 471 ); 472 } 473 else 474 { 475 throw new BaseException("Unsupported operator: "+ op.getSymbol()); 476 } 477 } 478 else if (function instanceof Not) 479 { 480 if (numChildren != 1) 481 { 482 throw new BaseException("Invalid number of expressions for not function: " + numChildren); 483 } 484 return Restrictions.not(nodeToRestriction(node.jjtGetChild(0), opSet)); 485 } 486 else if (function instanceof JepRestrictionFunction) 487 { 488 return ((JepRestrictionFunction)function).toRestriction(node); 489 } 490 else 491 { 492 throw new BaseException("Unsupported JEP function: " + node); 493 } 284 throw new BaseException("Unsupported JEP function: " + node); 285 } 286 return e; 287 } 288 289 /** 290 Convert a node with it's children to a restriction. 291 @since 2.4 292 */ 293 @SuppressWarnings("unchecked") 294 public static Restriction nodeToRestriction(Node node) 295 throws BaseException 296 { 297 JepConversionFunction converter = ConverterFactory.getConverterByClass(node); 298 Restriction r = null; 299 if (converter != null) 300 { 301 r = converter.toRestriction(node); 494 302 } 495 303 else 496 304 { 497 throw new BaseException("Unknown JEP node: " + node); 498 } 305 throw new BaseException("Unsupported JEP function: " + node); 306 } 307 return r; 499 308 } 500 309 -
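The long if/else chains over JEP node and function types are replaced by a lookup through ConverterFactory.getConverterByClass. A simplified, stand-alone sketch of that dispatch pattern; the registry and converter types below are invented for the example and are not the BASE classes:

    import java.util.HashMap;
    import java.util.Map;

    public class ConverterRegistryDemo
    {
        interface Converter
        {
            String convert(Object node);
        }

        // Registry keyed by node class instead of a chain of instanceof checks
        private static final Map<Class<?>, Converter> converters =
            new HashMap<Class<?>, Converter>();
        static
        {
            converters.put(Integer.class, new Converter()
            {
                public String convert(Object node) { return "integer(" + node + ")"; }
            });
            converters.put(String.class, new Converter()
            {
                public String convert(Object node) { return "string('" + node + "')"; }
            });
        }

        static String toExpression(Object node)
        {
            Converter c = converters.get(node.getClass());
            if (c == null)
            {
                throw new IllegalArgumentException("Unsupported node: " + node);
            }
            return c.convert(node);
        }

        public static void main(String[] args)
        {
            System.out.println(toExpression(42));          // integer(42)
            System.out.println(toExpression("channel"));   // string('channel')
        }
    }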
branches/webservices/src/core/net/sf/basedb/util/jep/PositionFunction.java
r2992 r3653 38 38 JEP expression parser. The function will look up position. 39 39 For example: 40 <code>pos </code> 40 <code>pos()</code> 41 41 <p> 42 42 To be able to use this function it must be registered with the JEP 43 parser and, before the expression is evaluated, a {@link SqlResult} object must be set. 43 parser. This function can only be used to convert a JEP expression to 44 a Query API {@link Expression}. It doesn't support dynamic evaluation. 44 45 45 46 @author Enell … 47 48 @base.modified $Date$ 48 49 @see Jep 49 @see BioAssaySetUtil#createJepExpression(DbControl, BioAssaySet, String) 50 @see net.sf.basedb.util.BioAssaySetUtil#createJepExpression(net.sf.basedb.core.DbControl, net.sf.basedb.core.BioAssaySet, String, boolean) 50 51 */ 51 public class PositionFunction implements JepExpressionFunction 52 public class PositionFunction 53 implements JepExpressionFunction 52 54 { 53 55 … 92 94 Can't be changed. 93 95 */ 94 public void setCurNumberOfParameters(int n) {} 96 public void setCurNumberOfParameters(int n) 97 {} 98 95 99 public boolean checkNumberOfParameters(int n) 96 100 { -
branches/webservices/src/core/net/sf/basedb/util/overview/ExperimentOverview.java
r3533 r3653 885 885 Add a labeled extract child node to the labeled extracts folder. Creates an 886 886 item-type subnode with the name <code>labeledextract.ID</code> where ID is the ID of the 887 {@link Lab ledExtract}.887 {@link LabeledExtract}. 888 888 */ 889 889 private Node addLabeledExtract(DbControl dc, Node parentNode, LabeledExtract le) -
branches/webservices/src/core/net/sf/basedb/util/overview/ValidationOptions.java
r3133 r3653 107 107 @param clearFirst If the current settings should be cleared 108 108 before the new ones are loaded from the preset 109 @see #saveToPreset( Preset)109 @see #saveToPreset(net.sf.basedb.core.Presets.Preset) 110 110 */ 111 111 public void loadFromPreset(Preset preset, boolean clearFirst) … … 122 122 is used as key and the {@link Severity#name()} is used as value. 123 123 @param preset The preset to save the settings to 124 @see #loadFromPreset(Preset, boolean)125 124 */ 126 public void saveToPreset( Preset preset)125 public void saveToPreset(net.sf.basedb.core.Presets.Preset preset) 127 126 { 128 127 for (Map.Entry<String, Severity> entry : options.entrySet()) -
branches/webservices/src/core/net/sf/basedb/util/parser/ColFunction.java
r3133 r3653 44 44 <pre class="code"> 45 45 FlatFileParser ffp = ... 46 Mapper diameterMapper = ffp.getMapper(" col('Radius') * 2");46 Mapper diameterMapper = ffp.getMapper("=col('Radius') * 2"); 47 47 while (ffp.hasMoreData()) 48 48 { -
branches/webservices/src/core/net/sf/basedb/util/parser/ColumnMapper.java
r3533 r3653 75 75 @param nullIfException If TRUE, the mapper returns null for unparsable numeric 76 76 values, otherwise an excption is thrown 77 @param parser The parser to use or null to use Float.valueOf()77 @param parser The parser to use or null to use Double.valueOf() 78 78 @since 2.4 79 79 */ … … 102 102 public Integer getInt(Data data) 103 103 { 104 return getFloat(getValue(data)).intValue(); 104 Double d = getDouble(getValue(data)); 105 return d == null ? null : d.intValue(); 105 106 } 106 107 public Float getFloat(Data data) 107 108 { 108 return getFloat(getValue(data)); 109 Double d = getDouble(getValue(data)); 110 return d == null ? null : d.floatValue(); 109 111 } 110 112 // ------------------------------------------- … … 120 122 // ------------------------------------------- 121 123 122 private Float getFloat(String value)124 private Double getDouble(String value) 123 125 { 124 126 if (value == null) return null; 125 Float f= null;127 Double d = null; 126 128 if (parser == null) 127 129 { 128 130 try 129 131 { 130 f = Float.valueOf(value);132 d = Double.valueOf(value); 131 133 } 132 134 catch (RuntimeException rex) … … 137 139 else 138 140 { 139 f = (Float)Type.FLOAT.parseString(value, parser, nullIfException);141 d = (Double)Type.DOUBLE.parseString(value, parser, nullIfException); 140 142 } 141 return f;143 return d; 142 144 } 143 145 -
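The mappers above now parse numbers through Double.valueOf and narrow afterwards, and can return null instead of throwing when a value is unparsable. A short stand-alone sketch of that behaviour (class and method names are invented for the example):

    public class NumberMapperDemo
    {
        // Parse via Double and narrow to int; optionally swallow bad values
        static Integer getInt(String value, boolean nullIfException)
        {
            if (value == null) return null;
            try
            {
                return Double.valueOf(value).intValue();
            }
            catch (NumberFormatException ex)
            {
                if (nullIfException) return null;
                throw ex;
            }
        }

        public static void main(String[] args)
        {
            System.out.println(getInt("1500.0", true));   // 1500 - decimal notation now works
            System.out.println(getInt("1.5e3", true));    // 1500 - so does scientific notation
            System.out.println(getInt("N/A", true));      // null instead of an exception
        }
    }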
branches/webservices/src/core/net/sf/basedb/util/parser/CompoundMapper.java
r3533 r3653 117 117 try 118 118 { 119 intValue = Integer.parseInt(stringValue);119 intValue = Double.valueOf(stringValue).intValue(); 120 120 } 121 121 catch (RuntimeException rex) -
branches/webservices/src/core/net/sf/basedb/util/parser/ConstantMapper.java
r3533 r3653 59 59 Create a constant mapper using a specific number formatter as it's parser. 60 60 @param constant The constant expression 61 @param parser The number format to use or null to use Float.valueOf()61 @param parser The number format to use or null to use Double.valueOf() 62 62 @since 2.2 63 63 @deprecated Use {@link ConstantMapper#ConstantMapper(String, NumberFormat, boolean)} … … 71 71 Create a constant mapper using a specific number formatter as it's parser. 72 72 @param constant The constant expression 73 @param parser The number format to use or null to use Float.valueOf()73 @param parser The number format to use or null to use Double.valueOf() 74 74 @param nullIfException If TRUE, the mapper returns null for unparsable numeric 75 75 values, otherwise an excption is thrown … … 151 151 else 152 152 { 153 asFloat = Float.valueOf(constant);153 asFloat = Double.valueOf(constant).floatValue(); 154 154 } 155 155 } -
branches/webservices/src/core/net/sf/basedb/util/parser/FlatFileParser.java
r3533 r3653 264 264 265 265 /** 266 The number of data lines parsed in the current section so far. 267 This value is reset at each new section. 268 */ 269 private int parsedDataLines; 270 271 /** 266 272 Map of header lines parsed by the {@link #parseHeaders()} method. 267 273 The map contains name -> value pairs … … 648 654 done = true; 649 655 lines.add(new Line(parsedLines, line, LineType.DATA)); 650 nextData = new Data(parsedLines, line, columns, emptyIsNull, nullIsNull); 656 parsedDataLines = 1; 657 nextData = new Data(parsedLines, parsedDataLines, line, columns, emptyIsNull, nullIsNull); 651 658 } 652 659 } … … 851 858 @param numberFormat The number format the mapper should use for 852 859 parsing numbers, or null to use Float.valueOf or Double.valueOf 853 @param useNullIfException TRUE to return a null value instead of throwing860 @param nullIfException TRUE to return a null value instead of throwing 854 861 an exception when a value can't be parsed. 855 862 @return A mapper object … … 969 976 { 970 977 parsedLines++; 978 parsedDataLines++; 971 979 parsedCharacters += line.length(); 972 980 … … 997 1005 { 998 1006 done = true; 1007 parsedDataLines = 0; 999 1008 nextSection = new Line(parsedLines, line, m.group(1), LineType.SECTION); 1000 1009 lines.add(nextSection); … … 1010 1019 { 1011 1020 done = true; 1012 nextData = new Data(parsedLines, line, columns, emptyIsNull, nullIsNull);1021 nextData = new Data(parsedLines, parsedDataLines, line, columns, emptyIsNull, nullIsNull); 1013 1022 } 1014 1023 } … … 1056 1065 { 1057 1066 return parsedLines; 1067 } 1068 1069 /** 1070 Get the number of parsed data lines so far in the current section. 1071 This value is reset for each new section. 1072 */ 1073 public int getParsedDataLines() 1074 { 1075 return parsedDataLines; 1058 1076 } 1059 1077 … … 1179 1197 done = true; 1180 1198 nextSection = new Line(parsedLines, line, m.group(1), LineType.SECTION); <