/**
* Similar to <code>String.indexOf(String, int)</code>, but it ignores
* case
*/
private static int indexOfIgnoreCase(String textToSearch,
String pattern,
int fromIndex)
{
int n = pattern.length();
while(textToSearch.length() > ((fromIndex + n) - 1))
{
if(textToSearch.regionMatches(true, fromIndex, pattern, 0, n))
return fromIndex;
fromIndex++;
}
return -1;
}
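  /**
   * Usage sketch (hypothetical helper, for illustration only): search for a
   * pattern regardless of case. The strings below are made-up examples.
   */
  private static int exampleIndexOfIgnoreCase()
  {
    // "curation" matches "Curation" at index 11 of "Controlled_Curation"
    return indexOfIgnoreCase("Controlled_Curation", "curation", 0);
  }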
/**
 * Get the names of the controlled vocabularies used for controlled curation
 * terms, i.e. those CV names starting with the controlled curation tag.
 * @return the list of controlled curation CV names
 */
public static List getCvControledCurationNames()
{
if(cvControledCuratioNames != null)
return cvControledCuratioNames;
cvControledCuratioNames = new Vector();
final Enumeration enum_cvterm = cvterms.elements();
while(enum_cvterm.hasMoreElements())
{
final CvTerm cvTerm = (CvTerm)enum_cvterm.nextElement();
final String cvNameStr = cvTerm.getCv().getName();
if(cvNameStr.startsWith(DatabaseDocument.CONTROLLED_CURATION_TAG_CVNAME) &&
!cvControledCuratioNames.contains(cvNameStr))
cvControledCuratioNames.add(cvNameStr);
}
return cvControledCuratioNames;
}
/**
* Look up synonym type names e.g. synonym, systematic_id.
* @return the synonym tag names
*/
public static String[] getSynonymTypeNames(final String cv_name,
final GFFStreamFeature feature)
  {
    if(cvterms == null)
{
DatabaseDocument doc = (DatabaseDocument)feature.getDocumentEntry().getDocument();
try
{
Cv cv = new Cv();
cv.setName(cv_name);
List synonymCvTerms = doc.getDAO().getCvTermByNameInCv(null, cv);
String synonymNames[] = new String[synonymCvTerms.size()];
for(int i=0; i<synonymCvTerms.size(); i++)
synonymNames[i] = ((CvTerm) synonymCvTerms.get(i)).getName();
return synonymNames;
}
catch(ConnectException e){}
catch(SQLException e){}
}
Vector synonym_names = new Vector();
Enumeration cvterm_enum = cvterms.elements();
while(cvterm_enum.hasMoreElements())
{
CvTerm cvterm = (CvTerm)cvterm_enum.nextElement();
if(cvterm.getCv().getName().equals(cv_name))
synonym_names.add(cvterm.getName());
}
return (String[])synonym_names.toArray(
new String[synonym_names.size()]);
}
public void insertCvTerm(CvTerm cvTerm)
{
final GmodDAO dao = getDAOOnly();
dao.persist(cvTerm);
cvTerm = dao.getCvTermByNameAndCvName(cvTerm.getName(), cvTerm.getCv().getName());
cvterms.put(new Integer(cvTerm.getCvTermId()), cvTerm);
}
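  /**
   * Usage sketch (hypothetical helper): build a minimal CvTerm and store it
   * with insertCvTerm(). The CV and term names are made-up values and the
   * CvTerm/Cv setters are assumed to follow the usual bean pattern.
   */
  private void exampleInsertCvTerm()
  {
    final Cv cv = new Cv();
    cv.setName("my_controlled_curation_cv");   // made-up CV name
    final CvTerm cvTerm = new CvTerm();
    cvTerm.setName("my_new_term");              // made-up term name
    cvTerm.setCv(cv);
    insertCvTerm(cvTerm);   // persists the term and adds it to the cvterms cache
  }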
/**
* Get the sequence for a feature.
* @param dao the data access object
* @param buff the buffer to add the sequence to
* @return the resulting buffer
* @throws java.sql.SQLException
*/
Feature feature = dao.getFeatureById(Integer.parseInt(srcFeatureId));
/**
* Find the gene by the ID or synonym on the gene or transcript.
 * @param id the ID or synonym of the gene or one of its transcripts
 * @return the gene feature
*/
public Feature getChadoGeneByAnyCurrentName(String id)
{
Feature chadoFeature =
(Feature)(getDAOOnly().getFeaturesByAnyCurrentName(id).get(0));
    if(!chadoFeature.getCvTerm().getName().equals("gene"))
{
Iterator<FeatureRelationship> parents =
chadoFeature.getFeatureRelationshipsForSubjectId().iterator();
while(parents.hasNext())
{
FeatureRelationship fr = parents.next();
Feature parent = fr.getFeatureByObjectId();
if(parent.getCvTerm().getName().equals("gene"))
chadoFeature = parent;
}
}
return chadoFeature;
}
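  /**
   * Usage sketch (hypothetical helper): resolve the gene feature for an ID or
   * synonym that may belong to the gene itself or to one of its transcripts.
   * The identifier is a made-up example.
   */
  private void exampleGeneLookUp()
  {
    final Feature gene = getChadoGeneByAnyCurrentName("abc1.1");
    logger4j.debug("gene uniquename :: " + gene.getUniqueName());
  }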
/**
* Get the sequence for a feature.
 * @param feature the feature to get the residues of
 * @param buff the buffer to add the sequence to
*/
private void getChadoSequence(final Feature feature, ByteBuffer buff)
{
/**
 * Get the CDS FeatureLoc's associated with a given protein
* @param peptideName
* @return
*/
  public List getCdsFeatureLocsByPeptideName(final String peptideName)
  {
    Feature peptideFeature = getFeatureByUniquename(peptideName);
    Collection frs = peptideFeature.getFeatureRelationshipsForSubjectId();
Iterator it = frs.iterator();
Feature transcriptFeature = null;
while(it.hasNext())
{
FeatureRelationship fr = (FeatureRelationship)it.next();
if(fr.getCvTerm().getName().equalsIgnoreCase("derives_from"))
{
transcriptFeature = fr.getFeatureByObjectId();
logger4j.debug("TRANSCRIPT :: "+transcriptFeature.getUniqueName());
break;
}
}
if(transcriptFeature == null)
return null;
return getCdsFeatureLocsByTranscriptName(transcriptFeature.getUniqueName());
  }

  /**
   * Get the CDS FeatureLoc's associated with a given transcript
* @param transcriptName
* @return
*/
public List getCdsFeatureLocsByTranscriptName(final String transcriptName)
{
Feature transcriptFeature = getFeatureByUniquename(transcriptName);
    if(transcriptFeature == null)
      return null;

    List cdsFeatureLocs = new Vector();
    Collection frs = transcriptFeature.getFeatureRelationshipsForObjectId();
Iterator it = frs.iterator();
while(it.hasNext())
{
FeatureRelationship fr = (FeatureRelationship)it.next();
org.gmod.schema.sequence.Feature child = fr.getFeatureBySubjectId();
if(child.getCvTerm().getName().equals("exon") ||
child.getCvTerm().getName().equals("pseudogenic_exon"))
      {
        Collection featureLocs = child.getFeatureLocsForFeatureId();
Iterator it2 = featureLocs.iterator();
while(it2.hasNext())
{
FeatureLoc featureLoc = (FeatureLoc) it2.next();
cdsFeatureLocs.add(featureLoc);
}
      }
    }
    Collections.sort(cdsFeatureLocs, new LocationComarator());
    return cdsFeatureLocs;
  }
/**
 * Get the sequence for a feature.
 * @param uniqueName the uniquename of the feature
 * @return the partial sequence for the feature
*/
public PartialSequence getChadoSequence(final String uniqueName)
{
Feature feature = (Feature) getDAOOnly().getResiduesByUniqueName(uniqueName).get(0);
char[] c = getChars(feature.getResidues());
PartialSequence ps = new PartialSequence(c, feature.getSeqLen(),
feature.getFeatureLoc().getFmin().intValue()+1,
feature.getFeatureLoc().getStrand(),
feature.getFeatureLoc().getPhase());
return ps;
}
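  /**
   * Usage sketch (hypothetical helper): fetch the residues of a single feature
   * by its uniquename; the name below is made up.
   */
  private PartialSequence exampleGetSequence()
  {
    return getChadoSequence("my_region_1");
  }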
/**
* Convert byte array to char array
* @param b byte array
* @return char array
*/
private char[] getChars(final byte b[])
{
char[] c = new char[b.length];
for(int i = 0; i < b.length; i++)
      c[i] = (char)b[i];
    return c;
  }
/**
* Get the <code>List</code> of available schemas.
* @return the <code>List</code> of available schemas
*/
public List getSchema()
{
return schema_list;
}
public Feature getFeatureByUniquename(final String uniqueName)
{
GmodDAO dao = getDAOOnly();
return (Feature) dao.getFeatureByUniqueName(uniqueName, null);
}
/**
 * Given a gene unique name return the polypeptide chado features that belong
 * to that gene
* @param geneName
* @return
*/
public Vector getPolypeptideFeatures(final String geneName)
{
Feature geneFeature = getFeatureByUniquename(geneName);
if(geneFeature == null)
return null;
Collection frs = geneFeature.getFeatureRelationshipsForObjectId();
Iterator it = frs.iterator();
List transcripts = new Vector(frs.size());
while(it.hasNext())
{
FeatureRelationship fr = (FeatureRelationship)it.next();
transcripts.add(fr.getFeatureBySubjectId());
}
Vector polypep = new Vector();
for(int i=0; i<transcripts.size(); i++)
{
org.gmod.schema.sequence.Feature transcript =
(org.gmod.schema.sequence.Feature) transcripts.get(i);
frs = transcript.getFeatureRelationshipsForObjectId();
it = frs.iterator();
while(it.hasNext())
{
FeatureRelationship fr = (FeatureRelationship)it.next();
if(fr.getCvTerm().getName().equalsIgnoreCase("derives_from"))
if(fr.getFeatureBySubjectId().getCvTerm().getName().equalsIgnoreCase("polypeptide"))
polypep.add(fr.getFeatureBySubjectId());
}
}
return polypep;
}
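  /**
   * Usage sketch (hypothetical helper): log the uniquenames of the polypeptides
   * belonging to a gene; the gene name is made up.
   */
  private void examplePolypeptideLookUp()
  {
    final Vector polypeps = getPolypeptideFeatures("abc1");
    for(int i = 0; polypeps != null && i < polypeps.size(); i++)
    {
      final Feature pep = (Feature) polypeps.get(i);
      logger4j.debug("polypeptide :: " + pep.getUniqueName());
    }
  }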
/**
 * Given a gene unique name return the polypeptides that belong
* to that gene
* @param geneName
* @return
*/
/*public Vector getPolypeptideNames(final String geneName)
{
Vector polypeptides = getPolypeptideFeatures(geneName);
Vector polypeptideNames = new Vector(polypeptides.size());
for(int i=0; i<polypeptides.size(); i++)
{
Feature feature = (Feature)polypeptides.get(i);
polypeptideNames.add(feature.getUniqueName());
}
    return polypeptideNames;
  }*/
public List getClustersByFeatureIds(final List featureIds)
{
GmodDAO dao = getDAOOnly();
return dao.getClustersByFeatureIds(featureIds);
}
public List getParentFeaturesByChildFeatureIds(final List subjectIds)
{
GmodDAO dao = getDAOOnly();
return dao.getParentFeaturesByChildFeatureIds(subjectIds);
}
public List getFeatureDbXRefsByFeatureId(final List featureIds)
{
GmodDAO dao = getDAOOnly();
return dao.getFeatureDbXRefsByFeatureId(featureIds);
}
  /**
   * Used by SimilarityLazyQualifierValue.bulkRetrieve() to get the match features
* @param featureIds the <code>List</code> of feature_id's
* @return the corresponding features
*/
public List getFeaturesByListOfIds(final List featureIds)
{
GmodDAO dao = getDAOOnly();
return dao.getFeaturesByListOfIds(featureIds);
}
public List getFeaturePropByFeatureIds(final List featureIds)
{
GmodDAO dao = getDAOOnly();
return dao.getFeaturePropByFeatureIds(featureIds);
}
  public List getSimilarityMatches(List featureIds)
  {
    GmodDAO dao = getDAOOnly();
    if(featureIds == null)
      return dao.getSimilarityMatches(new Integer(srcFeatureId));
    else
      return dao.getSimilarityMatchesByFeatureIds(featureIds);
  }
public List getFeatureLocsByListOfIds(List featureIds)
{
GmodDAO dao = getDAOOnly();
return dao.getFeatureLocsByListOfIds(featureIds);
}
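  /**
   * Usage sketch (hypothetical helper): batch up feature_id values and fetch
   * the corresponding features and featurelocs in two calls rather than one
   * query per feature. The feature_id values are made up.
   */
  private void exampleBulkRetrieve()
  {
    final List featureIds = new Vector();
    featureIds.add(new Integer(1234));   // made-up feature_id
    featureIds.add(new Integer(5678));   // made-up feature_id
    final List features    = getFeaturesByListOfIds(featureIds);
    final List featureLocs = getFeatureLocsByListOfIds(featureIds);
    logger4j.debug(features.size() + " features and " +
                   featureLocs.size() + " featurelocs retrieved");
  }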
/**
* Test the database connection.
* @throws ConnectException
* @throws SQLException
*/
public void ping() throws ConnectException, SQLException
{
getDAO().getAllCvs();
}
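  /**
   * Usage sketch (hypothetical helper): use ping() to test the connection
   * before attempting a full load.
   */
  private boolean examplePing()
  {
    try
    {
      ping();
      return true;
    }
    catch(ConnectException e)
    {
      return false;
    }
    catch(SQLException e)
    {
      return false;
    }
  }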
/**
* Load the cvterms
* @throws ConnectException
* @throws SQLException
*/
public void loadCvTerms() throws ConnectException, SQLException
{
GmodDAO dao = getDAO();
cvThread = new CvTermThread(dao);
cvThread.start();
}
/**
 * Get a list of the organisms with sequences
 * @return the list of organisms containing src features
* @throws ConnectException
* @throws java.sql.SQLException
*/
public List getOrganismsContainingSrcFeatures()
         throws ConnectException, java.sql.SQLException
  {
    List list = null;
    try
    {
      GmodDAO dao = getDAO();
      cvThread = new CvTermThread(dao);
      cvThread.start();

      list = dao.getOrganismsContainingSrcFeatures();

      Collections.sort(list, new Comparator()
      {
        public int compare(Object o1, Object o2)
        {
          Organism org1 = (Organism)o1;
Organism org2 = (Organism)o2;
String name1 = org1.getCommonName();
String name2 = org2.getCommonName();
if(name1 == null)
name1 = org1.getGenus() + "." + org1.getSpecies();
if(name2 == null)
name2 = org2.getGenus() + "." + org2.getSpecies();
return name1.compareToIgnoreCase( name2 );
}
});
}
catch(RuntimeException sqlExp)
{
JOptionPane.showMessageDialog(null, "SQL Problems...\n"+
sqlExp.getMessage(),
"SQL Error",
JOptionPane.ERROR_MESSAGE);
}
catch(ConnectException exp)
{
JOptionPane.showMessageDialog(null, "Connection Problems...\n"+
exp.getMessage(),
"Connection Error",
JOptionPane.ERROR_MESSAGE);
throw exp;
}
catch(java.sql.SQLException sqlExp)
{
JOptionPane.showMessageDialog(null, "SQL Problems....\n"+
sqlExp.getMessage(),
"SQL Error",
JOptionPane.ERROR_MESSAGE);
logger4j.debug(sqlExp.getMessage());
throw sqlExp;
}
    return list;
  }

  /**
   * @return true if the cvterm-loading thread is still running
   */
  public boolean isCvThreadAlive()
  {
    if(cvThread.isAlive())
      return true;
    else
      return false;
  }
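  /**
   * Usage sketch (hypothetical helper): log the common name of each organism
   * that has src features.
   */
  private void exampleListOrganisms()
      throws ConnectException, java.sql.SQLException
  {
    final List organisms = getOrganismsContainingSrcFeatures();
    for(int i = 0; organisms != null && i < organisms.size(); i++)
      logger4j.debug(((Organism) organisms.get(i)).getCommonName());
  }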
public List getResidueFeatures(Integer organismId)
{
    return getDAOOnly().getResidueFeatures(organismId);
  }
  /**
   * Show the controlled vocabulary term look-up viewer.
   */
public void showCvTermLookUp()
{
try
{
new ChadoCvTermView( getDAO() );
}
catch(ConnectException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
catch(SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
2492
2493
2494
2495
2496
2497
2498
2499
2500
2501
2502
2503
2504
2505
2506
2507
2508
2509
2510
2511
2512
2513
2514
2515
2516
2517
2518
2519
2520
2521
2522
2523
2524
2525
2526
2527
2528
2529
2530
2531
2532
2533
/**
* Get a list of the available graphs
 * @return the list of graphs for the current sequence, or null if there is no graph table
*/
public List getGraphs()
{
GmodDAO dao = getDAOOnly();
List list = dao.getTableColumns("graph");
if(list == null || list.size() == 0)
return null;
return dao.getGraphs(Integer.parseInt(srcFeatureId));
}
/**
* Get the graph data from the database.
* This uses JDBC and the org.postgresql.largeobject API.
 * @param graphId the graph_id of the row to read
 * @param name the name given to the resulting document
 * @return the graph data as a LargeObjectDocument, or null if the read fails
*/
public LargeObjectDocument getGraphData(int graphId, String name)
{
LargeObjectDocument doc = null;
try
{
// this causes class cast problems probably because it gets
// a pool connection rather than the underlying real connection
// Connection conn = ((SimpleDataSource)connIB.getDataSource()).getConnection();
SimpleDataSource ds = (SimpleDataSource)connIB.getDataSource();
Connection conn = DriverManager.getConnection(
ds.getJdbcUrl(), ds.getJdbcUsername(), ds.getJdbcPassword());
// All LargeObject API calls must be within a transaction
conn.setAutoCommit(false);
// Get the Large Object Manager to perform operations with
LargeObjectManager lobj =
((org.postgresql.PGConnection)conn).getLargeObjectAPI();
PreparedStatement pstmt =
conn.prepareStatement("SELECT * FROM graph.graph WHERE graph_id=?");
pstmt.setInt(1, graphId);
ResultSet rs = pstmt.executeQuery();
if (rs != null)
{
while(rs.next())
{
// open the large object for reading
int oid = rs.getInt(5);
doc = new LargeObjectDocument(ds.getJdbcUrl(), name,
lobj.open(oid, LargeObjectManager.READ));
}
rs.close();
}
pstmt.close();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
return doc;
}
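  /**
   * Usage sketch (hypothetical helper): open the large object behind a row of
   * the graph table. The graph_id and document name are made-up values.
   */
  private LargeObjectDocument exampleGetGraphData()
  {
    return getGraphData(1, "coverage.userplot");
  }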
/**
* Get the data access object (DAO).
* @return data access object
*/
  private GmodDAO getDAO()
          throws ConnectException, SQLException
  {
    if(!iBatis)
{
if(jdbcDAO == null)
jdbcDAO = new JdbcDAO((String)getLocation(), pfield);
return jdbcDAO;
}
else
{
      System.setProperty("chado", (String)getLocation());
      if(connIB == null)
        connIB = new IBatisDAO(pfield);
      return connIB;
    }
  }
/**
* Get the username for this connection
 * @return the username part of the connection location
*/
public String getUserName()
{
// "localhost:10001/backup?chado"
String url = (String)getLocation();
int index = url.indexOf("?");
String userName = url.substring(index+1).trim();
if(userName.startsWith("user="))
userName = userName.substring(5);
return userName;
}
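  /**
   * Usage sketch (hypothetical helper): the parsing performed by getUserName(),
   * restated as a standalone method. The URL in the comment is a made-up
   * example of the "host:port/database?user" location format.
   */
  private static String exampleParseUserName(final String url)
  {
    // e.g. "localhost:10001/backup?user=chado" -> "chado"
    String userName = url.substring(url.indexOf("?") + 1).trim();
    if(userName.startsWith("user="))
      userName = userName.substring(5);
    return userName;
  }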
private GmodDAO getDAOOnly()
{
GmodDAO dao = null;
try
{
dao = getDAO();
}
catch(RuntimeException sqlExp)
{
JOptionPane.showMessageDialog(null, "SQL Problems...\n"+
sqlExp.getMessage(),
"SQL Error",
JOptionPane.ERROR_MESSAGE);
}
catch(ConnectException exp)
{
JOptionPane.showMessageDialog(null, "Connection Problems...\n"+
exp.getMessage(),
"Connection Error",
JOptionPane.ERROR_MESSAGE);
}
catch(java.sql.SQLException sqlExp)
{
JOptionPane.showMessageDialog(null, "SQL Problems....\n"+
sqlExp.getMessage(),
"SQL Error",
JOptionPane.ERROR_MESSAGE);
}
return dao;
}
  /**
   * Create a new OutputStream object from this Document. The contents of the
* Document can be written from the stream.
*
* @exception IOException
* Thrown if the Document can't be written.
*/
public OutputStream getOutputStream() throws IOException
{
final File write_file = new File(System.getProperty("user.dir")+
System.getProperty("file.separator")+
getName());
final FileOutputStream file_output_stream =
new FileOutputStream(write_file);
if(write_file.getName().endsWith(".gz"))
{
// assume this file should be gzipped
return new java.util.zip.GZIPOutputStream (file_output_stream);
}
else
      return file_output_stream;
  }
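  /**
   * Usage sketch (hypothetical helper): write text through getOutputStream();
   * if the document name ends in ".gz" the bytes are gzipped transparently.
   */
  private void exampleWrite() throws IOException
  {
    final OutputStream out = getOutputStream();
    out.write("##gff-version 3\n".getBytes());
    out.close();   // also finishes the gzip stream if one is in use
  }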
/**
* Commit the <code>ChadoTransaction</code> SQL back to the
* database.
 * @param sql the collection of <code>ChadoTransaction</code> objects
 * @param force if true, continue committing the remaining transactions even if one fails
 * @return the number of transactions committed
*/
public int commit(final Vector sql,
                    final boolean force)
  {
    GmodDAO dao = null;
    int ncommit = -1;
    final Hashtable featureIdStore = new Hashtable();
    boolean useTransactions = false;
    try
    {
      dao = getDAO();
      if(!force && dao instanceof IBatisDAO)
      {
        useTransactions = true;
((IBatisDAO) dao).startTransaction();
logger4j.debug("START TRANSACTION");
}
boolean unchanged;
//
// check feature timestamps have not changed
Vector names_checked = new Vector();
      for(int i = 0; i < sql.size(); i++)
      {
        final ChadoTransaction tsn = (ChadoTransaction) sql.get(i);
        final Object uniquenames[] = getUniqueNames(tsn);
        if(uniquenames == null)
          continue;

        for(int j=0; j<uniquenames.length; j++)
        {
final String uniquename = (String) uniquenames[j];
if(uniquename == null || names_checked.contains(uniquename))
continue;
names_checked.add(uniquename);
final String keyName = tsn.getFeatureKey();
unchanged = checkFeatureTimestamp(schema, uniquename,
if(!unchanged)
{
if(useTransactions)
((IBatisDAO) dao).endTransaction();
return 0;
}
        }
      }

      final Timestamp ts = new Timestamp(new java.util.Date().getTime());
//
// commit to database
      for(ncommit = 0; ncommit < sql.size(); ncommit++)
      {
        try
        {
          ChadoTransaction tsn = (ChadoTransaction) sql.get(ncommit);
          commitChadoTransaction(tsn, dao);
        }
catch (RuntimeException re)
{
if(!force)
throw re;
logger4j.warn(constructExceptionMessage(re, sql, ncommit));
logger4j.warn("NOW TRYING TO CONTINUE TO COMMIT");
}
      }

      //
// update timelastmodified timestamp
names_checked = new Vector();
      for(int i = 0; i < sql.size(); i++)
      {
        final ChadoTransaction tsn = (ChadoTransaction) sql.get(i);
final Object uniquenames[] = getUniqueNames(tsn);
        if(uniquenames == null)
          continue;

        if(tsn.getType() == ChadoTransaction.UPDATE &&
tsn.getFeatureObject() instanceof Feature)
{
for(int j=0; j<uniquenames.length; j++)
names_checked.add((String) uniquenames[j]);
continue;
}
for(int j=0; j<uniquenames.length; j++)
{
final String uniquename = (String) uniquenames[j];
if(uniquename == null || names_checked.contains(uniquename))
continue;
          // retrieve from featureId store
          Feature feature;
          if(featureIdStore != null && featureIdStore.containsKey(uniquename))
          {
            feature = (Feature) featureIdStore.get(uniquename);
          }
else
feature = dao.getFeatureByUniqueName(uniquename,
tsn.getFeatureKey());
if(feature != null)
{
            feature.setTimeLastModified(ts);
            feature.setName("0"); // do not change name
            dao.merge(feature);
          }
GFFStreamFeature gff_feature = (GFFStreamFeature) tsn.getGff_feature();
gff_feature.setLastModified(ts);
        }
      }

      final String nocommit = System.getProperty("nocommit");
      if( useTransactions &&
          (nocommit == null || nocommit.equals("false")))
      {
        ((IBatisDAO) dao).commitTransaction();
        logger4j.debug("TRANSACTION COMPLETE");
      }
else if(useTransactions &&
(nocommit != null && nocommit.equals("true")))
logger4j.debug("TRANSACTION NOT COMMITTED : nocommit property set to true");
    }
    catch (java.sql.SQLException sqlExp)
    {
      JOptionPane.showMessageDialog(null, "Problems Writing...\n" +
                                    sqlExp.getMessage(),
                                    "Problems Writing to Database ",
                                    JOptionPane.ERROR_MESSAGE);
    }
    catch (ConnectException conn_ex)
    {
      JOptionPane.showMessageDialog(null, "Problems connecting..."+
                                    conn_ex.getMessage(),
                                    "Database Connection Error - Check Server",
                                    JOptionPane.ERROR_MESSAGE);
    }
catch (RuntimeException re)
{
final String msg = constructExceptionMessage(re, sql, ncommit);
JOptionPane.showMessageDialog(null, msg,
"Problems Writing to Database ",
JOptionPane.ERROR_MESSAGE);
logger4j.error(msg);
}
finally
{
if(useTransactions)
try
{
((IBatisDAO) dao).endTransaction();
        }
        catch(SQLException e){ e.printStackTrace(); }
    }

    if(featureIdStore != null)
featureIdStore.clear();
return ncommit;
}
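  /**
   * Usage sketch (hypothetical helper): commit a batch of ChadoTransaction
   * objects and warn if not all of them were written. The transactions would
   * be assembled by the caller from the in-memory edits.
   */
  private void exampleCommit(final Vector transactions)
  {
    final int ncommitted = commit(transactions, false);
    if(ncommitted < transactions.size())
      logger4j.warn("only " + ncommitted + " of " + transactions.size() +
                    " transactions were committed");
  }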
/**
* Get the uniquenames involved in a transaction
* @param tsn
* @return
*/
private Object[] getUniqueNames(final ChadoTransaction tsn)
{
if(tsn.getGff_feature() == null)
return null;
if(tsn.getGff_feature().getSegmentRangeStore() == null ||
tsn.getGff_feature().getSegmentRangeStore().size() < 2 ||
tsn.getFeatureObject() instanceof FeatureProp)
return new Object[]{ tsn.getUniquename() };
else
return tsn.getGff_feature().getSegmentRangeStore().keySet().toArray();
}
/**
 * Construct an exception message from the ChadoTransaction
 * @param re the runtime exception thrown during the commit
 * @param sql the collection of <code>ChadoTransaction</code> objects
 * @param ncommit the index of the transaction that failed
 * @return the exception message
*/
private String constructExceptionMessage(final RuntimeException re,
final Vector sql,
final int ncommit)
{
String msg = "";
if(ncommit > -1 && ncommit < sql.size())
{
final ChadoTransaction t_failed = (ChadoTransaction)sql.get(ncommit);
if(t_failed.getType() == ChadoTransaction.DELETE)
msg = "DELETE failed ";
else if(t_failed.getType() == ChadoTransaction.INSERT)
msg = "INSERT failed ";
else if(t_failed.getType() == ChadoTransaction.UPDATE)
msg = "UPDATE failed ";
if(t_failed.getUniquename() != null)
msg = msg + "for " + t_failed.getUniquename()+":";
else if(t_failed.getFeatureObject() != null &&
t_failed.getFeatureObject() instanceof Feature)
{
final Feature chadoFeature = (Feature)t_failed.getFeatureObject();
if(chadoFeature.getUniqueName() != null)
msg = msg + "for " + chadoFeature.getUniqueName() +":";
}
msg = msg+"\n";
}
return msg + re.getMessage();
}
/**
* Commit a single chado transaction
* @param tsn
* @param dao
*/
  private void commitChadoTransaction(final ChadoTransaction tsn,
                                      final GmodDAO dao)
  {
if(tsn.getType() == ChadoTransaction.UPDATE)
{
if(tsn.getFeatureObject() instanceof Feature)
{
Feature feature = (Feature)tsn.getFeatureObject();
if(feature.getUniqueName() != null)
{
final String uniquename;
if(tsn.getOldUniquename() != null)
uniquename = (String)tsn.getOldUniquename();
else
uniquename = feature.getUniqueName();
Feature old_feature
= dao.getFeatureByUniqueName(uniquename, tsn.getFeatureKey());
if(old_feature != null)
feature.setFeatureId( old_feature.getFeatureId() );
tsn.setOldUniquename(feature.getUniqueName());
}
      }

      if(tsn.getFeatureObject() instanceof FeatureLoc)
{
// update any featurelocs on the polypeptide
String keyStr = tsn.getGff_feature().getKey().getKeyString();
if(keyStr.equals("polypeptide"))
adjustChildCoordinates((FeatureLoc)tsn.getFeatureObject(), dao);
}
      dao.merge(tsn.getFeatureObject());

      if(tsn.getFeatureObject() instanceof FeatureLoc)
{
// if CDS featureloc is changed update residues on
// associated features
List tsns = getUpdateResiduesColumnTransactions(tsn);
if(tsns != null)
{
for(int i=0; i<tsns.size(); i++)
dao.merge( ((ChadoTransaction)tsns.get(i)).getFeatureObject() );
}
}
}
else if(tsn.getType() == ChadoTransaction.INSERT)
{
if(tsn.getFeatureObject() instanceof FeatureCvTerm)
ArtemisUtils.inserFeatureCvTerm(dao, (FeatureCvTerm)tsn.getFeatureObject());
else
{
// set srcfeature_id
if(tsn.getFeatureObject() instanceof Feature &&
((Feature) tsn.getFeatureObject()).getFeatureLoc() != null)
{
FeatureLoc featureloc = ((Feature) tsn.getFeatureObject()).getFeatureLoc();
Feature featureBySrcFeatureId = new Feature();
featureBySrcFeatureId.setFeatureId(Integer.parseInt(srcFeatureId));
featureloc.setFeatureBySrcFeatureId(featureBySrcFeatureId);
}
if(tsn.getFeatureObject() instanceof Feature &&
tsn.getGff_feature() != null)
{
String keyStr = tsn.getGff_feature().getKey().getKeyString();
if(GeneUtils.isFeatureToUpdateResidues(keyStr))
{
String residues = GeneUtils.deriveResidues(tsn.getGff_feature());
if(residues != null)
((Feature)tsn.getFeatureObject()).setResidues(residues.getBytes());
}
}
dao.persist(tsn.getFeatureObject());
}
}
else if(tsn.getType() == ChadoTransaction.DELETE)
{
if(tsn.getFeatureObject() instanceof FeatureCvTerm)
ArtemisUtils.deleteFeatureCvTerm(dao, (FeatureCvTerm)tsn.getFeatureObject());
else
dao.delete(tsn.getFeatureObject());
}