Skip to content
Snippets Groups Projects
Commit e50a29c6 authored by tjc's avatar tjc
Browse files

split gff feature up into entries

git-svn-id: svn+ssh://svn.internal.sanger.ac.uk/repos/svn/pathsoft/artemis/trunk@3328 ee4ac58c-ac51-4696-9907-e4b3aa274f04
parent 14b1ae60
No related branches found
No related tags found
No related merge requests found
......@@ -54,6 +54,7 @@ public class DatabaseEntrySource implements EntrySource
{
private String location;
private Hashtable entries;
private boolean splitGFFEntry;
/**
* Create a new DatabaseEntrySource.
......@@ -301,6 +302,10 @@ public class DatabaseEntrySource implements EntrySource
}
}
/**
 * Set whether the GFF stream read from the database should be split up
 * into separate entries (by feature type). The flag is passed on to the
 * DatabaseDocument when an entry is created.
 *
 * @param splitGFFEntry true to split the GFF feature stream into entries
 */
protected void setSplitGFF(final boolean splitGFFEntry)
{
this.splitGFFEntry = splitGFFEntry;
}
/**
*
......@@ -326,17 +331,13 @@ public class DatabaseEntrySource implements EntrySource
{
DatabaseDocumentEntry db_entry = null;
// final EntryInformation entry_information =
// new SimpleEntryInformation(Options.getArtemisEntryInformation());
if(read_only)
{
}
else
{
DatabaseDocument doc = new DatabaseDocument(location, id, progress_listener);
// DatabaseDocument doc = new DatabaseDocument("jdbc:postgresql://localhost:13001/chadoCVS?user=es2",
// id, progress_listener);
DatabaseDocument doc = new DatabaseDocument(location, id,
splitGFFEntry, progress_listener);
db_entry = new DatabaseDocumentEntry(doc);
}
......@@ -379,4 +380,29 @@ public class DatabaseEntrySource implements EntrySource
return null;
}
/**
 * Build one DatabaseDocumentEntry for each GFF document obtained by
 * splitting the master document (see DatabaseDocument.getGffDocuments()).
 *
 * @param doc the master document the split GFF buffers were read from
 * @param id  the feature_id of the parent (source) feature
 * @return an array parallel to the documents returned by
 *         getGffDocuments(); elements are null where the corresponding
 *         document was empty, and the whole result is null if entry
 *         creation failed with an EntryInformationException
 */
protected DatabaseDocumentEntry[]
    makeFromGff(final DatabaseDocument doc, String id)
    throws OutOfRangeException, IOException
{
  DatabaseDocumentEntry[] db_entry = null;

  try
  {
    DatabaseDocument[] new_docs = doc.getGffDocuments(location, id);
    db_entry = new DatabaseDocumentEntry[new_docs.length];

    // BUG FIX: the loop previously started at i=1 and wrote db_entry[i-1],
    // which silently dropped new_docs[0] and left the last slot null.
    for(int i = 0; i < new_docs.length; i++)
    {
      // getGffDocuments() leaves a slot null when its GFF buffer was empty
      if(new_docs[i] != null)
        db_entry[i] = new DatabaseDocumentEntry(new_docs[i]);
    }
  }
  catch(EntryInformationException e)
  {
    JOptionPane.showMessageDialog(null,
                       "Failed to get entry: " + e,
                       "Entry Information Exception",
                       JOptionPane.ERROR_MESSAGE);
  }

  return db_entry;
}
}
......@@ -20,7 +20,7 @@
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* $Header: //tmp/pathsoft/artemis/uk/ac/sanger/artemis/components/DatabaseJFrame.java,v 1.4 2005-06-18 07:01:16 tjc Exp $
* $Header: //tmp/pathsoft/artemis/uk/ac/sanger/artemis/components/DatabaseJFrame.java,v 1.5 2005-08-04 15:58:05 tjc Exp $
*/
package uk.ac.sanger.artemis.components;
......@@ -29,6 +29,8 @@ import uk.ac.sanger.artemis.Entry;
import uk.ac.sanger.artemis.sequence.*;
import uk.ac.sanger.artemis.util.InputStreamProgressListener;
import uk.ac.sanger.artemis.util.OutOfRangeException;
import uk.ac.sanger.artemis.util.DatabaseDocument;
import uk.ac.sanger.artemis.io.DatabaseDocumentEntry;
import javax.swing.JFrame;
import javax.swing.JTree;
......@@ -37,6 +39,7 @@ import javax.swing.JSeparator;
import javax.swing.JMenuBar;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JPanel;
import javax.swing.JLabel;
import javax.swing.BorderFactory;
......@@ -57,6 +60,7 @@ import javax.swing.tree.DefaultMutableTreeNode;
public class DatabaseJFrame extends JFrame
{
private JLabel status_line = new JLabel("");
private boolean splitGFFEntry = true;
public DatabaseJFrame(final DatabaseEntrySource entry_source,
final ArtemisMain art_main)
......@@ -146,6 +150,7 @@ public class DatabaseJFrame extends JFrame
final InputStreamProgressListener progress_listener =
art_main.getInputStreamProgressListener();
entry_source.setSplitGFF(splitGFFEntry);
final Entry entry = entry_source.getEntry(id, progress_listener);
if(entry == null)
{
......@@ -155,6 +160,24 @@ public class DatabaseJFrame extends JFrame
}
final EntryEdit new_entry_edit = art_main.makeEntryEdit(entry);
// add gff entries
if(splitGFFEntry)
{
DatabaseDocumentEntry db_entry = (DatabaseDocumentEntry)entry.getEMBLEntry();
final DatabaseDocumentEntry[] entries =
entry_source.makeFromGff((DatabaseDocument)db_entry.getDocument(), id);
for(int i=0; i< entries.length; i++)
{
if(entries[i] == null)
continue;
final Entry new_entry = new Entry(new_entry_edit.getEntryGroup().getBases(), entries[i]);
new_entry_edit.getEntryGroup().add(new_entry);
}
}
new_entry_edit.setVisible(true);
status_line.setText("Sequence loaded.");
}
......@@ -209,6 +232,23 @@ public class DatabaseJFrame extends JFrame
});
fileMenu.add(fileMenuClose);
JMenu optionMenu = new JMenu("Options");
mBar.add(optionMenu);
final JCheckBoxMenuItem splitGFF = new JCheckBoxMenuItem("Split GFF into entries",
splitGFFEntry);
splitGFF.addActionListener(new ActionListener()
{
public void actionPerformed(ActionEvent e)
{
if(splitGFF.isSelected())
splitGFFEntry = true;
else
splitGFFEntry = false;
}
});
optionMenu.add(splitGFF);
return mBar;
}
}
......@@ -50,6 +50,10 @@ public class DatabaseDocument extends Document
private Vector organism;
private String sqlLog = System.getProperty("user.home")+
System.getProperty("file.separator")+"art_sql_debug.log";
private StringBuffer[] gff_buffer;
private StringBuffer gff_buff;
private String[] types = { "exon", "gene", "CDS", "transcript" };
private boolean splitGFFEntry;
/**
*
......@@ -78,13 +82,24 @@ public class DatabaseDocument extends Document
}
/**
 * Create a document that reads a feature from the chado database.
 *
 * @param location          database location string (passed to Document)
 * @param feature_id        id of the (source) feature to read
 * @param splitGFFEntry     if true, the GFF read from the database is
 *                          split into per-feature-type buffers
 * @param progress_listener listener notified of read progress
 */
public DatabaseDocument(String location, String feature_id,
boolean splitGFFEntry,
InputStreamProgressListener progress_listener)
{
super(location);
this.feature_id = feature_id;
this.splitGFFEntry = splitGFFEntry;
this.progress_listener = progress_listener;
}
/**
 * Create a document backed by an in-memory GFF buffer instead of a
 * database query; getInputStream() returns the buffer contents directly.
 * Used for the entries produced by getGffDocuments().
 *
 * @param location   database location string (passed to Document)
 * @param feature_id id of the parent (source) feature
 * @param gff_buff   the GFF lines this document serves
 * @param name       name for this entry (a feature type, e.g. "exon",
 *                   or "other")
 */
public DatabaseDocument(String location, String feature_id,
StringBuffer gff_buff, String name)
{
super(location);
this.feature_id = feature_id;
this.gff_buff = gff_buff;
this.name = name;
}
/**
*
* Append a String to the Document location.
......@@ -173,15 +188,34 @@ public class DatabaseDocument extends Document
**/
public InputStream getInputStream() throws IOException
{
ByteArrayInputStream instream;
if(gff_buff != null)
{
instream = new ByteArrayInputStream(gff_buff.toString().getBytes());
return instream;
}
try
{
Connection conn = getConnection();
System.out.println("Connected");
String entry = getGFF(conn,feature_id) + getSequence(conn);
// String entry = getSequence(conn);
gff_buffer = getGFF(conn,feature_id);
String entry;
if(splitGFFEntry)
entry = gff_buffer[0] + getSequence(conn);
else
{
entry = new String();
for(int i=0; i<gff_buffer.length; i++)
entry = entry + gff_buffer[i];
entry = entry + getSequence(conn);
}
appendToLogFile(entry,sqlLog);
ByteArrayInputStream instream = new ByteArrayInputStream(entry.getBytes());
instream = new ByteArrayInputStream(entry.getBytes());
conn.close();
return instream;
......@@ -195,6 +229,24 @@ public class DatabaseDocument extends Document
return null;
}
/**
 * Create a DatabaseDocument for each non-empty GFF buffer left by the
 * last database read, skipping buffer 0 (which belongs to the main
 * entry). Slots whose buffer is empty are left null.
 *
 * @param location database location string for the new documents
 * @param id       id of the parent (source) feature
 * @return array of per-type documents (may contain null elements)
 */
public DatabaseDocument[] getGffDocuments(String location, String id)
{
  final int ndocs = gff_buffer.length - 1;
  DatabaseDocument[] documents = new DatabaseDocument[ndocs];

  for(int idx = 1; idx <= ndocs; idx++)
  {
    if(gff_buffer[idx].length() == 0)
      continue;                              // nothing of this type read

    // name the entry after its feature type, "other" for the catch-all
    final String entry_name =
        (idx < types.length) ? types[idx] : "other";

    documents[idx - 1] = new DatabaseDocument(location, id,
                                              gff_buffer[idx], entry_name);
  }

  return documents;
}
/**
*
......@@ -202,34 +254,28 @@ public class DatabaseDocument extends Document
* in the form of a GFF stream.
*
*/
private String getGFF(Connection conn, String parentFeatureID)
private StringBuffer[] getGFF(Connection conn, String parentFeatureID)
throws java.sql.SQLException
{
Statement st = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
// String sql = "SELECT strand, fmin, fmax, uniquename, feature.type_id, featureprop.type_id AS prop_type_id, value"+
// " FROM feature, featureloc, featureprop WHERE srcfeature_id = "+parentFeatureID+
// " and featureloc.feature_id=featureprop.feature_id"+
// " and featureloc.feature_id=feature.feature_id";
// String sql = "SELECT feature.feature_id, object_id, strand, fmin, fmax, uniquename, feature.type_id, "+
// " featureprop.type_id AS prop_type_id, featureprop.value FROM feature, featureloc, featureprop, feature_relationship "+
// " WHERE srcfeature_id = "+parentFeatureID+" and featureloc.feature_id=featureprop.feature_id and "+
// " featureloc.feature_id=feature.feature_id and feature_relationship.subject_id=feature.feature_id";
String sql = "SELECT timelastmodified, feature.feature_id, object_id, strand, fmin, fmax, uniquename, feature.type_id, "+
" featureprop.type_id AS prop_type_id, featureprop.value FROM featureloc, featureprop, "+
" feature LEFT JOIN feature_relationship ON feature_relationship.subject_id=feature.feature_id "+
" WHERE srcfeature_id = "+parentFeatureID+" and featureloc.feature_id=featureprop.feature_id and "+
" featureloc.feature_id=feature.feature_id ORDER BY uniquename";
" featureloc.feature_id=feature.feature_id ORDER BY feature.type_id, uniquename";
appendToLogFile(sql,sqlLog);
ResultSet rs = st.executeQuery(sql);
StringBuffer cdsBuffer = new StringBuffer();
StringBuffer[] buffers = new StringBuffer[types.length+1];
for(int i=0; i<buffers.length; i++)
buffers[i] = new StringBuffer();
String parentFeature = getFeatureName(parentFeatureID,conn);
Hashtable hstore = new Hashtable();
StringBuffer this_buff;
while(rs.next())
{
......@@ -250,29 +296,38 @@ public class DatabaseDocument extends Document
parent_id = (String)hstore.get(parent_id);
// make gff format
cdsBuffer.append(parentFeature+"\t"); // seqid
cdsBuffer.append("chado\t"); // source
cdsBuffer.append(typeName+"\t"); // type
cdsBuffer.append(fmin+"\t"); // start
cdsBuffer.append(fmax+"\t"); // end
cdsBuffer.append(".\t"); // score
// select buffer
this_buff = buffers[types.length];
for(int i=0; i<types.length; i++)
{
if(types[i].equals(typeName))
this_buff = buffers[i];
}
this_buff.append(parentFeature+"\t"); // seqid
this_buff.append("chado\t"); // source
this_buff.append(typeName+"\t"); // type
this_buff.append(fmin+"\t"); // start
this_buff.append(fmax+"\t"); // end
this_buff.append(".\t"); // score
if(strand == -1) // strand
cdsBuffer.append("-\t");
this_buff.append("-\t");
else if(strand == 1)
cdsBuffer.append("+\t");
this_buff.append("+\t");
else
cdsBuffer.append(".\t");
this_buff.append(".\t");
cdsBuffer.append(".\t"); // phase
cdsBuffer.append("ID="+name+";");
this_buff.append(".\t"); // phase
this_buff.append("ID="+name+";");
if(parent_id != null)
cdsBuffer.append("Parent="+parent_id+";");
this_buff.append("Parent="+parent_id+";");
cdsBuffer.append("timelastmodified="+timelastmodified+";");
this_buff.append("timelastmodified="+timelastmodified+";");
String value = GFFStreamFeature.encode(rs.getString("value"));
cdsBuffer.append(propTypeName+"="+value); // attributes
this_buff.append(propTypeName+"="+value); // attributes
int rewind = 0;
while(rs.next() && rs.getString("uniquename").equals(name))
......@@ -280,19 +335,19 @@ public class DatabaseDocument extends Document
prop_type_id = rs.getLong("prop_type_id");
propTypeName = getCvtermName(conn,prop_type_id);
value = GFFStreamFeature.encode(rs.getString("value"));
cdsBuffer.append(";"+propTypeName+"="+value);
this_buff.append(";"+propTypeName+"="+value);
rewind++;
}
if(rewind > 0)
rs.previous();
cdsBuffer.append("\n");
this_buff.append("\n");
progress_listener.progressMade("Read from database: "+name);
}
return cdsBuffer.toString();
return buffers;
}
public static Long getCvtermID(String name)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment