Compare commits: v1.0...7bbeaf7dad (264 commits)
.idea/.name (generated, new file, 1 line)
@@ -0,0 +1 @@
BiGpairSEQ
.idea/libraries/commons_rng_1.xml (generated, new file, 13 lines)
@@ -0,0 +1,13 @@
<component name="libraryTable">
  <library name="commons-rng-1">
    <CLASSES>
      <root url="file://$USER_HOME$/Downloads/commons-rng-1.6" />
    </CLASSES>
    <JAVADOC />
    <SOURCES>
      <root url="file://$USER_HOME$/Downloads/commons-rng-1.6" />
    </SOURCES>
    <jarDirectory url="file://$USER_HOME$/Downloads/commons-rng-1.6" recursive="false" />
    <jarDirectory url="file://$USER_HOME$/Downloads/commons-rng-1.6" recursive="false" type="SOURCES" />
  </library>
</component>
readme.md (627 lines)
@@ -1,43 +1,121 @@
# BiGpairSEQ SIMULATOR

## CONTENTS
1. [ABOUT](#about)
2. [THEORY](#theory)
3. [THE BiGpairSEQ ALGORITHM](#the-bigpairseq-algorithm)
4. [USAGE](#usage)
   1. [RUNNING THE PROGRAM](#running-the-program)
   2. [COMMAND LINE OPTIONS](#command-line-options)
   3. [INTERACTIVE INTERFACE](#interactive-interface)
   4. [INPUT/OUTPUT](#input-output)
      1. [Cell Sample Files](#cell-sample-files)
      2. [Sample Plate Files](#sample-plate-files)
      3. [Graph/Data Files](#graph-data-files)
      4. [Matching Results Files](#matching-results-files)
5. [RESULTS](#results)
   1. [SAMPLE PLATES WITH VARYING NUMBERS OF CELLS PER WELL](#sample-plates-with-varying-numbers-of-cells-per-well)
   2. [SIMULATING EXPERIMENTS FROM THE 2015 pairSEQ PAPER](#simulating-experiments-from-the-2015-pairseq-paper)
      1. [EXPERIMENT 1](#experiment-1)
      2. [EXPERIMENT 3](#experiment-3)
6. [CITATIONS](#citations)
7. [EXTERNAL LIBRARIES USED](#external-libraries-used)
8. [ACKNOWLEDGEMENTS](#acknowledgements)
9. [AUTHOR](#author)
10. [DISCLOSURE](#disclosure)
11. [TODO](#todo)

## ABOUT

This program simulates BiGpairSEQ (Bipartite Graph pairSEQ), a graph theory-based adaptation
of the pairSEQ algorithm ([Howie, et al. 2015](#citations)) for pairing T cell receptor sequences.

## THEORY

T cell receptors (TCRs) are encoded by pairs of sequences, alpha sequences (TCRAs) and beta sequences (TCRBs). These sequences
are extremely diverse; to a first approximation, this pair of sequences uniquely identifies a line of T cells.

As described in the original 2015 paper, pairSEQ pairs TCRAs and TCRBs by distributing a
sample of T cells across a 96-well sample plate, then sequencing the contents of each well. It then calculates p-values for
every TCRA/TCRB sequence overlap and compares them against a null distribution to find the most statistically probable pairings.

BiGpairSEQ uses the same fundamental idea of using occupancy overlap to pair TCR sequences, but unlike pairSEQ it
does not require performing any statistical calculations at all. Instead, BiGpairSEQ uses graph theory methods that
produce provably optimal solutions.

BiGpairSEQ creates a [weighted bipartite graph](https://en.wikipedia.org/wiki/Bipartite_graph) representing the sample plate.
The distinct TCRA and TCRB sequences form the two sets of vertices. Every TCRA/TCRB pair that shares a well on the sample plate
is connected by an edge in the graph, with the edge weight set to the number of wells in which both sequences appear. The vertices
themselves are labeled with the occupancy data for the individual sequences they represent, which is useful for pre-filtering
before finding a maximum weight matching. Such a graph fully encodes the distribution data from the sample plate.

The problem of pairing TCRA/TCRB sequences thus reduces to the [assignment problem](https://en.wikipedia.org/wiki/Assignment_problem) of finding a maximum weight
matching (MWM) on a bipartite graph--the subset of vertex-disjoint edges whose weights sum to the maximum possible value.

This is a well-studied combinatorial optimization problem, with many known algorithms that produce
provably optimal solutions. The most theoretically efficient algorithm known to the author for maximum weight matching of a bipartite
graph with strictly integral weights is from [Duan and Su (2012)](#citations). For a graph with m edges, n vertices per side,
and maximum integer edge weight N, their algorithm runs in **O(m sqrt(n) log(N))** time. As the graph representation of
a pairSEQ experiment is bipartite with integer weights, this algorithm seems ideal for BiGpairSEQ. Unfortunately, it is not
implemented by the graph theory library used in this simulator (JGraphT), and the author has not yet had time to write a
full, optimized implementation himself for testing.

So this program instead uses the [Fibonacci heap](https://en.wikipedia.org/wiki/Fibonacci_heap)-based algorithm of Fredman and Tarjan (1987) (essentially
[the Hungarian algorithm](https://en.wikipedia.org/wiki/Hungarian_algorithm) augmented with a more efficient priority queue), which has a worst-case
runtime of **O(n (n log(n) + m))**. The algorithm is implemented as described in [Mehlhorn and Näher (1999)](#citations). (The simulator can use either a
Fibonacci heap or a [pairing heap](https://en.wikipedia.org/wiki/Pairing_heap) as desired. By default, a pairing heap is used,
as in practice pairing heaps often offer superior performance.)

One possible advantage of this less efficient algorithm is that the Hungarian algorithm and its variations work with both the balanced and the unbalanced assignment problem
(that is, cases where both sides of the bipartite graph have the same number of vertices and those in which they don't).
Many other MWM algorithms only work for the balanced assignment problem. While pairSEQ-style experiments should theoretically
be balanced assignment problems, in practice sequence dropout can cause them to be unbalanced. The unbalanced case
*can* be reduced to the balanced case, but doing so involves doubling the graph size. Since the current implementation uses only
the Hungarian algorithm, graph doubling--which could be challenging with the computational resources available to the
author--has not yet been necessary.

There have been some studies showing that [auction algorithms](https://en.wikipedia.org/wiki/Auction_algorithm) for the assignment problem, thanks to their simplicity, can outperform
more complex algorithms with better theoretical asymptotic performance in real-world implementations. The author has implemented a basic forward auction algorithm, which produces optimal assignments for unbalanced bipartite graphs with
integer weights. To allow for unbalanced assignment, this algorithm eschews epsilon-scaling,
and as a result is prone to "bidding wars" which increase run time, making it less efficient than the implementation of
the Fredman-Tarjan algorithm in JGraphT. A forward/reverse auction algorithm as developed by Bertsekas and Castañon
should be able to handle unbalanced (or, as they call it, asymmetric) assignment much more efficiently, but has yet to be
implemented.

The relative time/space efficiency of BiGpairSEQ when backed by different MWM algorithms remains an open question.
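
The whole pipeline above can be pictured in a few lines of JGraphT code. This is a minimal sketch, not the simulator's actual implementation: the `OccupancyGraphSketch` class, the `wellsByAlpha`/`wellsByBeta` maps, and the prefixed string sequence IDs are illustrative assumptions; only the use of JGraphT's `MaximumWeightBipartiteMatching` reflects the Hungarian-style matcher described above.

```java
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.jgrapht.Graph;
import org.jgrapht.alg.interfaces.MatchingAlgorithm;
import org.jgrapht.alg.matching.MaximumWeightBipartiteMatching;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.graph.SimpleWeightedGraph;

class OccupancyGraphSketch {

    // wellsByAlpha / wellsByBeta map each distinct sequence ID to the set of well
    // indices it occupies. IDs are assumed to be prefixed strings (e.g. "A:525902",
    // "B:791533") so the two vertex sets never collide.
    static MatchingAlgorithm.Matching<String, DefaultWeightedEdge> pair(
            Map<String, Set<Integer>> wellsByAlpha,
            Map<String, Set<Integer>> wellsByBeta,
            int totalWells) {

        // Saturating-sequence filter: sequences present in every well carry no signal.
        wellsByAlpha.entrySet().removeIf(e -> e.getValue().size() == totalWells);
        wellsByBeta.entrySet().removeIf(e -> e.getValue().size() == totalWells);

        Graph<String, DefaultWeightedEdge> g = new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
        Set<String> alphas = new LinkedHashSet<>(wellsByAlpha.keySet());
        Set<String> betas = new LinkedHashSet<>(wellsByBeta.keySet());
        alphas.forEach(g::addVertex);
        betas.forEach(g::addVertex);

        // One edge per alpha/beta pair that shares at least one well,
        // weighted by the number of shared wells.
        for (String a : alphas) {
            for (String b : betas) {
                Set<Integer> shared = new HashSet<>(wellsByAlpha.get(a));
                shared.retainAll(wellsByBeta.get(b));
                if (!shared.isEmpty()) {
                    g.setEdgeWeight(g.addEdge(a, b), shared.size());
                }
            }
        }

        // Maximum weight matching on the bipartite graph: the proposed TCRA/TCRB pairings.
        return new MaximumWeightBipartiteMatching<>(g, alphas, betas).getMatching();
    }
}
```

A production implementation would build edges well by well rather than testing every alpha/beta pair, but the resulting graph is the same.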

## THE BiGpairSEQ ALGORITHM

1. Sequence a sample plate of T cells as in pairSEQ.
2. Pre-filter the sequence data to reduce error and minimize the size of the necessary graph. (A sketch of these filters follows this list.)
   1. *Saturating sequence filter*: remove any sequences present in all wells on the sample plate, as there is no signal in the occupancy data of saturating sequences (and each saturating sequence will have an edge to every vertex on the opposite side of the graph, vastly increasing the total graph size).
   2. *Non-existent sequence filter*: sequencing misreads can pollute the data from the sample plate with non-existent sequences. These can be identified by the discrepancy between their occupancy and their total read count. Assuming sequences are read correctly at least half the time, a sequence's total read count (R) should be at least half the well occupancy of that sequence (O) times the read depth of the sequencing run (D). Remove any sequences for which R < (O * D) / 2.
   3. *Misidentified sequence filter*: sequencing misreads can cause one real sequence to be misidentified as a different real sequence. This should be fairly infrequent, but is a problem if it skews a sequence's overall occupancy pattern by causing the sequence to seem to be in a well where it's not. This can be detected by looking for discrepancies in a sequence's per-well read count. On average, the read count for a sequence in an individual well (r) should be equal to its total read count (R) divided by its total well occupancy (O). Remove from the list of wells occupied by a sequence any wells for which r < R / (2 * O).
3. Encode the occupancy data from the sample plate as a weighted bipartite graph, where one set of vertices represents the distinct TCRAs and the other set represents the distinct TCRBs. Between any TCRA and TCRB that share a well, draw an edge. Assign that edge a weight equal to the total number of wells shared by both sequences.
4. Find a maximum weight matching of the bipartite graph, using any [MWM algorithm](https://en.wikipedia.org/wiki/Assignment_problem#Algorithms) that produces a provably optimal result.
   * If desired, restrict the matching to a subset of the graph. (Example: restricting matching attempts to cases where the occupancy overlap is 4 or more wells--that is, edges with weight >= 4.0.) See below for discussion of why this might be desirable.
5. The resultant matching represents the likeliest TCRA/TCRB sequence pairs based on the occupancy pattern of the sample plate.
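
The read-count filters in step 2 come down to two comparisons per sequence. The sketch below is illustrative only: `SeqRecord` and its fields are hypothetical stand-ins for the simulator's own record types, but the thresholds are exactly the R < (O * D) / 2 and r < R / (2 * O) rules stated above.

```java
import java.util.HashMap;
import java.util.Map;

// Hypothetical per-sequence record: total read count R and per-well read
// counts (whose key set gives the well occupancy O).
class SeqRecord {
    long totalReads;                                        // R
    Map<Integer, Long> readsPerWell = new HashMap<>();      // well index -> r
}

class PreFilters {

    // Non-existent sequence filter: drop sequences with R < (O * D) / 2.
    static void dropSpuriousSequences(Map<String, SeqRecord> records, long readDepth) {
        records.values().removeIf(rec -> {
            long occupancy = rec.readsPerWell.size();               // O
            return rec.totalReads < (occupancy * readDepth) / 2.0;  // R < (O * D) / 2
        });
    }

    // Misidentified sequence filter: drop any occupied well whose read count r < R / (2 * O).
    static void dropSuspectWells(Map<String, SeqRecord> records) {
        for (SeqRecord rec : records.values()) {
            double occupancy = rec.readsPerWell.size();             // O
            double threshold = rec.totalReads / (2.0 * occupancy);  // R / (2 * O)
            rec.readsPerWell.values().removeIf(r -> r < threshold);
        }
    }
}
```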

It is important to note that a maximum weight matching is not necessarily unique. If two different sets of vertex-disjoint edges
sum to the same maximal weight, then an MWM algorithm might find either one of them.

For example, consider a well that contains four rare sequences found only in that well, two TCRAs and two TCRBs.
In the graph, both of those TCRAs would have edges to both TCRBs (and to others, of course, but since those edges will have a weight of 1.0,
they are unlikely to be paired in an MWM to sequences with a total occupancy of more than one well). If these four sequences
represent two unique T cells, then only one of the two possible pairings between these sequences is correct. But both
the correct and the incorrect pairing will add 2.0 to the total graph weight, so either one could be part of a maximum weight matching.

It is to minimize the number of possible equivalent-weight matchings that one might restrict the algorithm to examining
only a subset of the graph, as described in step 4 above.

## USAGE

### RUNNING THE PROGRAM

[Download the current version of BiGpairSEQ_Sim.](https://gitea.ejsf.synology.me/efischer/BiGpairSEQ/releases)

BiGpairSEQ_Sim is an executable .jar file. Requires Java 14 or higher. [OpenJDK 17](https://jdk.java.net/17/)
recommended.

Run with the command:

@@ -45,13 +123,124 @@ Run with the command:

`java -jar BiGpairSEQ_Sim.jar`

Processing sample plates with tens of thousands of sequences may require large amounts
of RAM. It is often desirable to increase the JVM maximum heap allocation with the `-Xmx` flag.
For example, to run the program with 32 gigabytes of memory, use the command:

`java -Xmx32G -jar BiGpairSEQ_Sim.jar`

### COMMAND LINE OPTIONS

There are a number of command line options, to allow the program to be used in shell scripts. These can be viewed with
the `-help` flag:

`java -jar BiGpairSEQ_Sim.jar -help`

```
usage: BiGpairSEQ_Sim.jar
 -cells,--make-cells    Makes a cell sample file of distinct T cells
 -graph,--make-graph    Makes a graph/data file. Requires a cell sample file and a sample plate file
 -help                  Displays this help menu
 -match,--match-cdr3    Matches CDR3s. Requires a graph/data file.
 -plate,--make-plate    Makes a sample plate file. Requires a cell sample file.
 -version               Prints the program version number to stdout

usage: BiGpairSEQ_Sim.jar -cells
 -d,--diversity-factor <factor>   The factor by which unique CDR3s outnumber unique CDR1s
 -n,--num-cells <number>          The number of distinct cells to generate
 -o,--output-file <filename>      Name of output file

usage: BiGpairSEQ_Sim.jar -plate
 -c,--cell-file <filename>    The cell sample file to use
 -d,--dropout-rate <rate>     The sequence dropout rate due to amplification error. (0.0 - 1.0)
 -exponential                 Use an exponential distribution for cell sample
 -gaussian                    Use a Gaussian distribution for cell sample
 -lambda <value>              If using -exponential flag, lambda value for distribution
 -o,--output-file <filename>  Name of output file
 -poisson                     Use a Poisson distribution for cell sample
 -pop <number [number]...>    The well populations for each section of the sample plate. There will be
                              as many sections as there are populations given.
 -random <min> <max>          Randomize well populations on sample plate. Takes two arguments: the minimum
                              possible population and the maximum possible population.
 -stddev <value>              If using -gaussian flag, standard deviation for distribution
 -w,--wells <number>          The number of wells on the sample plate

usage: BiGpairSEQ_Sim.jar -graph
 -c,--cell-file <filename>               Cell sample file to use for checking pairing accuracy
 -err,--read-error-prob <prob>           (Optional) The probability that a sequence will be misread. (0.0 - 1.0)
 -errcoll,--error-collision-prob <prob>  (Optional) The probability that two misreads will produce the same
                                         spurious sequence. (0.0 - 1.0)
 -graphml                                (Optional) Output GraphML file
 -nb,--no-binary                         (Optional) Don't output serialized binary file
 -o,--output-file <filename>             Name of output file
 -p,--plate-filename <filename>          Sample plate file from which to construct graph
 -rd,--read-depth <depth>                (Optional) The number of times to read each sequence.
 -realcoll,--real-collision-prob <prob>  (Optional) The probability that a sequence will be misread as another
                                         real sequence. (Only applies to unique misreads; after this has
                                         happened once, future error collisions could produce the real
                                         sequence again) (0.0 - 1.0)

usage: BiGpairSEQ_Sim.jar -match
 -g,--graph-file <filename>   The graph/data file to use
 -max <number>                The maximum number of shared wells to attempt to match a sequence pair
 -maxdiff <number>            (Optional) The maximum difference in total occupancy between two sequences
                              to attempt matching.
 -min <number>                The minimum number of shared wells to attempt to match a sequence pair
 -minpct <percent>            (Optional) The minimum percentage of a sequence's total occupancy shared by
                              another sequence to attempt matching. (0 - 100)
 -o,--output-file <filename>  (Optional) Name of the output file. If not present, no file will be written.
 --print-alphas               (Optional) Print the number of distinct alpha sequences to stdout.
 --print-attempt              (Optional) Print the pairing attempt rate to stdout
 --print-betas                (Optional) Print the number of distinct beta sequences to stdout.
 --print-correct              (Optional) Print the number of correct pairs to stdout
 --print-error                (Optional) Print the pairing error rate to stdout
 --print-incorrect            (Optional) Print the number of incorrect pairs to stdout
 --print-metadata             (Optional) Print a full summary of the matching results to stdout.
 --print-time                 (Optional) Print the total simulation time to stdout.
 -pv,--p-value                (Optional) Calculate p-values for sequence pairs.
```

### INTERACTIVE INTERFACE

If no command line arguments are given, BiGpairSEQ_Sim will launch with an interactive, menu-driven CLI for
generating files and simulating TCR pairing. The main menu looks like this:

```
--------BiGPairSEQ SIMULATOR--------
@@ -63,26 +252,58 @@ Please select an option:
2) Generate a sample plate of T cells
3) Generate CDR3 alpha/beta occupancy data and overlap graph
4) Simulate bipartite graph CDR3 alpha/beta matching (BiGpairSEQ)
8) Options
9) About/Acknowledgments
0) Exit
```

By default, the Options menu looks like this:

```
--------------OPTIONS---------------
1) Turn on cell sample file caching
2) Turn on plate file caching
3) Turn on graph/data file caching
4) Turn off serialized binary graph output
5) Turn on GraphML graph output
6) Turn on calculation of p-values
7) Maximum weight matching algorithm options
0) Return to main menu
```

### INPUT/OUTPUT

To run the simulation, the program reads and writes 4 kinds of files:
* Cell Sample files in CSV format
* Sample Plate files in CSV format
* Graph/Data files in binary object serialization format
* Matching Results files in CSV format

These files are often generated in sequence. When entering filenames, it is not necessary to include the file extension
(.csv or .ser). When reading or writing files, the program will automatically add the correct extension to any filename
without one.

To save file I/O time when using the interactive interface, the most recent instance of each of these four
files either generated or read from disk can be cached in program memory. When caching is active, subsequent uses of the
same data file won't need to be read in again until another file of that type is used or generated,
or caching is turned off for that file type. The program checks whether it needs to update its cached data by comparing
filenames as entered by the user. On encountering a new filename, the program flushes its cache and reads in the new file.

(Note that cached Graph/Data files must be transformed back into their original state after a matching experiment, which
may take some time. Whether file I/O or graph transformation takes longer for graph/data files is likely to be
device-specific.)

The program's caching behavior can be controlled in the Options menu. By default, all caching is OFF.
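
The filename-comparison caching described above amounts to a single-slot cache per file type. A minimal generic sketch with hypothetical names; the simulator's own cache fields (`plateInMemory`, `plateFilename`, `cachePlate` in the main class shown at the end of this diff) follow the same pattern.

```java
import java.util.function.Function;

// Hypothetical single-slot, filename-keyed cache mirroring the behavior described above:
// reuse the in-memory object while the requested filename matches, otherwise flush and re-read.
class FileCacheSketch<T> {
    private T valueInMemory = null;
    private String cachedFilename = null;
    private boolean cachingOn = false;          // all caching is OFF by default

    void setCachingOn(boolean on) { cachingOn = on; }

    T get(String requestedFilename, Function<String, T> readFromDisk) {
        if (cachingOn && requestedFilename.equals(cachedFilename)) {
            return valueInMemory;               // cache hit: skip file I/O
        }
        T value = readFromDisk.apply(requestedFilename);    // new filename: read the file
        if (cachingOn) {                        // flush the old entry and cache the new one
            valueInMemory = value;
            cachedFilename = requestedFilename;
        }
        return value;
    }
}
```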

The program can optionally output Graph/Data files in GraphML format (.graphml) for data portability. This can be
turned on in the Options menu. By default, GraphML output is OFF.

---
#### Cell Sample Files
Cell Sample files consist of any number of distinct "T cells." Every cell contains
four sequences: Alpha CDR3, Beta CDR3, Alpha CDR1, Beta CDR1. The sequences are represented by
random integers. CDR3 Alpha and Beta sequences are all unique within a given Cell Sample file. CDR1 Alpha and Beta sequences
are not necessarily unique; the relative diversity of CDR1s with respect to CDR3s can be set when making the file.

(Note: though cells still have CDR1 sequences, matching of CDR1s is currently awaiting re-implementation.)

@@ -95,12 +316,11 @@ Comments are preceded by `#`

Structure:

---
# Sample contains 1 unique CDR1 for every 4 unique CDR3s.
| Alpha CDR3 | Beta CDR3 | Alpha CDR1 | Beta CDR1 |
|---|---|---|---|
|unique number|unique number|number|number|
| ... | ... | ... | ... |
---

#### Sample Plate Files
@@ -109,7 +329,8 @@ described above). The wells are filled randomly from a Cell Sample file, accordi
frequency distribution. Additionally, every individual sequence within each cell may, with some
given dropout probability, be omitted from the file; this simulates the effect of amplification errors
prior to sequencing. Plates can also be partitioned into any number of sections, each of which can have a
different concentration of T cells per well. Alternatively, the number of T cells in each well can be randomized between
given minimum and maximum population values.

Options when making a Sample Plate file:
* Cell Sample file to use
@@ -119,15 +340,19 @@ Options when making a Sample Plate file:
  * Standard deviation size
* Exponential
  * Lambda value
  * (Based on the slope of the graph in Figure 4C of the pairSEQ paper, the distribution of the original experiment was exponential with a lambda of approximately 0.6. (Howie, et al. 2015))
* Zipf
  * Exponent value
* Total number of wells on the plate
* Well populations random or fixed
  * If random, minimum and maximum population sizes
  * If fixed
    * Number of sections on plate
    * Number of T cells per well
      * per section, if more than one section
* Sequence dropout rate

Files are in CSV format. There are no header labels. Every row represents a well.
Every value represents an individual cell, containing four sequences, depicted as an array string:
`[CDR3A, CDR3B, CDR1A, CDR1B]`. So a representative cell might look like this:

`[525902, 791533, -1, 866282]`

@@ -137,7 +362,6 @@ Dropout sequences are replaced with the value `-1`. Comments are preceded by `#`

Structure:

---
```
# Cell source file name:
# Each row represents one well on the plate
```
@@ -153,25 +377,42 @@

---
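
The per-sequence dropout described above, with the `-1` marker used in the file format, can be sketched in a few lines. The class and method names here are hypothetical; only the 4-element cell layout and the `-1` convention come from the format described in this section.

```java
import java.util.Random;

class DropoutSketch {

    // Apply per-sequence dropout to one cell. Each of the four sequences
    // ([CDR3A, CDR3B, CDR1A, CDR1B]) is independently replaced with the
    // dropout marker -1 with probability dropoutRate before being written to the plate file.
    static int[] applyDropout(int[] cell, double dropoutRate, Random rng) {
        int[] copy = cell.clone();
        for (int i = 0; i < copy.length; i++) {
            if (rng.nextDouble() < dropoutRate) {
                copy[i] = -1;
            }
        }
        return copy;
    }
}
```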

#### Graph/Data Files
Graph/Data files are serialized binaries of a Java object containing the weighted bipartite graph representation of a
Sample Plate, along with the necessary metadata for matching and results output. Making them requires a Cell Sample file
(to construct a list of correct sequence pairs for checking the accuracy of BiGpairSEQ simulations) and a
Sample Plate file (to construct the associated occupancy graph).

These files can be several gigabytes in size. Writing them to a file lets us generate a graph and its metadata once,
then use it for multiple different BiGpairSEQ simulations.

Options for creating a Graph/Data file:
* The Cell Sample file to use
* The Sample Plate file to use (This must have been generated from the selected Cell Sample file.)
* Whether to simulate sequencing read depth. If simulated:
  * The read depth (The number of times each sequence is read)
  * The read error rate (The probability a sequence is misread)
  * The error collision rate (The probability two misreads produce the same spurious sequence)
  * The real sequence collision rate (The probability that a misread will produce a different, real sequence from the sample plate. Only applies to new misreads; once an error of this type has occurred, its likelihood of occurring again is dominated by the error collision probability.)

These files do not have a human-readable structure, and are not portable to other programs.

*Optional GraphML output*

For portability of graph data to other software, turn on [GraphML](http://graphml.graphdrawing.org/index.html) output
in the Options menu in interactive mode, or use the `-graphml` command line argument. This will produce a .graphml file
for the weighted graph, with vertex attributes for sequence, type, total occupancy, total read count, and the read count for every individual occupied well.
This graph contains all the data necessary for the BiGpairSEQ matching algorithm. It does not include the data needed to measure pairing accuracy; for that,
compare the matching results to the original Cell Sample .csv file.
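
A rough sketch of the two output paths follows: plain Java object serialization for the .ser file, and JGraphT's `GraphMLExporter` for the optional .graphml file. The method names are hypothetical, and the real Graph/Data object (`GraphWithMapData` in the source) carries additional metadata beyond the bare graph shown here.

```java
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultWeightedEdge;
import org.jgrapht.nio.graphml.GraphMLExporter;

class GraphOutputSketch {

    // Serialized binary output (.ser): the graph-plus-metadata object written in one piece.
    // In the simulator this object would be the GraphWithMapData instance.
    static void writeBinary(Serializable graphWithMetadata, String filename) throws IOException {
        try (ObjectOutputStream out =
                     new ObjectOutputStream(new FileOutputStream(filename + ".ser"))) {
            out.writeObject(graphWithMetadata);
        }
    }

    // Optional GraphML output (.graphml) for portability to other graph software.
    static void writeGraphML(Graph<String, DefaultWeightedEdge> graph, String filename) throws IOException {
        GraphMLExporter<String, DefaultWeightedEdge> exporter = new GraphMLExporter<>();
        exporter.setExportEdgeWeights(true); // keep the shared-well counts on the edges
        try (FileWriter writer = new FileWriter(filename + ".graphml")) {
            exporter.exportGraph(graph, writer);
        }
    }
}
```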

---

#### Matching Results Files
Matching results files consist of the results of a BiGpairSEQ matching simulation. Making them requires a serialized
binary Graph/Data file (.ser). (Because .graphML files are larger than .ser files, BiGpairSEQ_Sim supports .graphML
output only. Graph input must use a serialized binary.)

Matching results files are in CSV format. Rows are sequence pairings with extra relevant data. Columns are pairing-specific details.
Metadata about the matching simulation is included as comments. Comments are preceded by `#`.
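
Since Apache Commons CSV is the library listed for CSV output, the comment-metadata-plus-rows layout of a results file might be produced roughly as below (assuming a recent Commons CSV version with the builder API). The record and parameter names are hypothetical; the column headers are the ones shown in the example output that follows.

```java
import java.io.FileWriter;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

class ResultsWriterSketch {

    // Write a matching results file: '#' comment lines for the metadata,
    // then a header row and one row per attempted pairing.
    static void writeResults(String filename,
                             Map<String, String> metadata,
                             List<Object[]> pairings) throws IOException {
        CSVFormat format = CSVFormat.DEFAULT.builder().setCommentMarker('#').build();
        try (CSVPrinter printer = new CSVPrinter(new FileWriter(filename + ".csv"), format)) {
            for (Map.Entry<String, String> entry : metadata.entrySet()) {
                printer.printComment(entry.getKey() + ": " + entry.getValue());
            }
            printer.printRecord("Alpha", "Alpha well count", "Beta", "Beta well count",
                    "Overlap count", "Matched Correctly?", "P-value");
            for (Object[] pairing : pairings) {
                printer.printRecord(pairing);
            }
        }
    }
}
```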

Options when running a BiGpairSEQ simulation of CDR3 alpha/beta matching:
@@ -186,93 +427,259 @@ Options when running a BiGpairSEQ simulation of CDR3 alpha/beta matching:

Example output:

---
```
# cell sample filename: 8MilCells.csv
# cell sample size: 8000000
# sample plate filename: 8MilCells_Plate.csv
# sample plate well count: 96
# sequence dropout rate: 0.1
# graph filename: 8MilGraph_rd2
# MWM algorithm type: LEDA book with heap: FIBONACCI
# matching weight: 218017.0
# well populations: 4000
# sequence read depth: 100
# sequence read error rate: 0.01
# read error collision rate: 0.1
# real sequence collision rate: 0.05
# total alphas read from plate: 323711
# total betas read from plate: 323981
# alphas in graph (after pre-filtering): 11707
# betas in graph (after pre-filtering): 11705
# high overlap threshold for pairing: 95
# low overlap threshold for pairing: 3
# minimum overlap percent for pairing: 0
# maximum occupancy difference for pairing: 2147483647
# pairing attempt rate: 0.716
# correct pairing count: 8373
# incorrect pairing count: 7
# pairing error rate: 0.000835
# time to generate graph (seconds): 293
# time to pair sequences (seconds): 1,416
# total simulation time (seconds): 1,709
```

| Alpha | Alpha well count | Beta | Beta well count | Overlap count | Matched Correctly? | P-value |
|---|---|---|---|---|---|---|
|5242972|17|1571520|18|17|true|1.41E-18|
|5161027|18|2072219|18|18|true|7.31E-20|
|4145198|33|1064455|30|29|true|2.65E-21|
|7700582|18|112748|18|18|true|7.31E-20|
|10258642|73|1172093|72|70.0|true|4.19E-21|
|6186865|34|4290363|37|34.0|true|4.56E-26|
|10222686|70|11044018|72|68.0|true|9.55E-25|
|5338100|75|2422988|76|74.0|true|4.57E-22|
|12363907|33|6569852|35|33.0|true|5.70E-26|
|...|...|...|...|...|...|...|

---

**NOTE: The p-values in the sample output above are not used for matching**—they aren't part of the BiGpairSEQ algorithm at all.
P-values (if enabled in the interactive menu options or by using the `-pv` flag on the command line) are calculated *after*
BiGpairSEQ matching is completed, for purposes of comparison with pairSEQ only, using the (corrected) formula from the
original pairSEQ paper (Howie, et al. 2015). Calculation of p-values is off by default to reduce processing time.

## RESULTS

Several BiGpairSEQ simulations were performed on a home computer with the following specs:

* Ryzen 5600X CPU
* 128GB of 3200MHz DDR4 RAM
* 2TB PCIe 3.0 SSD
* Linux Mint 21 (5.15 kernel)

### SAMPLE PLATES WITH VARYING NUMBERS OF CELLS PER WELL

The probability calculations used by pairSEQ require that every well on the sample plate contain the same number of T cells.
BiGpairSEQ does not share this limitation; it is robust to variations in the number of cells per well.

A series of BiGpairSEQ simulations were conducted using a cell sample file of 3.5 million unique T cells. From these cells,
10 sample plate files were created. All of these sample plates had 96 wells, used an exponential distribution with a lambda of 0.6, and
had a sequence dropout rate of 10%.

The well populations of the plates were:
* One sample plate with 1000 T cells/well
* One sample plate with 2000 T cells/well
* One sample plate with 3000 T cells/well
* One sample plate with 4000 T cells/well
* One sample plate with 5000 T cells/well
* Five sample plates with each individual well's population randomized, from 1000 to 5000 T cells. (Average population ~3000 T cells/well.)

All BiGpairSEQ simulations were run with a low overlap threshold of 3 and a high overlap threshold of 94.
No optional filters were used, so pairing was attempted for all sequences with overlaps within the threshold values.

NOTE: these results were obtained with an earlier version of BiGpairSEQ_Sim, and should be re-run with the current version.
The observed behavior is not expected to change, however.

Constant well population plate results:

| |1000 Cell/Well Plate|2000 Cell/Well Plate|3000 Cell/Well Plate|4000 Cell/Well Plate|5000 Cell/Well Plate|
|---|---|---|---|---|---|
|Total Alphas Found|6407|7330|7936|8278|8553|
|Total Betas Found|6405|7333|7968|8269|8582|
|Pairing Attempt Rate|0.661|0.653|0.600|0.579|0.559|
|Correct Pairing Count|4231|4749|4723|4761|4750|
|Incorrect Pairing Count|3|34|40|26|29|
|Pairing Error Rate|0.000709|0.00711|0.00840|0.00543|0.00607|
|Simulation Time (Seconds)|500|643|700|589|598|

Randomized well population plate results:

| |Random Plate 1|Random Plate 2|Random Plate 3|Random Plate 4|Random Plate 5|Average|
|---|---|---|---|---|---|---|
|Total Alphas Found|7853|7904|7964|7898|7917|7907|
|Total Betas Found|7851|7891|7920|7910|7894|7893|
|Pairing Attempt Rate|0.607|0.610|0.601|0.605|0.603|0.605|
|Correct Pairing Count|4718|4782|4721|4755|4731|4741|
|Incorrect Pairing Count|51|35|42|27|29|37|
|Pairing Error Rate|0.0107|0.00727|0.00882|0.00565|0.00609|0.00771|
|Simulation Time (Seconds)|590|677|730|618|615|646|

The average results for the randomized plates are closest to the constant plate with 3000 T cells/well.
This and several other tests indicate that BiGpairSEQ treats a sample plate with a highly variable number of T cells/well
roughly as though it had a constant well population equal to the plate's average well population.

### SIMULATING EXPERIMENTS FROM THE 2015 pairSEQ PAPER
#### Experiment 1
This simulation was an attempt to replicate the conditions of experiment 1 from the 2015 pairSEQ paper: a matching was found for a
96-well sample plate with 4,000 T cells/well, taken from a sample of 8,400,000
distinct cells sampled with an exponential frequency distribution. Examination of Figure 4C from the paper seems to show the points
(-5, 4) and (-4.5, 3.3) on the line at the boundary of the shaded region, so a lambda value of 1.4 was used for the
exponential distribution.

The sequence dropout rate was 10%, as the analysis in the paper concluded that most TCR
sequences "have less than a 10% chance of going unobserved." (Howie, et al. 2015) Given this choice of 10%, the simulated
sample plate is likely to have more sequence dropout, and thus greater error, than the real experiment.

The original paper does not contain (or the author of this document failed to identify) information on sequencing depth,
read error probability, or the probabilities of different kinds of read error collisions. As the pre-filtering of BiGpairSEQ
has successfully filtered out all such errors for any reasonable error rates the author has yet tested, this simulation was
done without simulating any sequencing errors, to reduce the processing time.

This simulation was performed 5 times with min/max occupancy thresholds of 3 and 95 wells respectively for matching.

| |Run 1|Run 2|Run 3|Run 4|Run 5|Average|
|---|---|---|---|---|---|---|
|Total pairs|4398|4420|4404|4409|4414|4409|
|Correct pairs|4322|4313|4337|4336|4339|4329.4|
|Incorrect pairs|76|107|67|73|75|79.6|
|Error rate|0.0173|0.0242|0.0152|0.0166|0.0170|0.018|
|Simulation time (seconds)|697|484|466|473|463|516.6|

The experiment in the original paper called 4143 pairs with a false discovery rate of 0.01.

Given the roughness of the estimation for the cell frequency distribution of the original experiment and the likely
higher rate of sequence dropout in the simulation, these simulated results match the real experiment fairly well.

#### Experiment 3
To simulate experiment 3 from the original paper, a matching was made for a 96-well sample plate with 160,000 T cells/well,
taken from a sample of 4.5 million distinct T cells sampled with an exponential frequency distribution (lambda 1.4). The
sequence dropout rate was again 10%, and no sequencing errors were simulated. Once again, deviation from the original
experiment is expected due to the roughness of the estimated frequency distribution, and due to the high sequence dropout
rate.

Results metadata:
```
# total alphas read from plate: 6929
# total betas read from plate: 6939
# alphas in graph (after pre-filtering): 4452
# betas in graph (after pre-filtering): 4461
# high overlap threshold for pairing: 95
# low overlap threshold for pairing: 3
# minimum overlap percent for pairing: 0
# maximum occupancy difference for pairing: 100
# pairing attempt rate: 0.767
# correct pairing count: 3233
# incorrect pairing count: 182
# pairing error rate: 0.0533
# time to generate graph (seconds): 40
# time to pair sequences (seconds): 230
# total simulation time (seconds): 270
```

The simulation only found 6929 distinct TCRAs and 6939 TCRBs on the sample plate, orders of magnitude fewer than the number of
pairs called in the pairSEQ experiment. These results show that at very high sampling depths, the differences in the
underlying frequency distribution drastically affect the results. The real distribution clearly has a much longer "tail"
than the simulated exponential distribution. Implementing a way to exert finer control over the sampling distribution from
the file of distinct cells may enable better simulated replication of this experiment.

## CITATIONS
* Howie, B., Sherwood, A. M., et al. ["High-throughput pairing of T cell receptor alpha and beta sequences."](https://pubmed.ncbi.nlm.nih.gov/26290413/) Sci. Transl. Med. 7, 301ra131 (2015)
* Duan, R., Su, H. ["A Scaling Algorithm for Maximum Weight Matching in Bipartite Graphs."](https://web.eecs.umich.edu/~pettie/matching/Duan-Su-scaling-bipartite-matching.pdf) Proceedings of the Twenty-Third Annual ACM-SIAM Symposium on Discrete Algorithms, p. 1413-1424 (2012)
* Mehlhorn, K., Näher, S. [The LEDA Platform of Combinatorial and Geometric Computing.](https://people.mpi-inf.mpg.de/~mehlhorn/LEDAbook.html) Cambridge University Press. Chapter 7, Graph Algorithms; p. 132-162 (1999)
* Fredman, M., Tarjan, R. ["Fibonacci heaps and their uses in improved network optimization algorithms."](https://www.cl.cam.ac.uk/teaching/1011/AlgorithII/1987-FredmanTar-fibonacci.pdf) J. ACM, 34(3):596–615 (1987)
* Bertsekas, D., Castañon, D. ["A forward/reverse auction algorithm for asymmetric assignment problems."](https://www.mit.edu/~dimitrib/For_Rev_Asym_Auction.pdf) Computational Optimization and Applications 1, 277-297 (1992)
* Dimitrios Michail, Joris Kinable, Barak Naveh, and John V. Sichi. ["JGraphT—A Java Library for Graph Data Structures and Algorithms."](https://dl.acm.org/doi/10.1145/3381449) ACM Trans. Math. Softw. 46, 2, Article 16 (2020)

## EXTERNAL LIBRARIES USED
* [JGraphT](https://jgrapht.org) -- Graph theory data structures and algorithms
* [JHeaps](https://www.jheaps.org) -- For pairing heap priority queue used in maximum weight matching algorithm
* [Apache Commons CSV](https://commons.apache.org/proper/commons-csv/) -- For CSV file output
* [Apache Commons CLI](https://commons.apache.org/proper/commons-cli/) -- To enable command line arguments for scripting.

## ACKNOWLEDGEMENTS
BiGpairSEQ was conceived in collaboration with the author's spouse, Dr. Alice MacQueen, who brought the original
pairSEQ paper to the author's attention and explained all the biology terms he didn't know.

## AUTHOR
BiGpairSEQ algorithm and simulation by Eugene Fischer, 2021. Improvements and documentation, 2022–2025.

## DISCLOSURE
The earliest versions of the BiGpairSEQ simulator were written in 2021 to let Dr. MacQueen test hypothetical extensions
of the published pairSEQ protocol while she was interviewing for a position at Adaptive Biotechnologies. She was
employed at Adaptive Biotechnologies starting in 2022.

The author has worked on this BiGpairSEQ simulator since 2021 without Dr. MacQueen's involvement, since she has had
access to related, proprietary technologies. The author has had no such access, relying exclusively on the 2015 pairSEQ
paper and other academic publications. He continues to work on the BiGpairSEQ simulator recreationally, as it has been
a means of exploring some very beautiful math.

## TODO
|
||||
|
||||
* Update CLI option text in this readme to include Zipf distribution options
|
||||
* ~~Try invoking GC at end of workloads to reduce paging to disk~~ DONE
|
||||
* ~~Hold graph data in memory until another graph is read-in? ABANDONED UNABANDONED~~ DONE
|
||||
* ~~*No, this won't work, because BiGpairSEQ simulations alter the underlying graph based on filtering constraints. Changes would cascade with multiple experiments.*~~
|
||||
* Might have figured out a way to do it, by taking edges out and then putting them back into the graph. This may actually be possible.
|
||||
* It is possible, though the modifications to the graph incur their own performance penalties. Need testing to see which option is best. It may be computer-specific.
|
||||
* ~~Test whether pairing heap (currently used) or Fibonacci heap is more efficient for priority queue in current matching algorithm~~ DONE
|
||||
* ~~in theory Fibonacci heap should be more efficient, but complexity overhead may eliminate theoretical advantage~~
|
||||
* ~~Add controllable heap-type parameter?~~
|
||||
* Parameter implemented. Pairing heap the current default.
|
||||
* ~~Implement sample plates with random numbers of T cells per well.~~ DONE
|
||||
* Possible BiGpairSEQ advantage over pairSEQ: BiGpairSEQ is resilient to variations in well population sizes on a sample plate; pairSEQ is not due to nature of probability calculations.
|
||||
* preliminary data suggests that BiGpairSEQ behaves roughly as though the whole plate had whatever the *average* well concentration is, but that's still speculative.
|
||||
* ~~See if there's a reasonable way to reformat Sample Plate files so that wells are columns instead of rows.~~
|
||||
* ~~Problem is variable number of cells in a well~~
|
||||
* ~~Apache Commons CSV library writes entries a row at a time~~
|
||||
* Got this working, but at the cost of a profoundly strange bug in graph occupancy filtering. Have reverted the repo until I can figure out what caused that. Given how easily Thingiverse transposes CSV matrices in R, might not even be worth fixing.
|
||||
* ~~Enable GraphML output in addition to serialized object binaries, for data portability~~ DONE
|
||||
* ~~Have a branch where this is implemented, but there's a bug that broke matching. Don't currently have time to fix.~~
|
||||
* ~~Re-implement command line arguments, to enable scripting and statistical simulation studies~~ DONE
|
||||
* ~~Implement custom Vertex class to simplify code and make it easier to implement different MWM algorithms~~ DONE
|
||||
* Advantage: would eliminate the need to use maps to associate vertices with sequences, which would make the code easier to understand.
|
||||
* This also seems to be faster when using the same algorithm than the version with lots of maps, which is a nice bonus!
|
||||
* ~~Implement simulation of read depth, and of read errors. Pre-filter graph for difference in read count to eliminate spurious sequences.~~ DONE
|
||||
* Pre-filtering based on comparing (read depth) * (occupancy) to (read count) for each sequence works extremely well
|
||||
* ~~Add read depth simulation options to CLI~~ DONE
|
||||
* ~~Update graphml output to reflect current Vertex class attributes~~ DONE
|
||||
* Individual well data from the SequenceRecords could be included, if there's ever a reason for it
|
||||
* ~~Implement simulation of sequences being misread as other real sequence~~ DONE
|
||||
* Implement a redistributive heap for the LEDA matching algorithm to reach the theoretical worst case of O(n(m + n log C)), where C is the highest edge weight.
* Update the matching metadata output options in the CLI
  * Add frequency distribution details to the metadata output
    * Need to make an enum for the different distribution types, refactor the Plate class and user interfaces, add the necessary fields to GraphWithMapData, and then call it from Simulator.
* Update the performance data in this README
* ~~Add a section to the README describing the data filtering methods.~~ DONE, now part of the algorithm description
* Re-implement the CDR1 matching method
* ~~Refactor the simulator code to collect all needed data in a single scan of the plate~~ DONE
  * ~~Currently it scans once for the vertices and then again for the edge weights. This made simulating read depth awkward, and incompatible with caching of plate files.~~
  * ~~This would be a fairly major rewrite of the simulator code, but it could make things faster, and would definitely make them cleaner.~~
* Implement Duan and Su's maximum weight matching algorithm
  * ~~Add a controllable algorithm-type parameter?~~ DONE
  * This would be fun and valuable, but would probably take more time than I have for a hobby project.
* ~~Implement an auction algorithm for maximum weight matching~~ DONE
* Implement a forward/reverse auction algorithm for maximum weight matching
* Implement an algorithm for approximating a maximum weight matching
  * Some of these run in linear or near-linear time; see the sketch directly below.
    * Given that the underlying biological samples have many, many sources of error, this would probably be the most useful option in practice. It seems less mathematically elegant, though, and so less fun for me.
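
  A sketch of the simplest of these, the classic greedy 1/2-approximation: sort the edges by weight and keep any edge whose endpoints are both still free. It reuses the JGraphT and `Vertex` types already used in this repo, but the `GreedyMatching` class itself is only illustrative and is not what BiGpairSEQ currently runs:

  ```java
  import org.jgrapht.graph.DefaultWeightedEdge;
  import org.jgrapht.graph.SimpleWeightedGraph;

  import java.util.ArrayList;
  import java.util.Comparator;
  import java.util.HashSet;
  import java.util.List;
  import java.util.Set;

  public final class GreedyMatching {

      // Consider edges from heaviest to lightest and keep an edge whenever both
      // endpoints are unmatched. Guarantees at least half the optimal weight and
      // runs in O(m log m) for the sort plus a linear scan.
      public static List<DefaultWeightedEdge> approximateMaxWeightMatching(
              SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph) {
          List<DefaultWeightedEdge> edges = new ArrayList<>(graph.edgeSet());
          edges.sort(Comparator.comparingDouble(graph::getEdgeWeight).reversed());

          Set<Vertex> matched = new HashSet<>();
          List<DefaultWeightedEdge> matching = new ArrayList<>();
          for (DefaultWeightedEdge e : edges) {
              Vertex alpha = graph.getEdgeSource(e);
              Vertex beta = graph.getEdgeTarget(e);
              if (!matched.contains(alpha) && !matched.contains(beta)) {
                  matching.add(e);
                  matched.add(alpha);
                  matched.add(beta);
              }
          }
          return matching;
      }
  }
  ```
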
* Implement Vose's alias method for arbitrary statistical distributions of cells
  * Should probably refactor to use Apache Commons RNG for this; see the sketch directly below.
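
  A self-contained sketch of Vose's alias method (O(n) table construction, O(1) sampling); the `AliasMethod` class name is illustrative, and a production version would probably delegate the random numbers to Apache Commons RNG as noted above:

  ```java
  import java.util.ArrayDeque;
  import java.util.Deque;
  import java.util.Random;

  public final class AliasMethod {

      private final double[] prob; // chance of keeping column i on a direct hit
      private final int[] alias;   // fallback column when the biased coin flip fails
      private final Random rand;

      // Build the alias tables from non-negative (possibly unnormalized) weights.
      public AliasMethod(double[] weights, Random rand) {
          int n = weights.length;
          this.rand = rand;
          this.prob = new double[n];
          this.alias = new int[n];

          double total = 0.0;
          for (double w : weights) {
              total += w;
          }

          // Scale the weights so the average column value is exactly 1.
          double[] scaled = new double[n];
          Deque<Integer> small = new ArrayDeque<>();
          Deque<Integer> large = new ArrayDeque<>();
          for (int i = 0; i < n; i++) {
              scaled[i] = weights[i] * n / total;
              if (scaled[i] < 1.0) { small.push(i); } else { large.push(i); }
          }

          // Pair each under-full column with an over-full one.
          while (!small.isEmpty() && !large.isEmpty()) {
              int s = small.pop();
              int l = large.pop();
              prob[s] = scaled[s];
              alias[s] = l;
              scaled[l] = (scaled[l] + scaled[s]) - 1.0;
              if (scaled[l] < 1.0) { small.push(l); } else { large.push(l); }
          }
          // Whatever is left over is numerically equal to 1.
          while (!large.isEmpty()) { prob[large.pop()] = 1.0; }
          while (!small.isEmpty()) { prob[small.pop()] = 1.0; }
      }

      // Draw an index in O(1): pick a column uniformly, then flip its biased coin.
      public int next() {
          int column = rand.nextInt(prob.length);
          return rand.nextDouble() < prob[column] ? column : alias[column];
      }
  }
  ```
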
* Use Commons JCS for caching
* Parameterize the pre-filtering options
5
src/main/java/AlgorithmType.java
Normal file
@@ -0,0 +1,5 @@
public enum AlgorithmType {
    HUNGARIAN,              //Hungarian algorithm
    AUCTION,                //Forward auction algorithm
    INTEGER_WEIGHT_SCALING, //integer weight scaling algorithm of Duan and Su
}
195
src/main/java/BiGpairSEQ.java
Normal file
@@ -0,0 +1,195 @@
|
||||
import java.util.Random;
|
||||
|
||||
//main class. For choosing interface type and holding settings
|
||||
public class BiGpairSEQ {
|
||||
|
||||
private static final Random rand = new Random();
|
||||
private static CellSample cellSampleInMemory = null;
|
||||
private static String cellFilename = null;
|
||||
private static Plate plateInMemory = null;
|
||||
private static String plateFilename = null;
|
||||
private static GraphWithMapData graphInMemory = null;
|
||||
private static String graphFilename = null;
|
||||
private static boolean cacheCells = false;
|
||||
private static boolean cachePlate = false;
|
||||
private static boolean cacheGraph = false;
|
||||
private static AlgorithmType matchingAlgoritmType = AlgorithmType.HUNGARIAN;
|
||||
private static HeapType priorityQueueHeapType = HeapType.PAIRING;
|
||||
private static DistributionType distributionType = DistributionType.ZIPF;
|
||||
private static boolean outputBinary = true;
|
||||
private static boolean outputGraphML = false;
|
||||
private static boolean calculatePValue = false;
|
||||
private static final String version = "version 4.2";
|
||||
|
||||
public static void main(String[] args) {
|
||||
if (args.length == 0) {
|
||||
InteractiveInterface.startInteractive();
|
||||
}
|
||||
else {
|
||||
//Command line arguments have been re-implemented; hand them to the CLI parser.
CommandLineInterface.startCLI(args);
|
||||
}
|
||||
}
|
||||
|
||||
public static Random getRand() {
|
||||
return rand;
|
||||
}
|
||||
|
||||
public static CellSample getCellSampleInMemory() {
|
||||
return cellSampleInMemory;
|
||||
}
|
||||
|
||||
public static void setCellSampleInMemory(CellSample cellSample, String filename) {
|
||||
if(cellSampleInMemory != null) {
|
||||
clearCellSampleInMemory();
|
||||
}
|
||||
cellSampleInMemory = cellSample;
|
||||
cellFilename = filename;
|
||||
System.out.println("Cell sample file " + filename + " cached.");
|
||||
}
|
||||
|
||||
public static void clearCellSampleInMemory() {
|
||||
cellSampleInMemory = null;
|
||||
cellFilename = null;
|
||||
System.gc();
|
||||
System.out.println("Cell sample file cache cleared.");
|
||||
|
||||
}
|
||||
|
||||
public static String getCellFilename() {
|
||||
return cellFilename;
|
||||
}
|
||||
|
||||
public static DistributionType getDistributionType() {return distributionType;}
|
||||
|
||||
public static void setDistributionType(DistributionType type) {distributionType = type;}
|
||||
|
||||
public static Plate getPlateInMemory() {
|
||||
return plateInMemory;
|
||||
}
|
||||
|
||||
public static void setPlateInMemory(Plate plate, String filename) {
|
||||
if(plateInMemory != null) {
|
||||
clearPlateInMemory();
|
||||
}
|
||||
plateInMemory = plate;
|
||||
plateFilename = filename;
|
||||
System.out.println("Sample plate file " + filename + " cached.");
|
||||
}
|
||||
|
||||
public static void clearPlateInMemory() {
|
||||
plateInMemory = null;
|
||||
plateFilename = null;
|
||||
System.gc();
|
||||
System.out.println("Sample plate file cache cleared.");
|
||||
|
||||
}
|
||||
|
||||
public static String getPlateFilename() {
|
||||
return plateFilename;
|
||||
}
|
||||
|
||||
|
||||
public static GraphWithMapData getGraphInMemory() {return graphInMemory;
|
||||
}
|
||||
|
||||
public static void setGraphInMemory(GraphWithMapData g, String filename) {
|
||||
if (graphInMemory != null) {
|
||||
clearGraphInMemory();
|
||||
}
|
||||
graphInMemory = g;
|
||||
graphFilename = filename;
|
||||
System.out.println("Graph and data file " + filename + " cached.");
|
||||
}
|
||||
|
||||
public static void clearGraphInMemory() {
|
||||
graphInMemory = null;
|
||||
graphFilename = null;
|
||||
System.gc();
|
||||
System.out.println("Graph and data file cache cleared.");
|
||||
}
|
||||
|
||||
public static String getGraphFilename() {
|
||||
return graphFilename;
|
||||
}
|
||||
|
||||
public static boolean cacheCells() {
|
||||
return cacheCells;
|
||||
}
|
||||
|
||||
public static void setCacheCells(boolean cacheCells) {
|
||||
//if not caching, clear the memory
|
||||
if(!cacheCells){
|
||||
BiGpairSEQ.clearCellSampleInMemory();
|
||||
System.out.println("Cell sample file caching: OFF.");
|
||||
}
|
||||
else {
|
||||
System.out.println("Cell sample file caching: ON.");
|
||||
}
|
||||
BiGpairSEQ.cacheCells = cacheCells;
|
||||
}
|
||||
|
||||
public static boolean cachePlate() {
|
||||
return cachePlate;
|
||||
}
|
||||
|
||||
public static void setCachePlate(boolean cachePlate) {
|
||||
//if not caching, clear the memory
|
||||
if(!cachePlate) {
|
||||
BiGpairSEQ.clearPlateInMemory();
|
||||
System.out.println("Sample plate file caching: OFF.");
|
||||
}
|
||||
else {
|
||||
System.out.println("Sample plate file caching: ON.");
|
||||
}
|
||||
BiGpairSEQ.cachePlate = cachePlate;
|
||||
}
|
||||
|
||||
public static boolean cacheGraph() {
|
||||
return cacheGraph;
|
||||
}
|
||||
|
||||
public static void setCacheGraph(boolean cacheGraph) {
|
||||
//if not caching, clear the memory
|
||||
if(!cacheGraph) {
|
||||
BiGpairSEQ.clearGraphInMemory();
|
||||
System.out.println("Graph/data file caching: OFF.");
|
||||
}
|
||||
else {
|
||||
System.out.println("Graph/data file caching: ON.");
|
||||
}
|
||||
BiGpairSEQ.cacheGraph = cacheGraph;
|
||||
}
|
||||
|
||||
public static HeapType getPriorityQueueHeapType() {
|
||||
return priorityQueueHeapType;
|
||||
}
|
||||
|
||||
public static AlgorithmType getMatchingAlgoritmType() { return matchingAlgoritmType; }
|
||||
|
||||
public static void setHungarianAlgorithm() { matchingAlgoritmType = AlgorithmType.HUNGARIAN; }
|
||||
|
||||
public static void setIntegerWeightScalingAlgorithm() { matchingAlgoritmType = AlgorithmType.INTEGER_WEIGHT_SCALING; }
|
||||
|
||||
public static void setAuctionAlgorithm() { matchingAlgoritmType = AlgorithmType.AUCTION; }
|
||||
|
||||
public static void setPairingHeap() {
|
||||
priorityQueueHeapType = HeapType.PAIRING;
|
||||
}
|
||||
|
||||
public static void setFibonacciHeap() {
|
||||
priorityQueueHeapType = HeapType.FIBONACCI;
|
||||
}
|
||||
|
||||
public static boolean outputBinary() {return outputBinary;}
|
||||
public static void setOutputBinary(boolean b) {outputBinary = b;}
|
||||
|
||||
public static boolean outputGraphML() {return outputGraphML;}
|
||||
public static void setOutputGraphML(boolean b) {outputGraphML = b;}
|
||||
|
||||
public static boolean calculatePValue() {return calculatePValue; }
|
||||
public static void setCalculatePValue(boolean b) {calculatePValue = b; }
|
||||
|
||||
public static String getVersion() { return version; }
|
||||
}
|
||||
@@ -12,7 +12,8 @@ import java.util.List;
|
||||
public class CellFileReader {
|
||||
|
||||
private String filename;
|
||||
private List<Integer[]> distinctCells = new ArrayList<>();
|
||||
private List<String[]> distinctCells = new ArrayList<>();
|
||||
private Integer cdr1Freq;
|
||||
|
||||
public CellFileReader(String filename) {
|
||||
if(!filename.matches(".*\\.csv")){
|
||||
@@ -31,26 +32,36 @@ public class CellFileReader {
|
||||
CSVParser parser = new CSVParser(reader, cellFileFormat);
|
||||
){
|
||||
for(CSVRecord record: parser.getRecords()) {
|
||||
Integer[] cell = new Integer[4];
|
||||
cell[0] = Integer.valueOf(record.get("Alpha CDR3"));
|
||||
cell[1] = Integer.valueOf(record.get("Beta CDR3"));
|
||||
cell[2] = Integer.valueOf(record.get("Alpha CDR1"));
|
||||
cell[3] = Integer.valueOf(record.get("Beta CDR1"));
|
||||
String[] cell = new String[4];
|
||||
cell[0] = record.get("Alpha CDR3");
|
||||
cell[1] = record.get("Beta CDR3");
|
||||
cell[2] = record.get("Alpha CDR1");
|
||||
cell[3] = record.get("Beta CDR1");
|
||||
distinctCells.add(cell);
|
||||
}
|
||||
|
||||
|
||||
} catch(IOException ex){
|
||||
System.out.println("cell file " + filename + " not found.");
|
||||
System.err.println(ex);
|
||||
}
|
||||
|
||||
//get CDR1 frequency
|
||||
ArrayList<String> cdr1Alphas = new ArrayList<>();
|
||||
for (String[] cell : distinctCells) {
|
||||
cdr1Alphas.add(cell[3]);
|
||||
}
|
||||
double count = cdr1Alphas.stream().distinct().count();
|
||||
count = Math.ceil(distinctCells.size() / count);
|
||||
cdr1Freq = (int) count;
|
||||
|
||||
}
|
||||
|
||||
public CellSample getCellSample() {
|
||||
CellSample sample = new CellSample(distinctCells, cdr1Freq);
|
||||
sample.setFilename(filename);
|
||||
return sample;
|
||||
}
|
||||
|
||||
public String getFilename() { return filename;}
|
||||
|
||||
public List<Integer[]> getCells(){
|
||||
return distinctCells;
|
||||
}
|
||||
|
||||
public Integer getCellCount() {
|
||||
return distinctCells.size();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ import java.util.List;
|
||||
public class CellFileWriter {
|
||||
|
||||
private String[] headers = {"Alpha CDR3", "Beta CDR3", "Alpha CDR1", "Beta CDR1"};
|
||||
List<Integer[]> cells;
|
||||
List<String[]> cells;
|
||||
String filename;
|
||||
Integer cdr1Freq;
|
||||
|
||||
|
||||
@@ -1,16 +1,53 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
public class CellSample {
|
||||
|
||||
private List<Integer[]> cells;
|
||||
private List<String[]> cells;
|
||||
private Integer cdr1Freq;
|
||||
private String filename;
|
||||
|
||||
public CellSample(List<Integer[]> cells, Integer cdr1Freq){
|
||||
public CellSample(Integer numDistinctCells, Integer cdr1Freq){
|
||||
this.cdr1Freq = cdr1Freq;
|
||||
List<Integer> numbersCDR3 = new ArrayList<>();
|
||||
List<Integer> numbersCDR1 = new ArrayList<>();
|
||||
Integer numDistCDR3s = 2 * numDistinctCells + 1;
|
||||
//Assign consecutive integers for each CDR3. This ensures they are all unique.
|
||||
IntStream.range(1, numDistCDR3s + 1).forEach(i -> numbersCDR3.add(i));
|
||||
//After all CDR3s are assigned, start assigning consecutive integers to CDR1s
|
||||
//There will usually be fewer integers in the CDR1 list, which will allow repeats below
|
||||
IntStream.range(numDistCDR3s + 1, numDistCDR3s + 1 + (numDistCDR3s / cdr1Freq) + 1).forEach(i -> numbersCDR1.add(i));
|
||||
//randomize the order of the numbers in the lists
|
||||
Collections.shuffle(numbersCDR3);
|
||||
Collections.shuffle(numbersCDR1);
|
||||
|
||||
//Each cell represented by 4 values
|
||||
//two CDR3s, and two CDR1s. First two values are CDR3s (alpha, beta), second two are CDR1s (alpha, beta)
|
||||
List<String[]> distinctCells = new ArrayList<>();
|
||||
for(int i = 0; i < numbersCDR3.size() - 1; i = i + 2){
|
||||
//Go through entire CDR3 list once, make pairs of alphas and betas
|
||||
String tmpCDR3a = numbersCDR3.get(i).toString();
|
||||
String tmpCDR3b = numbersCDR3.get(i+1).toString();
|
||||
//Go through the (likely shorter) CDR1 list as many times as necessary, make pairs of alphas and betas
|
||||
String tmpCDR1a = numbersCDR1.get(i % numbersCDR1.size()).toString();
|
||||
String tmpCDR1b = numbersCDR1.get((i+1) % numbersCDR1.size()).toString();
|
||||
//Make the array representing the cell
|
||||
String[] tmp = {tmpCDR3a, tmpCDR3b, tmpCDR1a, tmpCDR1b};
|
||||
//Add the cell to the list of distinct cells
|
||||
distinctCells.add(tmp);
|
||||
}
|
||||
this.cells = distinctCells;
|
||||
this.filename = filename;
|
||||
}
|
||||
|
||||
public CellSample(List<String[]> cells, Integer cdr1Freq){
|
||||
this.cells = cells;
|
||||
this.cdr1Freq = cdr1Freq;
|
||||
}
|
||||
|
||||
public List<Integer[]> getCells(){
|
||||
public List<String[]> getCells(){
|
||||
return cells;
|
||||
}
|
||||
|
||||
@@ -18,8 +55,12 @@ public class CellSample {
|
||||
return cdr1Freq;
|
||||
}
|
||||
|
||||
public Integer population(){
|
||||
public Integer getCellCount(){
|
||||
return cells.size();
|
||||
}
|
||||
|
||||
public String getFilename() { return filename; }
|
||||
|
||||
public void setFilename(String filename) { this.filename = filename; }
|
||||
|
||||
}
|
||||
|
||||
574
src/main/java/CommandLineInterface.java
Normal file
@@ -0,0 +1,574 @@
|
||||
import org.apache.commons.cli.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
/*
|
||||
* Class for parsing options passed to program from command line
|
||||
*
|
||||
* Top-level flags:
|
||||
* cells : to make a cell sample file
|
||||
* plate : to make a sample plate file
|
||||
* graph : to make a graph and data file
|
||||
* match : to do a cdr3 matching (WITH OR WITHOUT MAKING A RESULTS FILE. May just want to print summary for piping.)
|
||||
*
|
||||
* Cell flags:
|
||||
* count : number of cells to generate
|
||||
* diversity factor : factor by which CDR3s are more diverse than CDR1s
|
||||
* output : name of the output file
|
||||
*
|
||||
* Plate flags:
|
||||
* cellfile : name of the cell sample file to use as input
|
||||
* wells : the number of wells on the plate
|
||||
* dist : the statistical distribution to use
|
||||
* (if exponential) lambda : the lambda value of the exponential distribution
|
||||
* (if gaussian) stddev : the standard deviation of the gaussian distribution
|
||||
* rand : randomize well populations, take a minimum argument and a maximum argument
|
||||
* populations : number of t cells per well per section (number of arguments determines number of sections)
|
||||
* dropout : plate dropout rate, double from 0.0 to 1.0
|
||||
* output : name of the output file
|
||||
*
|
||||
* Graph flags:
|
||||
* cellfile : name of the cell sample file to use as input
|
||||
* platefile : name of the sample plate file to use as input
|
||||
* output : name of the output file
|
||||
* graphml : output a graphml file
|
||||
* binary : output a serialized binary object file
|
||||
* IF SIMULATING READ DEPTH, ALL THESE ARE REQUIRED. Absence indicates not simulating read depth
|
||||
* readdepth: number of reads per sequence
|
||||
* readerrorprob: probability of reading a sequence incorrectly
|
||||
* errcollisionprob: probability of two read errors being identical
|
||||
*
|
||||
* Match flags:
|
||||
* graphFile : name of graph and data file to use as input
|
||||
* min : minimum number of overlap wells to attempt a matching
|
||||
* max : the maximum number of overlap wells to attempt a matching
|
||||
* maxdiff : (optional) the maximum difference in occupancy to attempt a matching
|
||||
* minpercent : (optional) the minimum percent overlap to attempt a matching.
|
||||
* writefile : (optional) the filename to write results to
|
||||
* output : the values to print to System.out for piping
|
||||
* pv : (optional) calculate p-values
|
||||
*
|
||||
*/
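// Illustrative invocations (argument values are made up; the flags are the ones
// defined in the builders below, and the first flag picks the mode):
//   java -jar BiGpairSEQ_Sim.jar -cells -n 1000 -d 10 -o cells.csv
//   java -jar BiGpairSEQ_Sim.jar -plate -c cells.csv -w 96 -zipf -exp 1.1 -random 50 100 -d 0.1 -o plate.csv
//   java -jar BiGpairSEQ_Sim.jar -graph -c cells.csv -p plate.csv -rd 30 -err 0.01 -errcoll 0.001 -o graph
//   java -jar BiGpairSEQ_Sim.jar -match -g graph -min 3 -max 40 -maxdiff 10 -pv -print-metadata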
|
||||
public class CommandLineInterface {
|
||||
|
||||
public static void startCLI(String[] args) {
|
||||
//Options sets for the different modes
|
||||
Options mainOptions = buildMainOptions();
|
||||
Options cellOptions = buildCellOptions();
|
||||
Options plateOptions = buildPlateOptions();
|
||||
Options graphOptions = buildGraphOptions();
|
||||
Options matchOptions = buildMatchCDR3options();
|
||||
|
||||
CommandLineParser parser = new DefaultParser();
|
||||
try{
|
||||
CommandLine line = parser.parse(mainOptions, Arrays.copyOfRange(args, 0, 1));
|
||||
|
||||
if (line.hasOption("help")) {
|
||||
HelpFormatter formatter = new HelpFormatter();
|
||||
formatter.printHelp("BiGpairSEQ_Sim.jar", mainOptions);
|
||||
System.out.println();
|
||||
formatter.printHelp("BiGpairSEQ_Sim.jar -cells", cellOptions);
|
||||
System.out.println();
|
||||
formatter.printHelp("BiGpairSEQ_Sim.jar -plate", plateOptions);
|
||||
System.out.println();
|
||||
formatter.printHelp("BiGpairSEQ_Sim.jar -graph", graphOptions);
|
||||
System.out.println();
|
||||
formatter.printHelp("BiGpairSEQ_Sim.jar -match", matchOptions);
|
||||
}
|
||||
else if (line.hasOption("version")) {
|
||||
System.out.println("BiGpairSEQ_Sim " + BiGpairSEQ.getVersion());
|
||||
}
|
||||
else if (line.hasOption("cells")) {
|
||||
line = parser.parse(cellOptions, Arrays.copyOfRange(args, 1, args.length));
|
||||
Integer number = Integer.valueOf(line.getOptionValue("n"));
|
||||
Integer diversity = Integer.valueOf(line.getOptionValue("d"));
|
||||
String filename = line.getOptionValue("o");
|
||||
makeCells(filename, number, diversity);
|
||||
}
|
||||
|
||||
else if (line.hasOption("plate")) {
|
||||
line = parser.parse(plateOptions, Arrays.copyOfRange(args, 1, args.length));
|
||||
//get the cells
|
||||
String cellFilename = line.getOptionValue("c");
|
||||
CellSample cells = getCells(cellFilename);
|
||||
//get the rest of the parameters
|
||||
Integer[] populations;
|
||||
String outputFilename = line.getOptionValue("o");
|
||||
Integer numWells = Integer.parseInt(line.getOptionValue("w"));
|
||||
Double dropoutRate = Double.parseDouble(line.getOptionValue("d"));
|
||||
if (line.hasOption("random")) {
|
||||
//Array holding values of minimum and maximum populations
|
||||
Integer[] min_max = Stream.of(line.getOptionValues("random"))
|
||||
.mapToInt(Integer::parseInt)
|
||||
.boxed()
|
||||
.toArray(Integer[]::new);
|
||||
populations = BiGpairSEQ.getRand().ints(min_max[0], min_max[1] + 1)
|
||||
.limit(numWells)
|
||||
.boxed()
|
||||
.toArray(Integer[]::new);
|
||||
}
|
||||
else if (line.hasOption("pop")) {
|
||||
populations = Stream.of(line.getOptionValues("pop"))
|
||||
.mapToInt(Integer::parseInt)
|
||||
.boxed()
|
||||
.toArray(Integer[]::new);
|
||||
}
|
||||
else{
|
||||
populations = new Integer[1];
|
||||
populations[0] = 1;
|
||||
}
|
||||
//make the plate
|
||||
Plate plate;
|
||||
if (line.hasOption("poisson")) {
|
||||
Double stdDev = Math.sqrt(numWells);
|
||||
plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, stdDev);
|
||||
}
|
||||
else if (line.hasOption("gaussian")) {
|
||||
Double stdDev = Double.parseDouble(line.getOptionValue("stddev"));
|
||||
plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, stdDev);
|
||||
}
|
||||
else if (line.hasOption("zipf")) {
|
||||
Double zipfExponent = Double.parseDouble(line.getOptionValue("exp"));
|
||||
plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, zipfExponent);
|
||||
}
|
||||
else {
|
||||
assert line.hasOption("exponential");
|
||||
Double lambda = Double.parseDouble(line.getOptionValue("lambda"));
|
||||
plate = new Plate(cells, cellFilename, numWells, populations, dropoutRate, lambda);
|
||||
}
|
||||
PlateFileWriter writer = new PlateFileWriter(outputFilename, plate);
|
||||
writer.writePlateFile();
|
||||
}
|
||||
|
||||
else if (line.hasOption("graph")) { //Making a graph
|
||||
line = parser.parse(graphOptions, Arrays.copyOfRange(args, 1, args.length));
|
||||
String cellFilename = line.getOptionValue("c");
|
||||
String plateFilename = line.getOptionValue("p");
|
||||
String outputFilename = line.getOptionValue("o");
|
||||
//get cells
|
||||
CellSample cells = getCells(cellFilename);
|
||||
//get plate
|
||||
Plate plate = getPlate(plateFilename);
|
||||
GraphWithMapData graph;
|
||||
Integer readDepth = 1;
|
||||
Double readErrorRate = 0.0;
|
||||
Double errorCollisionRate = 0.0;
|
||||
Double realSequenceCollisionRate = 0.0;
|
||||
if (line.hasOption("rd")) {
|
||||
readDepth = Integer.parseInt(line.getOptionValue("rd"));
|
||||
}
|
||||
if (line.hasOption("err")) {
|
||||
readErrorRate = Double.parseDouble(line.getOptionValue("err"));
|
||||
}
|
||||
if (line.hasOption("errcoll")) {
|
||||
errorCollisionRate = Double.parseDouble(line.getOptionValue("errcoll"));
|
||||
}
|
||||
if (line.hasOption("realcoll")) {
|
||||
realSequenceCollisionRate = Double.parseDouble(line.getOptionValue("realcoll"));
|
||||
}
|
||||
graph = Simulator.makeCDR3Graph(cells, plate, readDepth, readErrorRate, errorCollisionRate,
|
||||
realSequenceCollisionRate, false);
|
||||
if (!line.hasOption("no-binary")) { //output binary file unless told not to
|
||||
GraphDataObjectWriter writer = new GraphDataObjectWriter(outputFilename, graph, false);
|
||||
writer.writeDataToFile();
|
||||
}
|
||||
if (line.hasOption("graphml")) { //if told to, output graphml file
|
||||
GraphMLFileWriter gmlwriter = new GraphMLFileWriter(outputFilename, graph);
|
||||
gmlwriter.writeGraphToFile();
|
||||
}
|
||||
}
|
||||
|
||||
else if (line.hasOption("match")) { //can add a flag for which match type in future, spit this in two
|
||||
line = parser.parse(matchOptions, Arrays.copyOfRange(args, 1, args.length));
|
||||
String graphFilename = line.getOptionValue("g");
|
||||
|
||||
String outputFilename;
|
||||
if(line.hasOption("o")) {
|
||||
outputFilename = line.getOptionValue("o");
|
||||
}
|
||||
else {
|
||||
outputFilename = null;
|
||||
}
|
||||
Integer minThreshold = Integer.parseInt(line.getOptionValue("min"));
|
||||
Integer maxThreshold = Integer.parseInt(line.getOptionValue("max"));
|
||||
int minOverlapPct;
|
||||
if (line.hasOption("minpct")) { //see if this filter is being used
|
||||
minOverlapPct = Integer.parseInt(line.getOptionValue("minpct"));
|
||||
}
|
||||
else {
|
||||
minOverlapPct = 0;
|
||||
}
|
||||
int maxOccupancyDiff;
|
||||
if (line.hasOption("maxdiff")) { //see if this filter is being used
|
||||
maxOccupancyDiff = Integer.parseInt(line.getOptionValue("maxdiff"));
|
||||
}
|
||||
else {
|
||||
maxOccupancyDiff = Integer.MAX_VALUE;
|
||||
}
|
||||
if (line.hasOption("pv")) {
|
||||
BiGpairSEQ.setCalculatePValue(true);
|
||||
}
|
||||
GraphWithMapData graph = getGraph(graphFilename);
|
||||
MatchingResult result = Simulator.matchCDR3s(graph, graphFilename, minThreshold, maxThreshold,
|
||||
maxOccupancyDiff, minOverlapPct, false, BiGpairSEQ.calculatePValue());
|
||||
if(outputFilename != null){
|
||||
MatchingFileWriter writer = new MatchingFileWriter(outputFilename, result);
|
||||
writer.writeResultsToFile();
|
||||
}
|
||||
//can put a bunch of ifs for outputting various things from the MatchingResult to System.out here
|
||||
//after I put those flags in the matchOptions
|
||||
if(line.hasOption("print-metadata")) {
|
||||
for (String k : result.getMetadata().keySet()) {
|
||||
System.out.println(k + ": " + result.getMetadata().get(k));
|
||||
}
|
||||
}
|
||||
if(line.hasOption("print-error")) {
|
||||
System.out.println("pairing error rate: " + result.getPairingErrorRate());
|
||||
}
|
||||
if(line.hasOption("print-attempt")) {
|
||||
System.out.println("pairing attempt rate: " +result.getPairingAttemptRate());
|
||||
}
|
||||
if(line.hasOption("print-correct")) {
|
||||
System.out.println("correct pairings: " + result.getCorrectPairingCount());
|
||||
}
|
||||
if(line.hasOption("print-incorrect")) {
|
||||
System.out.println("incorrect pairings: " + result.getIncorrectPairingCount());
|
||||
}
|
||||
if(line.hasOption("print-alphas")) {
|
||||
System.out.println("total alphas found: " + result.getAlphaCount());
|
||||
}
|
||||
if(line.hasOption("print-betas")) {
|
||||
System.out.println("total betas found: " + result.getBetaCount());
|
||||
}
|
||||
if(line.hasOption("print-time")) {
|
||||
System.out.println("simulation time (seconds): " + result.getSimulationTime());
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (ParseException exp) {
|
||||
System.err.println("Parsing failed. Reason: " + exp.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private static Option outputFileOption() {
|
||||
Option outputFile = Option.builder("o")
|
||||
.longOpt("output-file")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.desc("Name of output file")
|
||||
.required()
|
||||
.build();
|
||||
return outputFile;
|
||||
}
|
||||
|
||||
private static Options buildMainOptions() {
|
||||
Options mainOptions = new Options();
|
||||
Option help = Option.builder("help")
|
||||
.desc("Displays this help menu")
|
||||
.build();
|
||||
Option makeCells = Option.builder("cells")
|
||||
.longOpt("make-cells")
|
||||
.desc("Makes a cell sample file of distinct T cells")
|
||||
.build();
|
||||
Option makePlate = Option.builder("plate")
|
||||
.longOpt("make-plate")
|
||||
.desc("Makes a sample plate file. Requires a cell sample file.")
|
||||
.build();
|
||||
Option makeGraph = Option.builder("graph")
|
||||
.longOpt("make-graph")
|
||||
.desc("Makes a graph/data file. Requires a cell sample file and a sample plate file")
|
||||
.build();
|
||||
Option matchCDR3 = Option.builder("match")
|
||||
.longOpt("match-cdr3")
|
||||
.desc("Matches CDR3s. Requires a graph/data file.")
|
||||
.build();
|
||||
Option printVersion = Option.builder("version")
|
||||
.desc("Prints the program version number to stdout").build();
|
||||
OptionGroup mainGroup = new OptionGroup();
|
||||
mainGroup.addOption(help);
|
||||
mainGroup.addOption(printVersion);
|
||||
mainGroup.addOption(makeCells);
|
||||
mainGroup.addOption(makePlate);
|
||||
mainGroup.addOption(makeGraph);
|
||||
mainGroup.addOption(matchCDR3);
|
||||
mainGroup.setRequired(true);
|
||||
mainOptions.addOptionGroup(mainGroup);
|
||||
return mainOptions;
|
||||
}
|
||||
|
||||
private static Options buildCellOptions() {
|
||||
Options cellOptions = new Options();
|
||||
Option numCells = Option.builder("n")
|
||||
.longOpt("num-cells")
|
||||
.desc("The number of distinct cells to generate")
|
||||
.hasArg()
|
||||
.argName("number")
|
||||
.required().build();
|
||||
Option cdr3Diversity = Option.builder("d")
|
||||
.longOpt("diversity-factor")
|
||||
.desc("The factor by which unique CDR3s outnumber unique CDR1s")
|
||||
.hasArg()
|
||||
.argName("factor")
|
||||
.required().build();
|
||||
cellOptions.addOption(numCells);
|
||||
cellOptions.addOption(cdr3Diversity);
|
||||
cellOptions.addOption(outputFileOption());
|
||||
return cellOptions;
|
||||
}
|
||||
|
||||
private static Options buildPlateOptions() {
|
||||
Options plateOptions = new Options();
|
||||
Option cellFile = Option.builder("c") // add this to plate options
|
||||
.longOpt("cell-file")
|
||||
.desc("The cell sample file to use")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.required().build();
|
||||
Option numWells = Option.builder("w")// add this to plate options
|
||||
.longOpt("wells")
|
||||
.desc("The number of wells on the sample plate")
|
||||
.hasArg()
|
||||
.argName("number")
|
||||
.required().build();
|
||||
//options group for choosing with distribution to use
|
||||
OptionGroup distributions = new OptionGroup();// add this to plate options
|
||||
distributions.setRequired(true);
|
||||
Option poisson = Option.builder("poisson")
|
||||
.desc("Use a Poisson distribution for cell sample")
|
||||
.build();
|
||||
Option gaussian = Option.builder("gaussian")
|
||||
.desc("Use a Gaussian distribution for cell sample")
|
||||
.build();
|
||||
Option exponential = Option.builder("exponential")
|
||||
.desc("Use an exponential distribution for cell sample")
|
||||
.build();
|
||||
Option zipf = Option.builder("zipf")
|
||||
.desc("Use a Zipf distribution for cell sample")
|
||||
.build();
|
||||
distributions.addOption(poisson);
|
||||
distributions.addOption(gaussian);
|
||||
distributions.addOption(exponential);
|
||||
distributions.addOption(zipf);
|
||||
//options group for statistical distribution parameters
|
||||
OptionGroup statParams = new OptionGroup();// add this to plate options
|
||||
Option stdDev = Option.builder("stddev")
|
||||
.desc("If using -gaussian flag, standard deviation for distrbution")
|
||||
.hasArg()
|
||||
.argName("value")
|
||||
.build();
|
||||
Option lambda = Option.builder("lambda")
|
||||
.desc("If using -exponential flag, lambda value for distribution")
|
||||
.hasArg()
|
||||
.argName("value")
|
||||
.build();
|
||||
Option zipfExponent = Option.builder("exp")
|
||||
.desc("If using -zipf flag, exponent value for distribution")
|
||||
.hasArg()
|
||||
.argName("value")
|
||||
.build();
|
||||
statParams.addOption(stdDev);
|
||||
statParams.addOption(lambda);
|
||||
//Option group for random plate or set populations
|
||||
OptionGroup wellPopOptions = new OptionGroup(); // add this to plate options
|
||||
wellPopOptions.setRequired(true);
|
||||
Option randomWellPopulations = Option.builder("random")
|
||||
.desc("Randomize well populations on sample plate. Takes two arguments: the minimum possible population and the maximum possible population.")
|
||||
.hasArgs()
|
||||
.numberOfArgs(2)
|
||||
.argName("min> <max")
|
||||
.build();
|
||||
Option specificWellPopulations = Option.builder("pop")
|
||||
.desc("The well populations for each section of the sample plate. There will be as many sections as there are populations given.")
|
||||
.hasArgs()
|
||||
.argName("number [number]...")
|
||||
.build();
|
||||
Option dropoutRate = Option.builder("d") //add this to plate options
|
||||
.longOpt("dropout-rate")
|
||||
.hasArg()
|
||||
.desc("The sequence dropout rate due to amplification error. (0.0 - 1.0)")
|
||||
.argName("rate")
|
||||
.required()
|
||||
.build();
|
||||
wellPopOptions.addOption(randomWellPopulations);
|
||||
wellPopOptions.addOption(specificWellPopulations);
|
||||
plateOptions.addOption(cellFile);
|
||||
plateOptions.addOption(numWells);
|
||||
plateOptions.addOptionGroup(distributions);
|
||||
plateOptions.addOptionGroup(statParams);
|
||||
plateOptions.addOptionGroup(wellPopOptions);
|
||||
plateOptions.addOption(dropoutRate);
|
||||
plateOptions.addOption(outputFileOption());
|
||||
return plateOptions;
|
||||
}
|
||||
|
||||
private static Options buildGraphOptions() {
|
||||
Options graphOptions = new Options();
|
||||
Option cellFilename = Option.builder("c")
|
||||
.longOpt("cell-file")
|
||||
.desc("Cell sample file to use for checking pairing accuracy")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.required().build();
|
||||
Option plateFilename = Option.builder("p")
|
||||
.longOpt("plate-filename")
|
||||
.desc("Sample plate file from which to construct graph")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.required().build();
|
||||
Option outputGraphML = Option.builder("graphml")
|
||||
.desc("(Optional) Output GraphML file")
|
||||
.build();
|
||||
Option outputSerializedBinary = Option.builder("nb")
|
||||
.longOpt("no-binary")
|
||||
.desc("(Optional) Don't output serialized binary file")
|
||||
.build();
|
||||
Option readDepth = Option.builder("rd")
|
||||
.longOpt("read-depth")
|
||||
.desc("(Optional) The number of times to read each sequence.")
|
||||
.hasArg()
|
||||
.argName("depth")
|
||||
.build();
|
||||
Option readErrorProb = Option.builder("err")
|
||||
.longOpt("read-error-prob")
|
||||
.desc("(Optional) The probability that a sequence will be misread. (0.0 - 1.0)")
|
||||
.hasArg()
|
||||
.argName("prob")
|
||||
.build();
|
||||
Option errorCollisionProb = Option.builder("errcoll")
|
||||
.longOpt("error-collision-prob")
|
||||
.desc("(Optional) The probability that two misreads will produce the same spurious sequence. (0.0 - 1.0)")
|
||||
.hasArg()
|
||||
.argName("prob")
|
||||
.build();
|
||||
Option realSequenceCollisionProb = Option.builder("realcoll")
|
||||
.longOpt("real-collision-prob")
|
||||
.desc("(Optional) The probability that a sequence will be misread " +
|
||||
"as another real sequence. (Only applies to unique misreads; after this has happened once, " +
|
||||
"future error collisions could produce the real sequence again) (0.0 - 1.0)")
|
||||
.hasArg()
|
||||
.argName("prob")
|
||||
.build();
|
||||
graphOptions.addOption(cellFilename);
|
||||
graphOptions.addOption(plateFilename);
|
||||
graphOptions.addOption(outputFileOption());
|
||||
graphOptions.addOption(outputGraphML);
|
||||
graphOptions.addOption(outputSerializedBinary);
|
||||
graphOptions.addOption(readDepth);
|
||||
graphOptions.addOption(readErrorProb);
|
||||
graphOptions.addOption(errorCollisionProb);
|
||||
graphOptions.addOption(realSequenceCollisionProb);
|
||||
return graphOptions;
|
||||
}
|
||||
|
||||
private static Options buildMatchCDR3options() {
|
||||
Options matchCDR3options = new Options();
|
||||
Option graphFilename = Option.builder("g")
|
||||
.longOpt("graph-file")
|
||||
.desc("The graph/data file to use")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.required().build();
|
||||
Option minOccupancyOverlap = Option.builder("min")
|
||||
.desc("The minimum number of shared wells to attempt to match a sequence pair")
|
||||
.hasArg()
|
||||
.argName("number")
|
||||
.required().build();
|
||||
Option maxOccupancyOverlap = Option.builder("max")
|
||||
.desc("The maximum number of shared wells to attempt to match a sequence pair")
|
||||
.hasArg()
|
||||
.argName("number")
|
||||
.required().build();
|
||||
Option minOverlapPercent = Option.builder("minpct")
|
||||
.desc("(Optional) The minimum percentage of a sequence's total occupancy shared by another sequence to attempt matching. (0 - 100) ")
|
||||
.hasArg()
|
||||
.argName("percent")
|
||||
.build();
|
||||
Option maxOccupancyDifference = Option.builder("maxdiff")
|
||||
.desc("(Optional) The maximum difference in total occupancy between two sequences to attempt matching.")
|
||||
.hasArg()
|
||||
.argName("number")
|
||||
.build();
|
||||
Option outputFile = Option.builder("o") //can't call the method this time, because this one's optional
|
||||
.longOpt("output-file")
|
||||
.hasArg()
|
||||
.argName("filename")
|
||||
.desc("(Optional) Name of output the output file. If not present, no file will be written.")
|
||||
.build();
|
||||
Option pValue = Option.builder("pv") //can't call the method this time, because this one's optional
|
||||
.longOpt("p-value")
|
||||
.desc("(Optional) Calculate p-values for sequence pairs.")
|
||||
.build();
|
||||
matchCDR3options.addOption(graphFilename)
|
||||
.addOption(minOccupancyOverlap)
|
||||
.addOption(maxOccupancyOverlap)
|
||||
.addOption(minOverlapPercent)
|
||||
.addOption(maxOccupancyDifference)
|
||||
.addOption(outputFile)
|
||||
.addOption(pValue);
|
||||
|
||||
//options for output to System.out
|
||||
Option printAlphaCount = Option.builder().longOpt("print-alphas")
|
||||
.desc("(Optional) Print the number of distinct alpha sequences to stdout.").build();
|
||||
Option printBetaCount = Option.builder().longOpt("print-betas")
|
||||
.desc("(Optional) Print the number of distinct beta sequences to stdout.").build();
|
||||
Option printTime = Option.builder().longOpt("print-time")
|
||||
.desc("(Optional) Print the total simulation time to stdout.").build();
|
||||
Option printErrorRate = Option.builder().longOpt("print-error")
|
||||
.desc("(Optional) Print the pairing error rate to stdout").build();
|
||||
Option printAttempt = Option.builder().longOpt("print-attempt")
|
||||
.desc("(Optional) Print the pairing attempt rate to stdout").build();
|
||||
Option printCorrect = Option.builder().longOpt("print-correct")
|
||||
.desc("(Optional) Print the number of correct pairs to stdout").build();
|
||||
Option printIncorrect = Option.builder().longOpt("print-incorrect")
|
||||
.desc("(Optional) Print the number of incorrect pairs to stdout").build();
|
||||
Option printMetadata = Option.builder().longOpt("print-metadata")
|
||||
.desc("(Optional) Print a full summary of the matching results to stdout.").build();
|
||||
|
||||
matchCDR3options
|
||||
.addOption(printErrorRate)
|
||||
.addOption(printAttempt)
|
||||
.addOption(printCorrect)
|
||||
.addOption(printIncorrect)
|
||||
.addOption(printMetadata)
|
||||
.addOption(printAlphaCount)
|
||||
.addOption(printBetaCount)
|
||||
.addOption(printTime);
|
||||
return matchCDR3options;
|
||||
}
|
||||
|
||||
|
||||
|
||||
private static CellSample getCells(String cellFilename) {
|
||||
assert cellFilename != null;
|
||||
CellFileReader reader = new CellFileReader(cellFilename);
|
||||
return reader.getCellSample();
|
||||
}
|
||||
|
||||
private static Plate getPlate(String plateFilename) {
|
||||
assert plateFilename != null;
|
||||
PlateFileReader reader = new PlateFileReader(plateFilename);
|
||||
return reader.getSamplePlate();
|
||||
}
|
||||
|
||||
private static GraphWithMapData getGraph(String graphFilename) {
|
||||
assert graphFilename != null;
|
||||
try{
|
||||
GraphDataObjectReader reader = new GraphDataObjectReader(graphFilename, false);
|
||||
return reader.getData();
|
||||
|
||||
}
|
||||
catch (IOException ex) {
|
||||
ex.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
//for calling from command line
|
||||
public static void makeCells(String filename, Integer numCells, Integer cdr1Freq) {
|
||||
CellSample sample = new CellSample(numCells, cdr1Freq);
|
||||
CellFileWriter writer = new CellFileWriter(filename, sample);
|
||||
writer.writeCellsToFile();
|
||||
}
|
||||
}
|
||||
6
src/main/java/DistributionType.java
Normal file
@@ -0,0 +1,6 @@
public enum DistributionType {
    POISSON,
    GAUSSIAN,
    EXPONENTIAL,
    ZIPF
}
@@ -4,10 +4,6 @@ import java.math.MathContext;
|
||||
|
||||
public abstract class Equations {
|
||||
|
||||
public static int getRandomNumber(int min, int max) {
|
||||
return (int) ((Math.random() * (max - min)) + min);
|
||||
}
|
||||
|
||||
//pValue calculation as described in original pairSEQ paper.
|
||||
//Included for comparison with original results.
|
||||
//Not used by BiGpairSEQ for matching.
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import java.io.*;
|
||||
|
||||
public class GraphDataObjectReader {
|
||||
|
||||
private GraphWithMapData data;
|
||||
private String filename;
|
||||
|
||||
public GraphDataObjectReader(String filename) throws IOException {
|
||||
|
||||
public GraphDataObjectReader(String filename, boolean verbose) throws IOException {
|
||||
if(!filename.matches(".*\\.ser")){
|
||||
filename = filename + ".ser";
|
||||
}
|
||||
@@ -13,8 +15,13 @@ public class GraphDataObjectReader {
|
||||
BufferedInputStream fileIn = new BufferedInputStream(new FileInputStream(filename));
|
||||
ObjectInputStream in = new ObjectInputStream(fileIn))
|
||||
{
|
||||
if (verbose) {
|
||||
System.out.println("Reading graph data from file. This may take some time");
|
||||
System.out.println("File I/O time is not included in results");
|
||||
}
|
||||
data = (GraphWithMapData) in.readObject();
|
||||
} catch (FileNotFoundException | ClassNotFoundException ex) {
|
||||
System.out.println("Graph/data file " + filename + " not found.");
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import org.jgrapht.Graph;
|
||||
|
||||
import java.io.BufferedOutputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
@@ -7,6 +9,7 @@ public class GraphDataObjectWriter {
|
||||
|
||||
private GraphWithMapData data;
|
||||
private String filename;
|
||||
private boolean verbose = true;
|
||||
|
||||
public GraphDataObjectWriter(String filename, GraphWithMapData data) {
|
||||
if(!filename.matches(".*\\.ser")){
|
||||
@@ -16,10 +19,24 @@ public class GraphDataObjectWriter {
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
public GraphDataObjectWriter(String filename, GraphWithMapData data, boolean verbose) {
|
||||
this.verbose = verbose;
|
||||
if(!filename.matches(".*\\.ser")){
|
||||
filename = filename + ".ser";
|
||||
}
|
||||
this.filename = filename;
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
public void writeDataToFile() {
|
||||
try (BufferedOutputStream bufferedOut = new BufferedOutputStream(new FileOutputStream(filename));
|
||||
|
||||
ObjectOutputStream out = new ObjectOutputStream(bufferedOut);
|
||||
){
|
||||
if(verbose) {
|
||||
System.out.println("Writing graph and occupancy data to file. This may take some time.");
|
||||
System.out.println("File I/O time is not included in results.");
|
||||
}
|
||||
out.writeObject(data);
|
||||
} catch (IOException ex) {
|
||||
ex.printStackTrace();
|
||||
|
||||
@@ -1,35 +0,0 @@
|
||||
import org.jgrapht.graph.SimpleWeightedGraph;
|
||||
import org.jgrapht.nio.graphml.GraphMLImporter;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
|
||||
public class GraphMLFileReader {
|
||||
|
||||
private String filename;
|
||||
private SimpleWeightedGraph graph;
|
||||
|
||||
public GraphMLFileReader(String filename, SimpleWeightedGraph graph) {
|
||||
if(!filename.matches(".*\\.graphml")){
|
||||
filename = filename + ".graphml";
|
||||
}
|
||||
this.filename = filename;
|
||||
this.graph = graph;
|
||||
|
||||
try(//don't need to close reader bc of try-with-resources auto-closing
|
||||
BufferedReader reader = Files.newBufferedReader(Path.of(filename));
|
||||
){
|
||||
GraphMLImporter<SimpleWeightedGraph, BufferedReader> importer = new GraphMLImporter<>();
|
||||
importer.importGraph(graph, reader);
|
||||
}
|
||||
catch (IOException ex) {
|
||||
System.out.println("Graph file " + filename + " not found.");
|
||||
System.err.println(ex);
|
||||
}
|
||||
}
|
||||
|
||||
public SimpleWeightedGraph getGraph() { return graph; }
|
||||
|
||||
}
|
||||
@@ -1,20 +1,38 @@
|
||||
import org.jgrapht.graph.DefaultWeightedEdge;
|
||||
import org.jgrapht.graph.SimpleWeightedGraph;
|
||||
import org.jgrapht.nio.dot.DOTExporter;
|
||||
import org.jgrapht.nio.Attribute;
|
||||
import org.jgrapht.nio.AttributeType;
|
||||
import org.jgrapht.nio.DefaultAttribute;
|
||||
import org.jgrapht.nio.graphml.GraphMLExporter;
|
||||
import org.jgrapht.nio.graphml.GraphMLExporter.AttributeCategory;
|
||||
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
|
||||
public class GraphMLFileWriter {
|
||||
|
||||
String filename;
|
||||
SimpleWeightedGraph graph;
|
||||
GraphWithMapData data;
|
||||
Map<String, Attribute> graphAttributes;
|
||||
|
||||
public GraphMLFileWriter(String filename, GraphWithMapData data) {
|
||||
if(!filename.matches(".*\\.graphml")){
|
||||
filename = filename + ".graphml";
|
||||
}
|
||||
this.filename = filename;
|
||||
this.data = data;
|
||||
this.graph = data.getGraph();
|
||||
graphAttributes = createGraphAttributes();
|
||||
}
|
||||
|
||||
public GraphMLFileWriter(String filename, SimpleWeightedGraph graph) {
|
||||
public GraphMLFileWriter(String filename, SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph) {
|
||||
if(!filename.matches(".*\\.graphml")){
|
||||
filename = filename + ".graphml";
|
||||
}
|
||||
@@ -22,10 +40,75 @@ public class GraphMLFileWriter {
|
||||
this.graph = graph;
|
||||
}
|
||||
|
||||
private Map<String, Attribute> createGraphAttributes(){
|
||||
Map<String, Attribute> attributes = new HashMap<>();
|
||||
//Sample plate filename
|
||||
attributes.put("sample plate filename", DefaultAttribute.createAttribute(data.getPlateFilename()));
|
||||
// Number of wells
|
||||
attributes.put("well count", DefaultAttribute.createAttribute(data.getNumWells().toString()));
|
||||
//Well populations
|
||||
Integer[] wellPopulations = data.getWellPopulations();
|
||||
StringBuilder populationsStringBuilder = new StringBuilder();
|
||||
populationsStringBuilder.append(wellPopulations[0].toString());
|
||||
for(int i = 1; i < wellPopulations.length; i++){
|
||||
populationsStringBuilder.append(", ");
|
||||
populationsStringBuilder.append(wellPopulations[i].toString());
|
||||
}
|
||||
String wellPopulationsString = populationsStringBuilder.toString();
|
||||
attributes.put("well populations", DefaultAttribute.createAttribute(wellPopulationsString));
|
||||
attributes.put("read depth", DefaultAttribute.createAttribute(data.getReadDepth().toString()));
|
||||
attributes.put("read error rate", DefaultAttribute.createAttribute(data.getReadErrorRate().toString()));
|
||||
attributes.put("error collision rate", DefaultAttribute.createAttribute(data.getErrorCollisionRate().toString()));
|
||||
attributes.put("real sequence collision rate", DefaultAttribute.createAttribute(data.getRealSequenceCollisionRate()));
|
||||
return attributes;
|
||||
}
|
||||
|
||||
private Map<String, Attribute> createVertexAttributes(Vertex v){
|
||||
Map<String, Attribute> attributes = new HashMap<>();
|
||||
//sequence type
|
||||
attributes.put("type", DefaultAttribute.createAttribute(v.getType().name()));
|
||||
//sequence
|
||||
attributes.put("sequence", DefaultAttribute.createAttribute(v.getSequence()));
|
||||
//number of wells the sequence appears in
|
||||
attributes.put("occupancy", DefaultAttribute.createAttribute(v.getOccupancy()));
|
||||
//total number of times the sequence was read
|
||||
attributes.put("total read count", DefaultAttribute.createAttribute(v.getReadCount()));
|
||||
StringBuilder wellsAndReadCountsBuilder = new StringBuilder();
|
||||
Iterator<Map.Entry<Integer, Integer>> wellOccupancies = v.getWellOccupancies().entrySet().iterator();
|
||||
while (wellOccupancies.hasNext()) {
|
||||
Map.Entry<Integer, Integer> entry = wellOccupancies.next();
|
||||
wellsAndReadCountsBuilder.append(entry.getKey() + ":" + entry.getValue());
|
||||
if (wellOccupancies.hasNext()) {
|
||||
wellsAndReadCountsBuilder.append(", ");
|
||||
}
|
||||
}
|
||||
String wellsAndReadCounts = wellsAndReadCountsBuilder.toString();
|
||||
//the wells the sequence appears in and the read counts in those wells
|
||||
attributes.put("wells:read counts", DefaultAttribute.createAttribute(wellsAndReadCounts));
|
||||
return attributes;
|
||||
}
|
||||
|
||||
public void writeGraphToFile() {
|
||||
try(BufferedWriter writer = Files.newBufferedWriter(Path.of(filename), StandardOpenOption.CREATE_NEW);
|
||||
){
|
||||
GraphMLExporter<SimpleWeightedGraph, BufferedWriter> exporter = new GraphMLExporter<>();
|
||||
//create exporter. Let the vertex labels be the unique ids for the vertices
|
||||
GraphMLExporter<Vertex, SimpleWeightedGraph<Vertex, DefaultWeightedEdge>> exporter = new GraphMLExporter<>(v -> v.getVertexLabel().toString());
|
||||
//set to export weights
|
||||
exporter.setExportEdgeWeights(true);
|
||||
//Set graph attributes
|
||||
exporter.setGraphAttributeProvider( () -> graphAttributes);
|
||||
//set type, sequence, and occupancy attributes for each vertex
|
||||
exporter.setVertexAttributeProvider(this::createVertexAttributes);
|
||||
//register the attributes
|
||||
for(String s : graphAttributes.keySet()) {
|
||||
exporter.registerAttribute(s, AttributeCategory.GRAPH, AttributeType.STRING);
|
||||
}
|
||||
exporter.registerAttribute("type", AttributeCategory.NODE, AttributeType.STRING);
|
||||
exporter.registerAttribute("sequence", AttributeCategory.NODE, AttributeType.STRING);
|
||||
exporter.registerAttribute("occupancy", AttributeCategory.NODE, AttributeType.STRING);
|
||||
exporter.registerAttribute("total read count", AttributeCategory.NODE, AttributeType.STRING);
|
||||
exporter.registerAttribute("wells:read counts", AttributeCategory.NODE, AttributeType.STRING);
|
||||
//export the graph
|
||||
exporter.exportGraph(graph, writer);
|
||||
} catch(IOException ex){
|
||||
System.out.println("Could not make new file named "+filename);
|
||||
|
||||
138
src/main/java/GraphModificationFunctions.java
Normal file
@@ -0,0 +1,138 @@
|
||||
import org.jgrapht.graph.DefaultWeightedEdge;
|
||||
import org.jgrapht.graph.SimpleWeightedGraph;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public interface GraphModificationFunctions {
|
||||
|
||||
//remove over- and under-weight edges, return removed edges
|
||||
static Map<Vertex[], Integer> filterByOverlapThresholds(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
int low, int high, boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
if ((graph.getEdgeWeight(e) > high) || (graph.getEdgeWeight(e) < low)) {
|
||||
if(saveEdges) {
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
Vertex target = graph.getEdgeTarget(e);
|
||||
Integer weight = (int) graph.getEdgeWeight(e);
|
||||
Vertex[] edge = {source, target};
|
||||
removedEdges.put(edge, weight);
|
||||
}
|
||||
else {
|
||||
graph.setEdgeWeight(e, 0.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(saveEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
graph.removeEdge(edge[0], edge[1]);
|
||||
}
|
||||
}
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
//Remove edges for pairs with large occupancy discrepancy, return removed edges
|
||||
static Map<Vertex[], Integer> filterByRelativeOccupancy(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
Integer maxOccupancyDifference, boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
Integer alphaOcc = graph.getEdgeSource(e).getOccupancy();
|
||||
Integer betaOcc = graph.getEdgeTarget(e).getOccupancy();
|
||||
if (Math.abs(alphaOcc - betaOcc) >= maxOccupancyDifference) {
|
||||
if (saveEdges) {
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
Vertex target = graph.getEdgeTarget(e);
|
||||
Integer weight = (int) graph.getEdgeWeight(e);
|
||||
Vertex[] edge = {source, target};
|
||||
removedEdges.put(edge, weight);
|
||||
}
|
||||
else {
|
||||
graph.setEdgeWeight(e, 0.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(saveEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
graph.removeEdge(edge[0], edge[1]);
|
||||
}
|
||||
}
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
//Remove edges for pairs where overlap size is significantly lower than the well occupancy, return removed edges
|
||||
static Map<Vertex[], Integer> filterByOverlapPercent(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
Integer minOverlapPercent,
|
||||
boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
Integer alphaOcc = graph.getEdgeSource(e).getOccupancy();
|
||||
Integer betaOcc = graph.getEdgeTarget(e).getOccupancy();
|
||||
double weight = graph.getEdgeWeight(e);
|
||||
double min = minOverlapPercent / 100.0;
|
||||
if ((weight / alphaOcc < min) || (weight / betaOcc < min)) {
|
||||
if (saveEdges) {
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
Vertex target = graph.getEdgeTarget(e);
|
||||
Integer intWeight = (int) graph.getEdgeWeight(e);
|
||||
Vertex[] edge = {source, target};
|
||||
removedEdges.put(edge, intWeight);
|
||||
}
|
||||
else {
|
||||
graph.setEdgeWeight(e, 0.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(saveEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
graph.removeEdge(edge[0], edge[1]);
|
||||
}
|
||||
}
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
static Map<Vertex[], Integer> filterByRelativeReadCount (SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph, Integer threshold, boolean saveEdges) {
|
||||
Map<Vertex[], Integer> removedEdges = new HashMap<>();
|
||||
Boolean passes;
|
||||
for (DefaultWeightedEdge e : graph.edgeSet()) {
|
||||
Integer alphaReadCount = graph.getEdgeSource(e).getReadCount();
|
||||
Integer betaReadCount = graph.getEdgeTarget(e).getReadCount();
|
||||
passes = RelativeReadCountFilterFunction(threshold, alphaReadCount, betaReadCount);
|
||||
if (!passes) {
|
||||
if (saveEdges) {
|
||||
Vertex source = graph.getEdgeSource(e);
|
||||
Vertex target = graph.getEdgeTarget(e);
|
||||
Integer intWeight = (int) graph.getEdgeWeight(e);
|
||||
Vertex[] edge = {source, target};
|
||||
removedEdges.put(edge, intWeight);
|
||||
}
|
||||
else {
|
||||
graph.setEdgeWeight(e, 0.0);
|
||||
}
|
||||
}
|
||||
}
|
||||
if(saveEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
graph.removeEdge(edge[0], edge[1]);
|
||||
}
|
||||
}
|
||||
return removedEdges;
|
||||
}
|
||||
|
||||
static Boolean RelativeReadCountFilterFunction(Integer threshold, Integer alphaReadCount, Integer betaReadCount) {
|
||||
return Math.abs(alphaReadCount - betaReadCount) < threshold;
|
||||
}
|
||||
|
||||
static void addRemovedEdges(SimpleWeightedGraph<Vertex, DefaultWeightedEdge> graph,
|
||||
Map<Vertex[], Integer> removedEdges) {
|
||||
for (Vertex[] edge : removedEdges.keySet()) {
|
||||
DefaultWeightedEdge e = graph.addEdge(edge[0], edge[1]);
|
||||
graph.setEdgeWeight(e, removedEdges.get(edge));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
@@ -6,41 +6,57 @@ import java.util.Map;
|
||||
//Can't just write the graph, because I need the occupancy data too.
|
||||
//Makes most sense to serialize object and write that to a file.
|
||||
//Which means there's no reason to split map data and graph data up.
|
||||
//Custom vertex class means a lot of the map data can now be encoded in the graph itself
|
||||
public class GraphWithMapData implements java.io.Serializable {
|
||||
|
||||
private String sourceFilename;
|
||||
private String cellFilename;
|
||||
private int cellSampleSize;
|
||||
private String plateFilename;
|
||||
private final SimpleWeightedGraph graph;
|
||||
private Integer numWells;
|
||||
private Integer[] wellConcentrations;
|
||||
private Integer alphaCount;
|
||||
private Integer betaCount;
|
||||
private final Map<Integer, Integer> distCellsMapAlphaKey;
|
||||
private final Map<Integer, Integer> plateVtoAMap;
|
||||
private final Map<Integer, Integer> plateVtoBMap;
|
||||
private final Map<Integer, Integer> plateAtoVMap;
|
||||
private final Map<Integer, Integer> plateBtoVMap;
|
||||
private final Map<Integer, Integer> alphaWellCounts;
|
||||
private final Map<Integer, Integer> betaWellCounts;
|
||||
private final int numWells;
|
||||
private final Integer[] wellPopulations;
|
||||
private final int alphaCount;
|
||||
private final int betaCount;
|
||||
private final double dropoutRate;
|
||||
private final int readDepth;
|
||||
private final double readErrorRate;
|
||||
private final double errorCollisionRate;
|
||||
private final double realSequenceCollisionRate;
|
||||
private final Map<String, String> distCellsMapAlphaKey;
|
||||
// private final Map<Integer, Integer> plateVtoAMap;
|
||||
// private final Map<Integer, Integer> plateVtoBMap;
|
||||
// private final Map<Integer, Integer> plateAtoVMap;
|
||||
// private final Map<Integer, Integer> plateBtoVMap;
|
||||
// private final Map<Integer, Integer> alphaWellCounts;
|
||||
// private final Map<Integer, Integer> betaWellCounts;
|
||||
private final Duration time;
|
||||
|
||||
public GraphWithMapData(SimpleWeightedGraph graph, Integer numWells, Integer[] wellConcentrations,
|
||||
Integer alphaCount, Integer betaCount,
|
||||
Map<Integer, Integer> distCellsMapAlphaKey, Map<Integer, Integer> plateVtoAMap,
|
||||
Map<Integer,Integer> plateVtoBMap, Map<Integer, Integer> plateAtoVMap,
|
||||
Map<Integer, Integer> plateBtoVMap, Map<Integer, Integer> alphaWellCounts,
|
||||
Map<Integer, Integer> betaWellCounts, Duration time) {
|
||||
Map<String, String> distCellsMapAlphaKey, Integer alphaCount, Integer betaCount,
|
||||
Double dropoutRate, Integer readDepth, Double readErrorRate, Double errorCollisionRate,
|
||||
Double realSequenceCollisionRate, Duration time){
|
||||
|
||||
// Map<Integer, Integer> plateVtoAMap,
|
||||
// Map<Integer,Integer> plateVtoBMap, Map<Integer, Integer> plateAtoVMap,
|
||||
// Map<Integer, Integer> plateBtoVMap, Map<Integer, Integer> alphaWellCounts,
|
||||
// Map<Integer, Integer> betaWellCounts,) {
|
||||
this.graph = graph;
|
||||
this.numWells = numWells;
|
||||
this.wellConcentrations = wellConcentrations;
|
||||
this.wellPopulations = wellConcentrations;
|
||||
this.alphaCount = alphaCount;
|
||||
this.betaCount = betaCount;
|
||||
this.distCellsMapAlphaKey = distCellsMapAlphaKey;
|
||||
this.plateVtoAMap = plateVtoAMap;
|
||||
this.plateVtoBMap = plateVtoBMap;
|
||||
this.plateAtoVMap = plateAtoVMap;
|
||||
this.plateBtoVMap = plateBtoVMap;
|
||||
this.alphaWellCounts = alphaWellCounts;
|
||||
this.betaWellCounts = betaWellCounts;
|
||||
// this.plateVtoAMap = plateVtoAMap;
|
||||
// this.plateVtoBMap = plateVtoBMap;
|
||||
// this.plateAtoVMap = plateAtoVMap;
|
||||
// this.plateBtoVMap = plateBtoVMap;
|
||||
// this.alphaWellCounts = alphaWellCounts;
|
||||
// this.betaWellCounts = betaWellCounts;
|
||||
this.dropoutRate = dropoutRate;
|
||||
this.readDepth = readDepth;
|
||||
this.readErrorRate = readErrorRate;
|
||||
this.errorCollisionRate = errorCollisionRate;
|
||||
this.realSequenceCollisionRate = realSequenceCollisionRate;
|
||||
this.time = time;
|
||||
}
|
||||
|
||||
@@ -52,8 +68,8 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
return numWells;
|
||||
}
|
||||
|
||||
public Integer[] getWellConcentrations() {
|
||||
return wellConcentrations;
|
||||
public Integer[] getWellPopulations() {
|
||||
return wellPopulations;
|
||||
}
|
||||
|
||||
public Integer getAlphaCount() {
|
||||
@@ -64,43 +80,65 @@ public class GraphWithMapData implements java.io.Serializable {
|
||||
return betaCount;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getDistCellsMapAlphaKey() {
|
||||
public Map<String, String> getDistCellsMapAlphaKey() {
|
||||
return distCellsMapAlphaKey;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getPlateVtoAMap() {
|
||||
return plateVtoAMap;
|
||||
}
|
||||
// public Map<Integer, Integer> getPlateVtoAMap() {
|
||||
// return plateVtoAMap;
|
||||
// }
|
||||
//
|
||||
// public Map<Integer, Integer> getPlateVtoBMap() {
|
||||
// return plateVtoBMap;
|
||||
// }
|
||||
//
|
||||
// public Map<Integer, Integer> getPlateAtoVMap() {
|
||||
// return plateAtoVMap;
|
||||
// }
|
||||
//
|
||||
// public Map<Integer, Integer> getPlateBtoVMap() {
|
||||
// return plateBtoVMap;
|
||||
// }
|
||||
//
|
||||
// public Map<Integer, Integer> getAlphaWellCounts() {
|
||||
// return alphaWellCounts;
|
||||
// }
|
||||
//
|
||||
// public Map<Integer, Integer> getBetaWellCounts() {
|
||||
// return betaWellCounts;
|
||||
// }
|
||||
|
||||
public Map<Integer, Integer> getPlateVtoBMap() {
|
||||
return plateVtoBMap;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getPlateAtoVMap() {
|
||||
return plateAtoVMap;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getPlateBtoVMap() {
|
||||
return plateBtoVMap;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getAlphaWellCounts() {
|
||||
return alphaWellCounts;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getBetaWellCounts() {
|
||||
return betaWellCounts;
|
||||
}
|
||||
public Integer getReadDepth() { return readDepth; }
|
||||
|
||||
public Duration getTime() {
|
||||
return time;
|
||||
}
|
||||
|
||||
public void setSourceFilename(String filename) {
|
||||
this.sourceFilename = filename;
|
||||
public void setCellFilename(String filename) { this.cellFilename = filename; }
|
||||
|
||||
public String getCellFilename() { return this.cellFilename; }
|
||||
|
||||
public Integer getCellSampleSize() { return this.cellSampleSize; }
|
||||
|
||||
public void setCellSampleSize(int size) { this.cellSampleSize = size;}
|
||||
|
||||
public void setPlateFilename(String filename) {
|
||||
this.plateFilename = filename;
|
||||
}
|
||||
|
||||
public String getSourceFilename() {
|
||||
return sourceFilename;
|
||||
public String getPlateFilename() {
|
||||
return plateFilename;
|
||||
}
|
||||
|
||||
public Double getReadErrorRate() {
|
||||
return readErrorRate;
|
||||
}
|
||||
|
||||
public Double getErrorCollisionRate() {
|
||||
return errorCollisionRate;
|
||||
}
|
||||
|
||||
public Double getRealSequenceCollisionRate() { return realSequenceCollisionRate; }
|
||||
|
||||
public Double getDropoutRate() { return dropoutRate; }
|
||||
}
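Since GraphWithMapData is Serializable by design (see the comments above), a plain object-stream round trip is all that is needed; the GraphDataObjectWriter/Reader used elsewhere in this diff presumably wrap something like the sketch below. The file name is a placeholder, `data` is an existing GraphWithMapData instance, and exception handling is omitted.

//Sketch: write the whole object (graph plus occupancy metadata) in one shot...
try (ObjectOutputStream out = new ObjectOutputStream(Files.newOutputStream(Path.of("graphData.dat")))) {
    out.writeObject(data);
}
//...and read it back later without splitting map data from graph data.
try (ObjectInputStream in = new ObjectInputStream(Files.newInputStream(Path.of("graphData.dat")))) {
    GraphWithMapData restored = (GraphWithMapData) in.readObject();
}
//Requires java.io.ObjectInputStream/ObjectOutputStream and java.nio.file.Files/Path imports.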
|
||||
|
||||
4
src/main/java/HeapType.java
Normal file
@@ -0,0 +1,4 @@
|
||||
public enum HeapType {
|
||||
FIBONACCI,
|
||||
PAIRING
|
||||
}
|
||||
654
src/main/java/InteractiveInterface.java
Normal file
@@ -0,0 +1,654 @@
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
//
|
||||
public class InteractiveInterface {
|
||||
|
||||
private static final Random rand = BiGpairSEQ.getRand();
|
||||
private static final Scanner sc = new Scanner(System.in);
|
||||
private static int input;
|
||||
private static boolean quit = false;
|
||||
|
||||
public static void startInteractive() {
|
||||
|
||||
while (!quit) {
|
||||
System.out.println();
|
||||
System.out.println("--------BiGPairSEQ SIMULATOR--------");
|
||||
System.out.println("ALPHA/BETA T CELL RECEPTOR MATCHING");
|
||||
System.out.println(" USING WEIGHTED BIPARTITE GRAPHS ");
|
||||
System.out.println("------------------------------------");
|
||||
System.out.println("Please select an option:");
|
||||
System.out.println("1) Generate a population of distinct cells");
|
||||
System.out.println("2) Generate a sample plate of T cells");
|
||||
System.out.println("3) Generate CDR3 alpha/beta occupancy data and overlap graph");
|
||||
System.out.println("4) Simulate bipartite graph CDR3 alpha/beta matching (BiGpairSEQ)");
|
||||
//Need to re-do the CDR3/CDR1 matching to correspond to new pattern
|
||||
//System.out.println("5) Generate CDR3/CDR1 occupancy graph");
|
||||
//System.out.println("6) Simulate CDR3/CDR1 T cell matching");
|
||||
System.out.println("8) Options");
|
||||
System.out.println("9) About/Acknowledgments");
|
||||
System.out.println("0) Exit");
|
||||
try {
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> makeCells();
|
||||
case 2 -> makePlate();
|
||||
case 3 -> makeCDR3Graph();
|
||||
case 4 -> matchCDR3s();
|
||||
//case 6 -> matchCellsCDR1();
|
||||
case 8 -> mainOptions();
|
||||
case 9 -> acknowledge();
|
||||
case 0 -> quit = true;
|
||||
default -> System.out.println("Invalid input.");
|
||||
}
|
||||
} catch (InputMismatchException | IOException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
}
|
||||
sc.close();
|
||||
}
|
||||
|
||||
private static void makeCells() {
|
||||
String filename = null;
|
||||
Integer numCells = 0;
|
||||
Integer cdr1Freq = 1;
|
||||
try {
|
||||
System.out.println("\nSimulated T-Cells consist of integer values representing:\n" +
|
||||
"* a pair of alpha and beta CDR3 peptides (unique within simulated population)\n" +
|
||||
"* a pair of alpha and beta CDR1 peptides (not necessarily unique).");
|
||||
System.out.println("\nThe cells will be written to a CSV file.");
|
||||
System.out.print("Please enter a file name: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nCDR3 sequences are more diverse than CDR1 sequences.");
|
||||
System.out.println("Please enter the factor by which distinct CDR3s outnumber CDR1s: ");
|
||||
cdr1Freq = sc.nextInt();
|
||||
System.out.print("\nPlease enter the number of T-cells to generate: ");
|
||||
numCells = sc.nextInt();
|
||||
if(numCells <= 0){
|
||||
throw new InputMismatchException("Number of cells must be a positive integer.");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
CellSample sample = new CellSample(numCells, cdr1Freq);
|
||||
assert filename != null;
|
||||
System.out.println("Writing cells to file");
|
||||
CellFileWriter writer = new CellFileWriter(filename, sample);
|
||||
writer.writeCellsToFile();
|
||||
System.out.println("Cell sample written to: " + filename);
|
||||
if(BiGpairSEQ.cacheCells()) {
|
||||
BiGpairSEQ.setCellSampleInMemory(sample, filename);
|
||||
}
|
||||
}
|
||||
|
||||
//Output a CSV of sample plate
|
||||
private static void makePlate() {
|
||||
String cellFile = null;
|
||||
String filename = null;
|
||||
Double parameter = 0.0;
|
||||
Integer numWells = 0;
|
||||
Integer numSections;
|
||||
Integer[] populations = {1};
|
||||
Double dropOutRate = 0.0;
|
||||
try {
|
||||
System.out.println("\nSimulated sample plates consist of:");
|
||||
System.out.println("* a number of wells");
|
||||
System.out.println(" * separated into one or more sections");
|
||||
System.out.println(" * each of which has a set quantity of cells per well");
|
||||
System.out.println(" * selected from a statistical distribution of distinct cells");
|
||||
System.out.println(" * with a set dropout rate for individual sequences within a cell");
|
||||
System.out.println("\nMaking a sample plate requires a population of distinct cells");
|
||||
System.out.print("Please enter name of an existing cell sample file: ");
|
||||
cellFile = sc.next();
|
||||
System.out.println("\nThe sample plate will be written to a CSV file");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nSelect T-cell frequency distribution function");
|
||||
System.out.println("1) Poisson");
|
||||
System.out.println("2) Gaussian");
|
||||
System.out.println("3) Exponential");
|
||||
System.out.println("4) Zipf");
|
||||
|
||||
System.out.println("(Note: wider distributions are more memory intensive to match)");
|
||||
System.out.print("Enter selection value: ");
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> {
|
||||
BiGpairSEQ.setDistributionType(DistributionType.POISSON);
|
||||
}
|
||||
case 2 -> {
|
||||
BiGpairSEQ.setDistributionType(DistributionType.GAUSSIAN);
|
||||
System.out.println("How many distinct T-cells within one standard deviation of peak frequency?");
|
||||
System.out.println("(Note: wider distributions are more memory intensive to match)");
|
||||
parameter = sc.nextDouble();
|
||||
if (parameter <= 0.0) {
|
||||
throw new InputMismatchException("Value must be positive.");
|
||||
}
|
||||
}
|
||||
case 3 -> {
|
||||
BiGpairSEQ.setDistributionType(DistributionType.EXPONENTIAL);
|
||||
System.out.print("Please enter lambda value for exponential distribution: ");
|
||||
parameter = sc.nextDouble();
|
||||
if (parameter <= 0.0) {
|
||||
parameter = 1.4;
|
||||
System.out.println("Value must be positive. Defaulting to 1.4.");
|
||||
}
|
||||
}
|
||||
case 4 -> {
|
||||
BiGpairSEQ.setDistributionType(DistributionType.ZIPF);
|
||||
System.out.print("Please enter exponent value for Zipf distribution: ");
|
||||
parameter = sc.nextDouble();
|
||||
if (parameter <= 0.0) {
|
||||
parameter = 1.4;
|
||||
System.out.println("Value must be positive. Defaulting to 1.4.");
|
||||
}
|
||||
}
|
||||
default -> {
|
||||
System.out.println("Invalid input. Defaulting to exponential.");
|
||||
parameter = 1.4;
|
||||
BiGpairSEQ.setDistributionType(DistributionType.EXPONENTIAL);
|
||||
}
|
||||
}
|
||||
System.out.print("\nNumber of wells on plate: ");
|
||||
numWells = sc.nextInt();
|
||||
if(numWells < 1){
|
||||
throw new InputMismatchException("No wells on plate");
|
||||
}
|
||||
//choose whether to make T cell population/well random
|
||||
boolean randomWellPopulations;
|
||||
System.out.println("Randomize number of T cells in each well? (y/n)");
|
||||
String ans = sc.next();
|
||||
Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||
Matcher matcher = pattern.matcher(ans);
|
||||
if(matcher.matches()){
|
||||
randomWellPopulations = true;
|
||||
}
|
||||
else{
|
||||
randomWellPopulations = false;
|
||||
}
|
||||
if(randomWellPopulations) { //if T cell population/well is random
|
||||
numSections = numWells;
|
||||
Integer minPop;
|
||||
Integer maxPop;
|
||||
System.out.print("Please enter minimum number of T cells in a well: ");
|
||||
minPop = sc.nextInt();
|
||||
if(minPop < 1) {
|
||||
throw new InputMismatchException("Minimum well population must be positive");
|
||||
}
|
||||
System.out.println("Please enter maximum number of T cells in a well: ");
|
||||
maxPop = sc.nextInt();
|
||||
if(maxPop < minPop) {
|
||||
throw new InputMismatchException("Max well population must be greater than min well population");
|
||||
}
|
||||
//maximum should be inclusive, so need to add one to max of randomly generated values
|
||||
populations = rand.ints(minPop, maxPop + 1)
|
||||
.limit(numSections)
|
||||
.boxed()
|
||||
.toArray(Integer[]::new);
|
||||
System.out.print("Populations: ");
|
||||
System.out.println(Arrays.toString(populations));
|
||||
}
|
||||
else{ //if T cell population/well is not random
|
||||
System.out.println("\nThe plate can be evenly sectioned to allow different numbers of T cells per well.");
|
||||
System.out.println("How many sections would you like to make (minimum 1)?");
|
||||
numSections = sc.nextInt();
|
||||
if (numSections < 1) {
|
||||
throw new InputMismatchException("Too few sections.");
|
||||
} else if (numSections > numWells) {
|
||||
throw new InputMismatchException("Cannot have more sections than wells.");
|
||||
}
|
||||
int i = 1;
|
||||
populations = new Integer[numSections];
|
||||
while (numSections > 0) {
|
||||
System.out.print("Enter number of T cells per well in section " + i + ": ");
|
||||
populations[i - 1] = sc.nextInt();
|
||||
i++;
|
||||
numSections--;
|
||||
}
|
||||
}
|
||||
System.out.println("\nErrors in amplification can induce a well dropout rate for sequences");
|
||||
System.out.print("Enter well dropout rate (0.0 to 1.0): ");
|
||||
dropOutRate = sc.nextDouble();
|
||||
if(dropOutRate < 0.0 || dropOutRate > 1.0) {
|
||||
throw new InputMismatchException("The well dropout rate must be in the range [0.0, 1.0]");
|
||||
}
|
||||
}catch(InputMismatchException ex){
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
assert cellFile != null;
|
||||
CellSample cells;
|
||||
if (cellFile.equals(BiGpairSEQ.getCellFilename())){
|
||||
cells = BiGpairSEQ.getCellSampleInMemory();
|
||||
}
|
||||
else {
|
||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
cells = cellReader.getCellSample();
|
||||
if(BiGpairSEQ.cacheCells()) {
|
||||
BiGpairSEQ.setCellSampleInMemory(cells, cellFile);
|
||||
}
|
||||
}
|
||||
assert filename != null;
|
||||
Plate samplePlate;
|
||||
PlateFileWriter writer;
|
||||
DistributionType type = BiGpairSEQ.getDistributionType();
|
||||
switch(type) {
|
||||
case POISSON -> {
|
||||
parameter = Math.sqrt(cells.getCellCount()); //a Gaussian with standard deviation sqrt(N) approximates a Poisson distribution with mean N
|
||||
samplePlate = new Plate(cells, cellFile, numWells, populations, dropOutRate, parameter);
|
||||
writer = new PlateFileWriter(filename, samplePlate);
|
||||
}
|
||||
default -> {
|
||||
samplePlate = new Plate(cells, cellFile, numWells, populations, dropOutRate, parameter);
|
||||
writer = new PlateFileWriter(filename, samplePlate);
|
||||
}
|
||||
}
|
||||
System.out.println("Writing Sample Plate to file");
|
||||
writer.writePlateFile();
|
||||
System.out.println("Sample Plate written to file: " + filename);
|
||||
if(BiGpairSEQ.cachePlate()) {
|
||||
BiGpairSEQ.setPlateInMemory(samplePlate, filename);
|
||||
}
|
||||
}
|
||||
|
||||
//Output serialized binary of GraphAndMapData object
|
||||
private static void makeCDR3Graph() {
|
||||
String filename = null;
|
||||
String cellFile = null;
|
||||
String plateFile = null;
|
||||
Boolean simulateReadDepth = false;
|
||||
//number of times to read each sequence in a well
|
||||
int readDepth = 1;
|
||||
double readErrorRate = 0.0;
|
||||
double errorCollisionRate = 0.0;
|
||||
double realSequenceCollisionRate = 0.0;
|
||||
try {
|
||||
String str = "\nGenerating bipartite weighted graph encoding occupancy overlap data ";
|
||||
str = str.concat("\nrequires a cell sample file and a sample plate file.");
|
||||
System.out.println(str);
|
||||
System.out.print("\nPlease enter name of an existing cell sample file: ");
|
||||
cellFile = sc.next();
|
||||
System.out.print("\nPlease enter name of an existing sample plate file: ");
|
||||
plateFile = sc.next();
|
||||
System.out.println("\nEnable simulation of sequence read depth and sequence read errors? (y/n)");
|
||||
String ans = sc.next();
|
||||
Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||
Matcher matcher = pattern.matcher(ans);
|
||||
if(matcher.matches()){
|
||||
simulateReadDepth = true;
|
||||
}
|
||||
if (simulateReadDepth) {
|
||||
System.out.print("\nPlease enter the read depth (the integer number of times a sequence is read): ");
|
||||
readDepth = sc.nextInt();
|
||||
if(readDepth < 1) {
|
||||
throw new InputMismatchException("The read depth must be an integer >= 1");
|
||||
}
|
||||
System.out.println("\nPlease enter the read error probability (0.0 to 1.0)");
|
||||
System.out.print("(The probability that a sequence will be misread): ");
|
||||
readErrorRate = sc.nextDouble();
|
||||
if(readErrorRate < 0.0 || readErrorRate > 1.0) {
|
||||
throw new InputMismatchException("The read error probability must be in the range [0.0, 1.0]");
|
||||
}
|
||||
System.out.println("\nPlease enter the error collision probability (0.0 to 1.0)");
|
||||
System.out.print("(The probability of a sequence being misread in a way it has been misread before): ");
|
||||
errorCollisionRate = sc.nextDouble();
|
||||
if(errorCollisionRate < 0.0 || errorCollisionRate > 1.0) {
|
||||
throw new InputMismatchException("The error collision probability must be an in the range [0.0, 1.0]");
|
||||
}
|
||||
System.out.println("\nPlease enter the real sequence collision probability (0.0 to 1.0)");
|
||||
System.out.print("(The probability that a (non-collision) misread produces a different, real sequence): ");
|
||||
realSequenceCollisionRate = sc.nextDouble();
|
||||
if(realSequenceCollisionRate < 0.0 || realSequenceCollisionRate > 1.0) {
|
||||
throw new InputMismatchException("The real sequence collision probability must be an in the range [0.0, 1.0]");
|
||||
}
|
||||
}
|
||||
System.out.println("\nThe graph and occupancy data will be written to a file.");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
|
||||
assert cellFile != null;
|
||||
CellSample cellSample;
|
||||
//check if cells are already in memory
|
||||
if(cellFile.equals(BiGpairSEQ.getCellFilename()) && BiGpairSEQ.getCellSampleInMemory() != null) {
|
||||
cellSample = BiGpairSEQ.getCellSampleInMemory();
|
||||
}
|
||||
else {
|
||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
cellSample = cellReader.getCellSample();
|
||||
if(BiGpairSEQ.cacheCells()) {
|
||||
BiGpairSEQ.setCellSampleInMemory(cellSample, cellFile);
|
||||
}
|
||||
}
|
||||
|
||||
assert plateFile != null;
|
||||
Plate plate;
|
||||
//check if plate is already in memory
|
||||
if(plateFile.equals(BiGpairSEQ.getPlateFilename())){
|
||||
plate = BiGpairSEQ.getPlateInMemory();
|
||||
}
|
||||
else {
|
||||
System.out.println("Reading Sample Plate file: " + plateFile);
|
||||
PlateFileReader plateReader = new PlateFileReader(plateFile);
|
||||
plate = plateReader.getSamplePlate();
|
||||
if(BiGpairSEQ.cachePlate()) {
|
||||
BiGpairSEQ.setPlateInMemory(plate, plateFile);
|
||||
}
|
||||
}
|
||||
if (cellSample.getCells().size() == 0){
|
||||
System.out.println("No cell sample found.");
|
||||
System.out.println("Returning to main menu.");
|
||||
}
|
||||
else if(plate.getWells().size() == 0 || plate.getPopulations().length == 0){
|
||||
System.out.println("No sample plate found.");
|
||||
System.out.println("Returning to main menu.");
|
||||
}
|
||||
else{
|
||||
GraphWithMapData data = Simulator.makeCDR3Graph(cellSample, plate, readDepth, readErrorRate,
|
||||
errorCollisionRate, realSequenceCollisionRate, true);
|
||||
assert filename != null;
|
||||
if(BiGpairSEQ.outputBinary()) {
|
||||
GraphDataObjectWriter dataWriter = new GraphDataObjectWriter(filename, data);
|
||||
dataWriter.writeDataToFile();
|
||||
System.out.println("Serialized binary graph/data file written to: " + filename);
|
||||
}
|
||||
if(BiGpairSEQ.outputGraphML()) {
|
||||
GraphMLFileWriter graphMLWriter = new GraphMLFileWriter(filename, data);
|
||||
graphMLWriter.writeGraphToFile();
|
||||
System.out.println("GraphML file written to: " + filename);
|
||||
}
|
||||
if(BiGpairSEQ.cacheGraph()) {
|
||||
BiGpairSEQ.setGraphInMemory(data, filename);
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//Simulate matching and output CSV file of results
|
||||
private static void matchCDR3s() throws IOException {
|
||||
String filename = null;
|
||||
String graphFilename = null;
|
||||
Integer lowThreshold = 0;
|
||||
Integer highThreshold = Integer.MAX_VALUE;
|
||||
Integer maxOccupancyDiff = Integer.MAX_VALUE;
|
||||
Integer minOverlapPercent = 0;
|
||||
try {
|
||||
System.out.println("\nBiGpairSEQ simulation requires an occupancy data and overlap graph file");
|
||||
System.out.println("Please enter name of an existing graph and occupancy data file: ");
|
||||
graphFilename = sc.next();
|
||||
System.out.println("The matching results will be written to a file.");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nWhat is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
lowThreshold = sc.nextInt();
|
||||
if(lowThreshold < 1){
|
||||
lowThreshold = 1;
|
||||
System.out.println("Value for low occupancy overlap threshold must be positive");
|
||||
System.out.println("Value for low occupancy overlap threshold set to 1");
|
||||
}
|
||||
System.out.println("\nWhat is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
highThreshold = sc.nextInt();
|
||||
if(highThreshold < lowThreshold) {
|
||||
highThreshold = lowThreshold;
|
||||
System.out.println("Value for high occupancy overlap threshold must be >= low overlap threshold");
|
||||
System.out.println("Value for high occupancy overlap threshold set to " + lowThreshold);
|
||||
}
|
||||
System.out.println("What is the minimum percentage of a sequence's wells in alpha/beta overlap to attempt matching? (0 - 100)");
|
||||
minOverlapPercent = sc.nextInt();
|
||||
if (minOverlapPercent < 0 || minOverlapPercent > 100) {
|
||||
System.out.println("Value outside range. Minimum occupancy overlap percentage set to 0");
|
||||
}
|
||||
System.out.println("\nWhat is the maximum difference in alpha/beta occupancy to attempt matching?");
|
||||
maxOccupancyDiff = sc.nextInt();
|
||||
if (maxOccupancyDiff < 0) {
|
||||
maxOccupancyDiff = 0;
|
||||
System.out.println("Maximum allowable difference in alpha/beta occupancy must be nonnegative");
|
||||
System.out.println("Maximum allowable difference in alpha/beta occupancy set to 0");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
assert graphFilename != null;
|
||||
//check if this is the same graph we already have in memory.
|
||||
GraphWithMapData data;
|
||||
if(graphFilename.equals(BiGpairSEQ.getGraphFilename())) {
|
||||
data = BiGpairSEQ.getGraphInMemory();
|
||||
}
|
||||
else {
|
||||
GraphDataObjectReader dataReader = new GraphDataObjectReader(graphFilename, true);
|
||||
data = dataReader.getData();
|
||||
if(BiGpairSEQ.cacheGraph()) {
|
||||
BiGpairSEQ.setGraphInMemory(data, graphFilename);
|
||||
}
|
||||
}
|
||||
//simulate matching
|
||||
MatchingResult results = Simulator.matchCDR3s(data, graphFilename, lowThreshold, highThreshold, maxOccupancyDiff,
|
||||
minOverlapPercent, true, BiGpairSEQ.calculatePValue());
|
||||
//write results to file
|
||||
assert filename != null;
|
||||
MatchingFileWriter writer = new MatchingFileWriter(filename, results);
|
||||
System.out.println("Writing results to file");
|
||||
writer.writeResultsToFile();
|
||||
System.out.println("Results written to file: " + filename);
|
||||
}
|
||||
|
||||
///////
|
||||
//Rewrite this to fit new matchCDR3 method with file I/O
|
||||
///////
|
||||
// public static void matchCellsCDR1(){
|
||||
// /*
|
||||
// The idea here is that we'll get the CDR3 alpha/beta matches first. Then we'll try to match CDR3s to CDR1s by
|
||||
// looking at the top two matches for each CDR3. If CDR3s in the same cell simply swap CDR1s, we assume a correct
|
||||
// match
|
||||
// */
|
||||
// String filename = null;
|
||||
// String preliminaryResultsFilename = null;
|
||||
// String cellFile = null;
|
||||
// String plateFile = null;
|
||||
// Integer lowThresholdCDR3 = 0;
|
||||
// Integer highThresholdCDR3 = Integer.MAX_VALUE;
|
||||
// Integer maxOccupancyDiffCDR3 = 96; //no filtering if max difference is all wells by default
|
||||
// Integer minOverlapPercentCDR3 = 0; //no filtering if min percentage is zero by default
|
||||
// Integer lowThresholdCDR1 = 0;
|
||||
// Integer highThresholdCDR1 = Integer.MAX_VALUE;
|
||||
// boolean outputCDR3Matches = false;
|
||||
// try {
|
||||
// System.out.println("\nSimulated experiment requires a cell sample file and a sample plate file.");
|
||||
// System.out.print("Please enter name of an existing cell sample file: ");
|
||||
// cellFile = sc.next();
|
||||
// System.out.print("Please enter name of an existing sample plate file: ");
|
||||
// plateFile = sc.next();
|
||||
// System.out.println("The matching results will be written to a file.");
|
||||
// System.out.print("Please enter a name for the output file: ");
|
||||
// filename = sc.next();
|
||||
// System.out.println("What is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
// lowThresholdCDR3 = sc.nextInt();
|
||||
// if(lowThresholdCDR3 < 1){
|
||||
// throw new InputMismatchException("Minimum value for low threshold is 1");
|
||||
// }
|
||||
// System.out.println("What is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
// highThresholdCDR3 = sc.nextInt();
|
||||
// System.out.println("What is the maximum difference in CDR3 alpha/beta occupancy to attempt matching?");
|
||||
// maxOccupancyDiffCDR3 = sc.nextInt();
|
||||
// System.out.println("What is the minimum CDR3 overlap percentage to attempt matching? (0 - 100)");
|
||||
// minOverlapPercentCDR3 = sc.nextInt();
|
||||
// if (minOverlapPercentCDR3 < 0 || minOverlapPercentCDR3 > 100) {
|
||||
// throw new InputMismatchException("Value outside range. Minimum percent set to 0");
|
||||
// }
|
||||
// System.out.println("What is the minimum number of CDR3/CDR1 overlap wells to attempt matching?");
|
||||
// lowThresholdCDR1 = sc.nextInt();
|
||||
// if(lowThresholdCDR1 < 1){
|
||||
// throw new InputMismatchException("Minimum value for low threshold is 1");
|
||||
// }
|
||||
// System.out.println("What is the maximum number of CDR3/CDR1 overlap wells to attempt matching?");
|
||||
// highThresholdCDR1 = sc.nextInt();
|
||||
// System.out.println("Matching CDR3s to CDR1s requires first matching CDR3 alpha/betas.");
|
||||
// System.out.println("Output a file for CDR3 alpha/beta match results as well?");
|
||||
// System.out.print("Please enter y/n: ");
|
||||
// String ans = sc.next();
|
||||
// Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||
// Matcher matcher = pattern.matcher(ans);
|
||||
// if(matcher.matches()){
|
||||
// outputCDR3Matches = true;
|
||||
// System.out.println("Please enter filename for CDR3 alpha/beta match results");
|
||||
// preliminaryResultsFilename = sc.next();
|
||||
// System.out.println("CDR3 alpha/beta matches will be output to file");
|
||||
// }
|
||||
// else{
|
||||
// System.out.println("CDR3 alpha/beta matches will not be output to file");
|
||||
// }
|
||||
// } catch (InputMismatchException ex) {
|
||||
// System.out.println(ex);
|
||||
// sc.next();
|
||||
// }
|
||||
// CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
// PlateFileReader plateReader = new PlateFileReader(plateFile);
|
||||
// Plate plate = new Plate(plateReader.getFilename(), plateReader.getWells());
|
||||
// if (cellReader.getCells().size() == 0){
|
||||
// System.out.println("No cell sample found.");
|
||||
// System.out.println("Returning to main menu.");
|
||||
// }
|
||||
// else if(plate.getWells().size() == 0){
|
||||
// System.out.println("No sample plate found.");
|
||||
// System.out.println("Returning to main menu.");
|
||||
//
|
||||
// }
|
||||
// else{
|
||||
// if(highThresholdCDR3 >= plate.getSize()){
|
||||
// highThresholdCDR3 = plate.getSize() - 1;
|
||||
// }
|
||||
// if(highThresholdCDR1 >= plate.getSize()){
|
||||
// highThresholdCDR1 = plate.getSize() - 1;
|
||||
// }
|
||||
// List<Integer[]> cells = cellReader.getCells();
|
||||
// MatchingResult preliminaryResults = Simulator.matchCDR3s(cells, plate, lowThresholdCDR3, highThresholdCDR3,
|
||||
// maxOccupancyDiffCDR3, minOverlapPercentCDR3, true);
|
||||
// MatchingResult[] results = Simulator.matchCDR1s(cells, plate, lowThresholdCDR1,
|
||||
// highThresholdCDR1, preliminaryResults);
|
||||
// MatchingFileWriter writer = new MatchingFileWriter(filename + "_FirstPass", results[0]);
|
||||
// writer.writeResultsToFile();
|
||||
// writer = new MatchingFileWriter(filename + "_SecondPass", results[1]);
|
||||
// writer.writeResultsToFile();
|
||||
// if(outputCDR3Matches){
|
||||
// writer = new MatchingFileWriter(preliminaryResultsFilename, preliminaryResults);
|
||||
// writer.writeResultsToFile();
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
private static void mainOptions(){
|
||||
boolean backToMain = false;
|
||||
while(!backToMain) {
|
||||
System.out.println("\n--------------OPTIONS---------------");
|
||||
System.out.println("1) Turn " + getOnOff(!BiGpairSEQ.cacheCells()) + " cell sample file caching");
|
||||
System.out.println("2) Turn " + getOnOff(!BiGpairSEQ.cachePlate()) + " plate file caching");
|
||||
System.out.println("3) Turn " + getOnOff(!BiGpairSEQ.cacheGraph()) + " graph/data file caching");
|
||||
System.out.println("4) Turn " + getOnOff(!BiGpairSEQ.outputBinary()) + " serialized binary graph output");
|
||||
System.out.println("5) Turn " + getOnOff(!BiGpairSEQ.outputGraphML()) + " GraphML graph output (for data portability to other programs)");
|
||||
System.out.println("6) Turn " + getOnOff(!BiGpairSEQ.calculatePValue()) + " calculation of p-values");
|
||||
System.out.println("7) Maximum weight matching algorithm options");
|
||||
System.out.println("0) Return to main menu");
|
||||
try {
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> BiGpairSEQ.setCacheCells(!BiGpairSEQ.cacheCells());
|
||||
case 2 -> BiGpairSEQ.setCachePlate(!BiGpairSEQ.cachePlate());
|
||||
case 3 -> BiGpairSEQ.setCacheGraph(!BiGpairSEQ.cacheGraph());
|
||||
case 4 -> BiGpairSEQ.setOutputBinary(!BiGpairSEQ.outputBinary());
|
||||
case 5 -> BiGpairSEQ.setOutputGraphML(!BiGpairSEQ.outputGraphML());
|
||||
case 6 -> BiGpairSEQ.setCalculatePValue(!BiGpairSEQ.calculatePValue());
|
||||
case 7 -> algorithmOptions();
|
||||
case 0 -> backToMain = true;
|
||||
default -> System.out.println("Invalid input");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper function for printing menu items in mainOptions(). Returns a string based on the value of parameter.
|
||||
*
|
||||
* @param b - a boolean value
|
||||
* @return String "on" if b is true, "off" if b is false
|
||||
*/
|
||||
private static String getOnOff(boolean b) {
|
||||
if (b) { return "on";}
|
||||
else { return "off"; }
|
||||
}
|
||||
|
||||
private static void algorithmOptions(){
|
||||
boolean backToOptions = false;
|
||||
while(!backToOptions) {
|
||||
System.out.println("\n---------ALGORITHM OPTIONS----------");
|
||||
System.out.println("1) Use Hungarian algorithm with Fibonacci heap priority queue");
|
||||
System.out.println("2) Use Hungarian algorithm with pairing heap priority queue");
|
||||
System.out.println("3) Use auction algorithm");
|
||||
System.out.println("4) Use integer weight scaling algorithm by Duan and Su. (buggy, not yet fully implemented!)");
|
||||
System.out.println("0) Return to Options menu");
|
||||
try {
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> {
|
||||
BiGpairSEQ.setHungarianAlgorithm();
|
||||
BiGpairSEQ.setFibonacciHeap();
|
||||
System.out.println("MWM algorithm set to Hungarian with Fibonacci heap");
|
||||
backToOptions = true;
|
||||
}
|
||||
case 2 -> {
|
||||
BiGpairSEQ.setHungarianAlgorithm();
|
||||
BiGpairSEQ.setPairingHeap();
|
||||
System.out.println("MWM algorithm set to Hungarian with pairing heap");
|
||||
backToOptions = true;
|
||||
}
|
||||
case 3 -> {
|
||||
BiGpairSEQ.setAuctionAlgorithm();
|
||||
System.out.println("MWM algorithm set to auction");
|
||||
backToOptions = true;
|
||||
}
|
||||
case 4 -> {
|
||||
System.out.println("Scaling integer weight MWM algorithm not yet fully implemented. Sorry.");
|
||||
// BiGpairSEQ.setIntegerWeightScalingAlgorithm();
|
||||
// System.out.println("MWM algorithm set to integer weight scaling algorithm of Duan and Su");
|
||||
// backToOptions = true;
|
||||
}
|
||||
case 0 -> backToOptions = true;
|
||||
default -> System.out.println("Invalid input");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void acknowledge(){
|
||||
System.out.println("BiGpairSEQ_Sim " + BiGpairSEQ.getVersion());
|
||||
System.out.println();
|
||||
System.out.println("This program simulates BiGpairSEQ, a graph theory based adaptation");
|
||||
System.out.println("of the pairSEQ algorithm for pairing T cell receptor sequences.");
|
||||
System.out.println();
|
||||
System.out.println("For full documentation, view readme.md file distributed with this code");
|
||||
System.out.println("or visit https://gitea.ejsf.synology.me/efischer/BiGpairSEQ.");
|
||||
System.out.println();
|
||||
System.out.println("pairSEQ citation:");
|
||||
System.out.println("Howie, B., Sherwood, A. M., et. al.");
|
||||
System.out.println("High-throughput pairing of T cell receptor alpha and beta sequences.");
|
||||
System.out.println("Sci. Transl. Med. 7, 301ra131 (2015)");
|
||||
System.out.println();
|
||||
System.out.println("BiGpairSEQ_Sim by Eugene Fischer, 2021-2022");
|
||||
}
|
||||
}
|
||||
3
src/main/java/META-INF/MANIFEST.MF
Normal file
@@ -0,0 +1,3 @@
|
||||
Manifest-Version: 1.0
|
||||
Main-Class: BiGpairSEQ
|
||||
|
||||
@@ -7,13 +7,10 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.List;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
|
||||
public class MatchingFileWriter {
|
||||
|
||||
private String filename;
|
||||
private String sourceFileName;
|
||||
private List<String> comments;
|
||||
private List<String> headers;
|
||||
private List<List<String>> allResults;
|
||||
@@ -23,7 +20,6 @@ public class MatchingFileWriter {
|
||||
filename = filename + ".csv";
|
||||
}
|
||||
this.filename = filename;
|
||||
this.sourceFileName = result.getSourceFileName();
|
||||
this.comments = result.getComments();
|
||||
this.headers = result.getHeaders();
|
||||
this.allResults = result.getAllResults();
|
||||
|
||||
@@ -1,25 +1,55 @@
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class MatchingResult {
|
||||
private String sourceFile;
|
||||
private List<String> comments;
|
||||
private List<String> headers;
|
||||
private List<List<String>> allResults;
|
||||
private Map<Integer, Integer> matchMap;
|
||||
private Duration time;
|
||||
|
||||
public MatchingResult(String sourceFileName, List<String> comments, List<String> headers, List<List<String>> allResults, Map<Integer, Integer>matchMap, Duration time){
|
||||
this.sourceFile = sourceFileName;
|
||||
this.comments = comments;
|
||||
private final Map<String, String> metadata;
|
||||
private final List<String> comments;
|
||||
private final List<String> headers;
|
||||
private final List<List<String>> allResults;
|
||||
private final Map<String, String> matchMap;
|
||||
|
||||
public MatchingResult(Map<String, String> metadata, List<String> headers,
|
||||
List<List<String>> allResults, Map<String, String>matchMap){
|
||||
/*
|
||||
* POSSIBLE KEYS FOR METADATA MAP ARE:
|
||||
* sample plate filename *
|
||||
* graph filename *
|
||||
* matching weight *
|
||||
* well populations *
|
||||
* sequence read depth *
|
||||
* sequence read error rate *
|
||||
* read error collision rate *
|
||||
* total alphas read from plate *
|
||||
* total betas read from plate *
|
||||
* alphas in graph (after pre-filtering) *
|
||||
* betas in graph (after pre-filtering) *
|
||||
* high overlap threshold for pairing *
|
||||
* low overlap threshold for pairing *
|
||||
* maximum occupancy difference for pairing *
|
||||
* minimum overlap percent for pairing *
|
||||
* pairing attempt rate *
|
||||
* correct pairing count *
|
||||
* incorrect pairing count *
|
||||
* pairing error rate *
|
||||
* time to generate graph (seconds) *
|
||||
* time to pair sequences (seconds) *
|
||||
* total simulation time (seconds) *
|
||||
*/
|
||||
this.metadata = metadata;
|
||||
this.comments = new ArrayList<>();
|
||||
for (String key : metadata.keySet()) {
|
||||
comments.add(key +": " + metadata.get(key));
|
||||
}
|
||||
this.headers = headers;
|
||||
this.allResults = allResults;
|
||||
this.matchMap = matchMap;
|
||||
this.time = time;
|
||||
|
||||
}
|
||||
|
||||
public Map<String, String> getMetadata() {return metadata;}
|
||||
|
||||
public List<String> getComments() {
|
||||
return comments;
|
||||
}
|
||||
@@ -32,15 +62,56 @@ public class MatchingResult {
|
||||
return headers;
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getMatchMap() {
|
||||
public Map<String, String> getMatchMap() {
|
||||
return matchMap;
|
||||
}
|
||||
|
||||
public Duration getTime() {
|
||||
return time;
|
||||
// public Duration getTime() {
|
||||
// return time;
|
||||
// }
|
||||
|
||||
public String getPlateFilename() {
|
||||
return metadata.get("sample plate filename");
|
||||
}
|
||||
|
||||
public String getSourceFileName() {
|
||||
return sourceFile;
|
||||
public String getGraphFilename() {
|
||||
return metadata.get("graph filename");
|
||||
}
|
||||
|
||||
public Integer[] getWellPopulations() {
|
||||
List<Integer> wellPopulations = new ArrayList<>();
|
||||
String popString = metadata.get("well populations");
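//e.g. a stored value of "50, 100, 200" (hypothetical) parses to the array {50, 100, 200}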
|
||||
for (String p : popString.split(", ")) {
|
||||
wellPopulations.add(Integer.parseInt(p));
|
||||
}
|
||||
Integer[] popArray = new Integer[wellPopulations.size()];
|
||||
return wellPopulations.toArray(popArray);
|
||||
}
|
||||
|
||||
public Integer getAlphaCount() {
|
||||
return Integer.parseInt(metadata.get("total alphas read from plate"));
|
||||
}
|
||||
|
||||
public Integer getBetaCount() {
|
||||
return Integer.parseInt(metadata.get("total betas read from plate"));
|
||||
}
|
||||
|
||||
public Integer getHighOverlapThreshold() { return Integer.parseInt(metadata.get("high overlap threshold for pairing"));}
|
||||
|
||||
public Integer getLowOverlapThreshold() { return Integer.parseInt(metadata.get("low overlap threshold for pairing"));}
|
||||
|
||||
public Integer getMaxOccupancyDifference() { return Integer.parseInt(metadata.get("maximum occupancy difference for pairing"));}
|
||||
|
||||
public Integer getMinOverlapPercent() { return Integer.parseInt(metadata.get("minimum overlap percent for pairing"));}
|
||||
|
||||
public Double getPairingAttemptRate() { return Double.parseDouble(metadata.get("pairing attempt rate"));}
|
||||
|
||||
public Integer getCorrectPairingCount() { return Integer.parseInt(metadata.get("correct pairing count"));}
|
||||
|
||||
public Integer getIncorrectPairingCount() { return Integer.parseInt(metadata.get("incorrect pairing count"));}
|
||||
|
||||
public Double getPairingErrorRate() { return Double.parseDouble(metadata.get("pairing error rate"));}
|
||||
|
||||
public String getSimulationTime() { return metadata.get("total simulation time (seconds)"); }
|
||||
|
||||
}
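A construction sketch for the metadata-driven form above. The key strings follow the list in the constructor comment; the values and collection contents are placeholders.

//Sketch only: the comments list and the typed getters are both driven by the metadata map.
Map<String, String> metadata = new LinkedHashMap<>();
metadata.put("graph filename", "run1_graph.dat");   //placeholder value
metadata.put("pairing error rate", "0.03");          //placeholder value
MatchingResult result = new MatchingResult(metadata, new ArrayList<>(), new ArrayList<>(), new HashMap<>());
System.out.println(result.getPairingErrorRate());    //parses "0.03" back out of the map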
|
||||
|
||||
177
src/main/java/MaximumIntegerWeightBipartiteAuctionMatching.java
Normal file
@@ -0,0 +1,177 @@
|
||||
import org.jgrapht.Graph;
|
||||
import org.jgrapht.GraphTests;
|
||||
import org.jgrapht.alg.interfaces.MatchingAlgorithm;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Maximum weight matching in bipartite graphs with strictly integer edge weights, using a forward auction algorithm.
|
||||
* This implementation uses the Gauss-Seidel version of the forward auction algorithm, in which bids are submitted
|
||||
* one at a time. For any weighted bipartite graph with n vertices in the smaller partition, this algorithm will produce
|
||||
* a matching that is within n*epsilon of being optimal. Using an epsilon = 1/(n+1) ensures that this matching differs
|
||||
* from an optimal matching by <1. Thus, for a bipartite graph with strictly integer weights, this algorithm returns
|
||||
* a maximum weight matching.
|
||||
*
|
||||
* See:
|
||||
* "Towards auction algorithms for large dense assignment problems"
|
||||
* Libor Buš and Pavel Tvrdík, Comput Optim Appl (2009) 43:411-436
|
||||
* https://link.springer.com/article/10.1007/s10589-007-9146-5
|
||||
*
|
||||
* See also:
|
||||
* Many books and papers by Dimitri Bertsekas, including chapter 4 of Linear Network Optimization:
|
||||
* https://web.mit.edu/dimitrib/www/LNets_Full_Book.pdf
|
||||
*
|
||||
* @param <V> the graph vertex type
|
||||
* @param <E> the graph edge type
|
||||
*
|
||||
* @author Eugene Fischer
|
||||
*/
|
||||
|
||||
public class MaximumIntegerWeightBipartiteAuctionMatching<V, E> implements MatchingAlgorithm<V, E> {
|
||||
|
||||
private final Graph<V, E> graph;
|
||||
private final Set<V> partition1;
|
||||
private final Set<V> partition2;
|
||||
private final BigDecimal epsilon;
|
||||
private final Set<E> matching;
|
||||
private BigDecimal matchingWeight;
|
||||
|
||||
private boolean swappedPartitions = false;
|
||||
|
||||
public MaximumIntegerWeightBipartiteAuctionMatching(Graph<V, E> graph, Set<V> partition1, Set<V> partition2) {
|
||||
this.graph = GraphTests.requireUndirected(graph);
|
||||
this.partition1 = Objects.requireNonNull(partition1, "Partition 1 cannot be null");
|
||||
this.partition2 = Objects.requireNonNull(partition2, "Partition 2 cannot be null");
|
||||
int n = Math.max(partition1.size(), partition2.size());
|
||||
this.epsilon = BigDecimal.valueOf(1 / ((double) n + 1)); //The minimum price increase of a bid
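//e.g. n = 99 gives epsilon = 0.01, so the matching is within 99 * 0.01 = 0.99 < 1 of optimal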
|
||||
this.matching = new LinkedHashSet<>();
|
||||
this.matchingWeight = BigDecimal.ZERO;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
Method coded using MaximumWeightBipartiteMatching.class from JgraphT as a model
|
||||
*/
|
||||
@Override
|
||||
public Matching<V, E> getMatching() {
|
||||
|
||||
/*
|
||||
* Test input instance
|
||||
*/
|
||||
if (!GraphTests.isSimple(graph)) {
|
||||
throw new IllegalArgumentException("Only simple graphs supported");
|
||||
}
|
||||
if (!GraphTests.isBipartitePartition(graph, partition1, partition2)) {
|
||||
throw new IllegalArgumentException("Graph partition is not bipartite");
|
||||
}
|
||||
|
||||
/*
|
||||
If the two partitions are different sizes, the bidders must be the smaller of the two partitions.
|
||||
*/
|
||||
Set<V> items;
|
||||
Set<V> bidders;
|
||||
if (partition2.size() >= partition1.size()) {
|
||||
bidders = partition1;
|
||||
items = partition2;
|
||||
}
|
||||
else {
|
||||
bidders = partition2;
|
||||
items = partition1;
|
||||
swappedPartitions = true;
|
||||
}
|
||||
|
||||
/*
|
||||
Create a map to track the owner of each item, which is initially null,
|
||||
and a map to track the price of each item, which is initially 0. An
|
||||
initial price of 0 allows for asymmetric assignment (though it does mean
|
||||
that this form of the algorithm cannot take advantage of epsilon-scaling).
|
||||
*/
|
||||
Map<V, V> owners = new HashMap<>();
|
||||
Map<V, BigDecimal> prices = new HashMap<>();
|
||||
for(V item: items) {
|
||||
owners.put(item, null);
|
||||
prices.put(item, BigDecimal.ZERO);
|
||||
}
|
||||
|
||||
//Create a queue of bidders that don't currently own an item, which is initially all of them
|
||||
Queue<V> unmatchedBidders = new ArrayDeque<>();
|
||||
for(V bidder: bidders) {
|
||||
unmatchedBidders.offer(bidder);
|
||||
}
|
||||
|
||||
//Run the auction while there are remaining unmatched bidders
|
||||
while (unmatchedBidders.size() > 0) {
|
||||
V bidder = unmatchedBidders.poll();
|
||||
V item = null;
|
||||
BigDecimal bestValue = BigDecimal.valueOf(-1.0);
|
||||
BigDecimal runnerUpValue = BigDecimal.valueOf(-1.0);
|
||||
/*
|
||||
Find the items that offer the best and second-best value for the bidder,
|
||||
then submit a bid equal to the price of the best-valued item plus the marginal value over
|
||||
the second-best-valued item plus epsilon.
|
||||
*/
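//Worked example (illustrative numbers, not from the source): if the best item's value is 5,
//the runner-up's value is 3, the best item's current price is 2, and epsilon = 0.1,
//the submitted bid is 2 + (5 - 3) + 0.1 = 4.1.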
|
||||
for (E edge: graph.edgesOf(bidder)) {
|
||||
double weight = graph.getEdgeWeight(edge);
|
||||
if(weight == 0.0) {
|
||||
continue;
|
||||
}
|
||||
V tmp = getItem(edge);
|
||||
BigDecimal value = BigDecimal.valueOf(weight).subtract(prices.get(tmp));
|
||||
if (value.compareTo(bestValue) >= 0) {
|
||||
runnerUpValue = bestValue;
|
||||
bestValue = value;
|
||||
item = tmp;
|
||||
}
|
||||
else if (value.compareTo(runnerUpValue) >= 0) {
|
||||
runnerUpValue = value;
|
||||
}
|
||||
}
|
||||
if(bestValue.compareTo(BigDecimal.ZERO) >= 0) {
|
||||
V formerOwner = owners.get(item);
|
||||
BigDecimal price = prices.get(item);
|
||||
BigDecimal bid = price.add(bestValue).subtract(runnerUpValue).add(epsilon);
|
||||
if (formerOwner != null) {
|
||||
unmatchedBidders.offer(formerOwner);
|
||||
}
|
||||
owners.put(item, bidder);
|
||||
prices.put(item, bid);
|
||||
}
|
||||
}
|
||||
//Add all edges between items and their owners to the matching
|
||||
for (V item: owners.keySet()) {
|
||||
if (owners.get(item) != null) {
|
||||
matching.add(graph.getEdge(item, owners.get(item)));
|
||||
}
|
||||
}
|
||||
//Sum the edges of the matching to obtain the matching weight
|
||||
for(E edge: matching) {
|
||||
this.matchingWeight = this.matchingWeight.add(BigDecimal.valueOf(graph.getEdgeWeight(edge)));
|
||||
}
|
||||
|
||||
return new MatchingImpl<>(graph, matching, matchingWeight.doubleValue());
|
||||
}
|
||||
|
||||
private V getItem(E edge) {
|
||||
if (swappedPartitions) {
|
||||
return graph.getEdgeSource(edge);
|
||||
}
|
||||
else {
|
||||
return graph.getEdgeTarget(edge);
|
||||
}
|
||||
}
|
||||
|
||||
// //method for implementing a forward-reverse auction algorithm, not used here
|
||||
// private V getBidder(E edge) {
|
||||
// if (swappedPartitions) {
|
||||
// return graph.getEdgeTarget(edge);
|
||||
// }
|
||||
// else {
|
||||
// return graph.getEdgeSource(edge);
|
||||
// }
|
||||
// }
|
||||
|
||||
public BigDecimal getMatchingWeight() {
|
||||
return matchingWeight;
|
||||
}
|
||||
}
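For orientation, a minimal usage sketch of the matcher above. The graph, the two partition sets, and the Vertex/DefaultWeightedEdge types are assumed to come from the surrounding simulator code; per the class comment, edge weights must be integers.

//Sketch: run the auction matcher on an integer-weighted bipartite graph (names assumed).
MaximumIntegerWeightBipartiteAuctionMatching<Vertex, DefaultWeightedEdge> auction =
        new MaximumIntegerWeightBipartiteAuctionMatching<>(graph, alphaVertices, betaVertices);
MatchingAlgorithm.Matching<Vertex, DefaultWeightedEdge> matching = auction.getMatching();
//With epsilon = 1/(n+1) the result is within n * epsilon < 1 of optimal, hence exactly optimal here.
System.out.println("Matching weight: " + auction.getMatchingWeight());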
|
||||
1284
src/main/java/MaximumIntegerWeightBipartiteMatching.java
Normal file
File diff suppressed because it is too large
212
src/main/java/MaximumWeightBipartiteLookBackAuctionMatching.java
Normal file
@@ -0,0 +1,212 @@
|
||||
import org.jgrapht.Graph;
|
||||
import org.jgrapht.GraphTests;
|
||||
import org.jgrapht.alg.interfaces.MatchingAlgorithm;
|
||||
import org.jgrapht.alg.util.Pair;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.util.*;
|
||||
|
||||
/*
|
||||
Maximum weight matching in bipartite graphs with strictly integer edge weights, found using the
|
||||
unscaled look-back auction algorithm
|
||||
*/
|
||||
|
||||
public class MaximumWeightBipartiteLookBackAuctionMatching<V, E> implements MatchingAlgorithm<V, E> {
|
||||
|
||||
private final Graph<V, E> graph;
|
||||
private final Set<V> partition1;
|
||||
private final Set<V> partition2;
|
||||
private final BigDecimal delta;
|
||||
private final Set<E> matching;
|
||||
private BigDecimal matchingWeight;
|
||||
private boolean swappedPartitions = false;
|
||||
|
||||
public MaximumWeightBipartiteLookBackAuctionMatching(Graph<V, E> graph, Set<V> partition1, Set<V> partition2) {
|
||||
this.graph = GraphTests.requireUndirected(graph);
|
||||
this.partition1 = Objects.requireNonNull(partition1, "Partition 1 cannot be null");
|
||||
this.partition2 = Objects.requireNonNull(partition2, "Partition 2 cannot be null");
|
||||
int n = Math.max(partition1.size(), partition2.size());
|
||||
this.delta = BigDecimal.valueOf(1 / ((double) n + 1));
|
||||
this.matching = new LinkedHashSet<>();
|
||||
this.matchingWeight = BigDecimal.ZERO;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
Method coded using MaximumWeightBipartiteMatching.class from JgraphT as a model
|
||||
*/
|
||||
@Override
|
||||
public Matching<V, E> getMatching() {
|
||||
|
||||
/*
|
||||
* Test input instance
|
||||
*/
|
||||
if (!GraphTests.isSimple(graph)) {
|
||||
throw new IllegalArgumentException("Only simple graphs supported");
|
||||
}
|
||||
if (!GraphTests.isBipartitePartition(graph, partition1, partition2)) {
|
||||
throw new IllegalArgumentException("Graph partition is not bipartite");
|
||||
}
|
||||
|
||||
/*
|
||||
If the two partitions are different sizes, the bidders must be the smaller of the two partitions.
|
||||
*/
|
||||
Set<V> items;
|
||||
Set<V> bidders;
|
||||
if (partition2.size() >= partition1.size()) {
|
||||
bidders = partition1;
|
||||
items = partition2;
|
||||
}
|
||||
else {
|
||||
bidders = partition2;
|
||||
items = partition1;
|
||||
swappedPartitions = true;
|
||||
}
|
||||
|
||||
/*
|
||||
Create a map to track the owner of each item, which is initially null,
|
||||
and a map to track the price of each item, which is initially 0.
|
||||
*/
|
||||
Map<V, V> owners = new HashMap<>();
|
||||
|
||||
/*
|
||||
Create a map to track the prices of the objects
|
||||
*/
|
||||
Map<V, BigDecimal> prices = new HashMap<>();
|
||||
for(V item: items) {
|
||||
owners.put(item, null);
|
||||
prices.put(item, BigDecimal.ZERO);
|
||||
}
|
||||
|
||||
/*
|
||||
Create a map to track the most valuable object for a bidder
|
||||
*/
|
||||
Map<V, V> mostValuableItems = new HashMap<>();
|
||||
|
||||
/*
|
||||
Create a map to track the second most valuable object for a bidder
|
||||
*/
|
||||
Map<V, V> runnerUpItems = new HashMap<>();
|
||||
|
||||
/*
|
||||
Create a map to track the bidder value thresholds
|
||||
*/
|
||||
Map<V, BigDecimal> valueThresholds = new HashMap<>();
|
||||
|
||||
|
||||
//Initialize queue of all bidders that don't currently own an item
|
||||
Queue<V> unmatchedBidders = new ArrayDeque<>();
|
||||
for(V bidder: bidders) {
|
||||
unmatchedBidders.offer(bidder);
|
||||
valueThresholds.put(bidder, BigDecimal.ZERO);
|
||||
mostValuableItems.put(bidder, null);
|
||||
runnerUpItems.put(bidder, null);
|
||||
}
|
||||
|
||||
while (unmatchedBidders.size() > 0) {
|
||||
V bidder = unmatchedBidders.poll();
|
||||
// BigDecimal valueThreshold = valueThresholds.get(bidder);
|
||||
BigDecimal bestValue = BigDecimal.ZERO;
|
||||
BigDecimal runnerUpValue = BigDecimal.ZERO;
|
||||
boolean reinitialize = true;
|
||||
// if (mostValuableItems.get(bidder) != null && runnerUpItems.get(bidder) != null) {
|
||||
// reinitialize = false;
|
||||
// //get the weight of the edge between the bidder and the best valued item
|
||||
// V bestItem = mostValuableItems.get(bidder);
|
||||
// BigDecimal bestItemWeight = BigDecimal.valueOf(graph.getEdgeWeight(graph.getEdge(bidder, bestItem)));
|
||||
// bestValue = bestItemWeight.subtract(prices.get(bestItem));
|
||||
// V runnerUpItem = runnerUpItems.get(bidder);
|
||||
// BigDecimal runnerUpWeight = BigDecimal.valueOf(graph.getEdgeWeight(graph.getEdge(bidder, runnerUpItem)));
|
||||
// runnerUpValue = runnerUpWeight.subtract(prices.get(runnerUpItem));
|
||||
// //if both values are still above the threshold
|
||||
// if (bestValue.compareTo(valueThreshold) >= 0 && runnerUpValue.compareTo(valueThreshold) >= 0) {
|
||||
// if (bestValue.compareTo(runnerUpValue) < 0) { //if best value is lower than runner up
|
||||
// BigDecimal tmp = bestValue;
|
||||
// bestValue = runnerUpValue;
|
||||
// runnerUpValue = tmp;
|
||||
// mostValuableItems.put(bidder, runnerUpItem);
|
||||
// runnerUpItems.put(bidder, bestItem);
|
||||
// }
|
||||
// BigDecimal newValueThreshold = bestValue.min(runnerUpValue);
|
||||
// valueThresholds.put(bidder, newValueThreshold);
|
||||
// System.out.println("lookback successful");
|
||||
// }
|
||||
// else {
|
||||
// reinitialize = true; //lookback failed
|
||||
// }
|
||||
// }
|
||||
if (reinitialize){
|
||||
bestValue = BigDecimal.ZERO;
|
||||
runnerUpValue = BigDecimal.ZERO;
|
||||
for (E edge: graph.edgesOf(bidder)) {
|
||||
double weight = graph.getEdgeWeight(edge);
|
||||
if (weight == 0.0) {
|
||||
continue;
|
||||
}
|
||||
V tmpItem = getItem(bidder, edge);
|
||||
BigDecimal tmpValue = BigDecimal.valueOf(weight).subtract(prices.get(tmpItem));
|
||||
if (tmpValue.compareTo(bestValue) >= 0) {
|
||||
runnerUpValue = bestValue;
|
||||
bestValue = tmpValue;
|
||||
runnerUpItems.put(bidder, mostValuableItems.get(bidder));
|
||||
mostValuableItems.put(bidder, tmpItem);
|
||||
}
|
||||
else if (tmpValue.compareTo(runnerUpValue) >= 0) {
|
||||
runnerUpValue = tmpValue;
|
||||
runnerUpItems.put(bidder, tmpItem);
|
||||
}
|
||||
}
|
||||
valueThresholds.put(bidder, runnerUpValue);
|
||||
}
|
||||
//Should now have initialized the maps to make look back possible
|
||||
//skip this bidder if the best value is still zero
|
||||
if (BigDecimal.ZERO.compareTo(bestValue) == 0) { //compareTo rather than equals: BigDecimal.equals is scale-sensitive
|
||||
continue;
|
||||
}
|
||||
V mostValuableItem = mostValuableItems.get(bidder);
|
||||
BigDecimal price = prices.get(mostValuableItem);
|
||||
BigDecimal bid = price.add(bestValue).subtract(runnerUpValue).add(this.delta);
|
||||
V formerOwner = owners.get(mostValuableItem);
|
||||
if (formerOwner != null) {
|
||||
unmatchedBidders.offer(formerOwner);
|
||||
}
|
||||
owners.put(mostValuableItem, bidder);
|
||||
prices.put(mostValuableItem, bid);
|
||||
}
|
||||
|
||||
for (V item: owners.keySet()) {
|
||||
if (owners.get(item) != null) {
|
||||
matching.add(graph.getEdge(item, owners.get(item)));
|
||||
}
|
||||
}
|
||||
|
||||
for(E edge: matching) {
|
||||
this.matchingWeight = this.matchingWeight.add(BigDecimal.valueOf(graph.getEdgeWeight(edge)));
|
||||
}
|
||||
|
||||
|
||||
return new MatchingImpl<>(graph, matching, matchingWeight.doubleValue());
|
||||
}
|
||||
|
||||
private V getItem(V bidder, E edge) {
|
||||
if (swappedPartitions) {
|
||||
return graph.getEdgeSource(edge);
|
||||
}
|
||||
else {
|
||||
return graph.getEdgeTarget(edge);
|
||||
}
|
||||
}
|
||||
|
||||
private V getBidder(V item, E edge) {
|
||||
if (swappedPartitions) {
|
||||
return graph.getEdgeTarget(edge);
|
||||
}
|
||||
else {
|
||||
return graph.getEdgeSource(edge);
|
||||
}
|
||||
}
|
||||
|
||||
public BigDecimal getMatchingWeight() {
|
||||
return matchingWeight;
|
||||
}
|
||||
}
|
||||
@@ -1,75 +1,136 @@
|
||||
import java.util.*;
|
||||
|
||||
|
||||
/*
|
||||
TODO: Implement exponential distribution using inversion method - DONE
|
||||
TODO: Implement collisions with real sequences by having the counting function keep a map of all sequences it's read,
|
||||
with values of all misreads. Can then have a spurious/real collision rate, which will have count randomly select a sequence
|
||||
it's already read at least once, and put that into the list of spurious sequences for the given real sequence. Will let me get rid
|
||||
of the distinctMisreadCount map, and use this new map instead. Doing it this way, once a sequence has been misread as another
|
||||
sequence once, it is more likely to be misread that way again, as future read error collisions can also be real sequence collisions
|
||||
Prob A: a read error occurs. Prob B: it's a new error (otherwise it's a repeated error). Prob C: if new error, prob that it's
|
||||
a real sequence collision (otherwise it's a new spurious sequence) - DONE
|
||||
TODO: Implement discrete frequency distributions using Vose's Alias Method
|
||||
*/
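The last TODO above (discrete frequency distributions via Vose's Alias Method) is still open. A minimal, self-contained sketch of what such a sampler could look like is below; the class name, constructor, and weights are hypothetical and not part of this repository.

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Random;

final class AliasSampler {
    private final double[] prob;
    private final int[] alias;
    private final Random rand;

    AliasSampler(double[] weights, Random rand) {
        int n = weights.length;
        this.rand = rand;
        prob = new double[n];
        alias = new int[n];
        double total = 0.0;
        for (double w : weights) total += w;
        double[] scaled = new double[n];
        Deque<Integer> small = new ArrayDeque<>();
        Deque<Integer> large = new ArrayDeque<>();
        for (int i = 0; i < n; i++) {
            scaled[i] = weights[i] * n / total;          // mean-normalized weight
            if (scaled[i] < 1.0) small.push(i); else large.push(i);
        }
        while (!small.isEmpty() && !large.isEmpty()) {
            int s = small.pop(), l = large.pop();
            prob[s] = scaled[s];                          // probability of keeping column s
            alias[s] = l;                                 // otherwise redirect to l
            scaled[l] = (scaled[l] + scaled[s]) - 1.0;    // l donates its surplus to s
            if (scaled[l] < 1.0) small.push(l); else large.push(l);
        }
        while (!large.isEmpty()) prob[large.pop()] = 1.0;
        while (!small.isEmpty()) prob[small.pop()] = 1.0; // numerical leftovers
    }

    int sample() {                                        // O(1) per draw
        int column = rand.nextInt(prob.length);
        return rand.nextDouble() < prob[column] ? column : alias[column];
    }
}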
|
||||
|
||||
|
||||
import org.apache.commons.rng.UniformRandomProvider;
|
||||
import org.apache.commons.rng.core.BaseProvider;
|
||||
import org.apache.commons.rng.sampling.distribution.RejectionInversionZipfSampler;
|
||||
import org.apache.commons.rng.simple.JDKRandomWrapper;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
public class Plate {
|
||||
private CellSample cells;
|
||||
private String sourceFile;
|
||||
private List<List<Integer[]>> wells;
|
||||
private Random rand = new Random();
|
||||
private String filename;
|
||||
private List<List<String[]>> wells;
|
||||
private final Random rand = BiGpairSEQ.getRand();
|
||||
private int size;
|
||||
private double error;
|
||||
private Integer[] concentrations;
|
||||
private Integer[] populations;
|
||||
private double stdDev;
|
||||
private double lambda;
|
||||
boolean exponential = false;
|
||||
private double zipfExponent;
|
||||
private DistributionType distributionType;
|
||||
|
||||
public Plate(CellSample cells, String cellFilename, int numWells, Integer[] populations,
|
||||
double dropoutRate, double parameter){
|
||||
this.cells = cells;
|
||||
this.sourceFile = cellFilename;
|
||||
this.size = numWells;
|
||||
this.wells = new ArrayList<>();
|
||||
this.error = dropoutRate;
|
||||
this.populations = populations;
|
||||
this.stdDev = parameter;
|
||||
this.lambda = parameter;
|
||||
this.zipfExponent = parameter;
|
||||
this.distributionType = BiGpairSEQ.getDistributionType();
|
||||
fillWells(cells.getCells());
|
||||
}
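Illustrative use of this constructor (the cell count, section populations, and parameter value are hypothetical; BiGpairSEQ is assumed to have been configured with a distribution type and shared Random beforehand):

CellSample sample = Simulator.generateCellSample(10_000, 10);   // 10,000 cells, 10 distinct CDR3s per CDR1
Integer[] populations = {5, 10, 20};                            // cells per well in each plate section
Plate plate = new Plate(sample, "cells.csv", 96, populations, 0.05, 0.6);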
|
||||
|
||||
|
||||
public Plate(int size, double error, Integer[] concentrations) {
|
||||
public Plate(int size, double error, Integer[] populations) {
|
||||
this.size = size;
|
||||
this.error = error;
|
||||
this.concentrations = concentrations;
|
||||
this.populations = populations;
|
||||
wells = new ArrayList<>();
|
||||
}
|
||||
|
||||
public Plate(String sourceFileName, List<List<Integer[]>> wells) {
|
||||
this.sourceFile = sourceFileName;
|
||||
//constructor for returning a Plate from a PlateFileReader
|
||||
public Plate(String filename, List<List<String[]>> wells) {
|
||||
this.filename = filename;
|
||||
this.wells = wells;
|
||||
this.size = wells.size();
|
||||
|
||||
double totalCellCount = 0.0;
|
||||
double totalDropoutCount = 0.0;
|
||||
List<Integer> concentrations = new ArrayList<>();
|
||||
for (List<Integer[]> w: wells) {
|
||||
for (List<String[]> w: wells) {
|
||||
if(!concentrations.contains(w.size())){
|
||||
concentrations.add(w.size());
|
||||
}
|
||||
for (String[] cell: w) {
|
||||
totalCellCount += 1.0;
|
||||
for (String sequence: cell) {
|
||||
if("-1".equals(sequence)) {
|
||||
totalDropoutCount += 1.0;
|
||||
}
|
||||
this.concentrations = new Integer[concentrations.size()];
|
||||
for (int i = 0; i < this.concentrations.length; i++) {
|
||||
this.concentrations[i] = concentrations.get(i);
|
||||
}
|
||||
}
|
||||
}
|
||||
double totalSequenceCount = totalCellCount * 4;
|
||||
this.error = totalDropoutCount / totalSequenceCount;
|
||||
this.populations = new Integer[concentrations.size()];
|
||||
for (int i = 0; i < this.populations.length; i++) {
|
||||
this.populations[i] = concentrations.get(i);
|
||||
}
|
||||
}
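As a worked example of the reconstruction above (numbers hypothetical): a 96-well plate with 20 cells per well holds 1,920 cells, and since each cell carries the four SequenceType entries, 7,680 sequences; if 384 of them were written out as "-1", the recovered dropout rate is 384 / 7,680 = 0.05.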
|
||||
|
||||
public void fillWellsExponential(String sourceFileName, List<Integer[]> cells, double lambda){
|
||||
this.lambda = lambda;
|
||||
exponential = true;
|
||||
sourceFile = sourceFileName;
|
||||
int numSections = concentrations.length;
|
||||
private void fillWellsZipf(List<String[]> cells, double exponent) {
|
||||
int numSections = populations.length;
|
||||
int section = 0;
|
||||
int n;
|
||||
RejectionInversionZipfSampler zipfSampler = new RejectionInversionZipfSampler(new JDKRandomWrapper(rand), cells.size(), exponent);
|
||||
while (section < numSections){
|
||||
for (int i = 0; i < (size / numSections); i++) {
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < populations[section]; j++) {
|
||||
do {
|
||||
n = zipfSampler.sample();
|
||||
} while (n >= cells.size() || n < 0);
|
||||
String[] cellToAdd = cells.get(n).clone();
|
||||
for(int k = 0; k < cellToAdd.length; k++){
|
||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence
|
||||
cellToAdd[k] = "-1";
|
||||
}
|
||||
}
|
||||
well.add(cellToAdd);
|
||||
}
|
||||
wells.add(well);
|
||||
}
|
||||
section++;
|
||||
}
|
||||
}
|
||||
|
||||
private void fillWellsExponential(List<String[]> cells, double lambda){
|
||||
int numSections = populations.length;
|
||||
int section = 0;
|
||||
double m;
|
||||
int n;
|
||||
int test=0;
|
||||
while (section < numSections){
|
||||
for (int i = 0; i < (size / numSections); i++) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < concentrations[section]; j++) {
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < populations[section]; j++) {
|
||||
do {
|
||||
//inverse transform sampling: for random number u in [0,1), x = log(1-u) / (-lambda)
|
||||
m = (Math.log10((1 - rand.nextDouble()))/(-lambda)) * Math.sqrt(cells.size());
|
||||
} while (m >= cells.size() || m < 0);
|
||||
n = (int) Math.floor(m);
|
||||
//n = Equations.getRandomNumber(0, cells.size());
|
||||
// was testing generating the cell sample file with exponential dist, then sampling flat here
|
||||
//that would be more realistic
|
||||
//But would mess up other things in the simulation with how I've coded it.
|
||||
if(n > test){
|
||||
test = n;
|
||||
}
|
||||
Integer[] cellToAdd = cells.get(n).clone();
|
||||
String[] cellToAdd = cells.get(n).clone();
|
||||
for(int k = 0; k < cellToAdd.length; k++){
|
||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence

|
||||
cellToAdd[k] = -1;
|
||||
if(Math.abs(rand.nextDouble()) <= error){//error applied to each sequence
|
||||
cellToAdd[k] = "-1";
|
||||
}
|
||||
}
|
||||
well.add(cellToAdd);
|
||||
@@ -78,28 +139,26 @@ public class Plate {
|
||||
}
|
||||
section++;
|
||||
}
|
||||
System.out.println("Highest index: " +test);
|
||||
}
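For comparison, the textbook inversion-method draw for an Exp(lambda) variate uses the natural logarithm; the log10 and sqrt(cells.size()) scaling above are choices specific to this simulation. A minimal sketch (method name hypothetical):

// Inverse transform sampling: if U ~ Uniform[0,1), then -ln(1 - U) / lambda ~ Exp(lambda)
static double sampleExponential(java.util.Random rand, double lambda) {
    return -Math.log(1.0 - rand.nextDouble()) / lambda;
}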
|
||||
|
||||
public void fillWells(String sourceFileName, List<Integer[]> cells, double stdDev) {
|
||||
private void fillWells( List<String[]> cells, double stdDev) {
|
||||
this.stdDev = stdDev;
|
||||
sourceFile = sourceFileName;
|
||||
int numSections = concentrations.length;
|
||||
int numSections = populations.length;
|
||||
int section = 0;
|
||||
double m;
|
||||
int n;
|
||||
while (section < numSections){
|
||||
for (int i = 0; i < (size / numSections); i++) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < concentrations[section]; j++) {
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for (int j = 0; j < populations[section]; j++) {
|
||||
do {
|
||||
m = (rand.nextGaussian() * stdDev) + (cells.size() / 2);
|
||||
} while (m >= cells.size() || m < 0);
|
||||
n = (int) Math.floor(m);
|
||||
Integer[] cellToAdd = cells.get(n).clone();
|
||||
String[] cellToAdd = cells.get(n).clone();
|
||||
for(int k = 0; k < cellToAdd.length; k++){
|
||||
if(Math.abs(rand.nextDouble()) < error){//error applied to each sequence
|
||||
cellToAdd[k] = -1;
|
||||
cellToAdd[k] = "-1";
|
||||
}
|
||||
}
|
||||
well.add(cellToAdd);
|
||||
@@ -110,8 +169,26 @@ public class Plate {
|
||||
}
|
||||
}
|
||||
|
||||
public Integer[] getConcentrations(){
|
||||
return concentrations;
|
||||
private void fillWells(List<String[]> cells){
|
||||
DistributionType type = BiGpairSEQ.getDistributionType();
|
||||
switch (type) {
|
||||
case POISSON, GAUSSIAN -> {
|
||||
fillWells(cells, getStdDev());
|
||||
break;
|
||||
}
|
||||
case EXPONENTIAL -> {
|
||||
fillWellsExponential(cells, getLambda());
|
||||
break;
|
||||
}
|
||||
case ZIPF -> {
|
||||
fillWellsZipf(cells, getZipfExponent());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Integer[] getPopulations(){
|
||||
return populations;
|
||||
}
|
||||
|
||||
public int getSize(){
|
||||
@@ -122,48 +199,122 @@ public class Plate {
|
||||
return stdDev;
|
||||
}
|
||||
|
||||
public boolean isExponential(){return exponential;}
|
||||
public DistributionType getDistributionType() { return distributionType;}
|
||||
|
||||
public double getLambda(){return lambda;}
|
||||
|
||||
public double getZipfExponent(){return zipfExponent;}
|
||||
|
||||
public double getError() {
|
||||
return error;
|
||||
}
|
||||
|
||||
public List<List<Integer[]>> getWells() {
|
||||
public List<List<String[]>> getWells() {
|
||||
return wells;
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in all wells
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int... sIndices){
|
||||
return this.assayWellsSequenceS(0, size, sIndices);
|
||||
}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a specific well
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int n, int... sIndices) { return this.assayWellsSequenceS(n, n+1, sIndices);}
|
||||
|
||||
//returns a map of the counts of the sequence at cell index sIndex, in a range of wells
|
||||
public Map<Integer, Integer> assayWellsSequenceS(int start, int end, int... sIndices) {
|
||||
Map<Integer,Integer> assay = new HashMap<>();
|
||||
for(int pIndex: sIndices){
|
||||
for(int i = start; i < end; i++){
|
||||
countSequences(assay, wells.get(i), pIndex);
|
||||
}
|
||||
}
|
||||
return assay;
|
||||
}
|
||||
//For the sequences at cell indices sIndices, counts number of unique sequences in the given well into the given map
|
||||
private void countSequences(Map<Integer, Integer> wellMap, List<Integer[]> well, int... sIndices) {
|
||||
for(Integer[] cell : well) {
|
||||
//For the sequences at cell indices sIndices, counts number of unique sequences in all wells.
|
||||
//Also simulates sequence read errors with given probabilities.
|
||||
//Returns a map of SequenceRecords containing plate data for all sequences read.
|
||||
//TODO actually implement usage of misreadSequences - DONE
|
||||
public Map<String, SequenceRecord> countSequences(Integer readDepth, Double readErrorRate,
|
||||
Double errorCollisionRate, Double realSequenceCollisionRate, int... sIndices) {
|
||||
SequenceType[] sequenceTypes = EnumSet.allOf(SequenceType.class).toArray(new SequenceType[0]);
|
||||
//Map of all real sequences read. Keys are sequences, values are ways sequence has been misread.
|
||||
Map<String, List<String>> sequencesAndMisreads = new HashMap<>();
|
||||
//Map of all sequences read. Keys are sequences, values are associated SequenceRecords
|
||||
Map<String, SequenceRecord> sequenceMap = new LinkedHashMap<>();
|
||||
//get list of all distinct, real sequences
|
||||
String[] realSequences = assayWells(sIndices).toArray(new String[0]);
|
||||
for (int well = 0; well < size; well++) {
|
||||
for (String[] cell: wells.get(well)) {
|
||||
for (int sIndex: sIndices) {
|
||||
if(cell[sIndex] != -1){
|
||||
wellMap.merge(cell[sIndex], 1, (oldValue, newValue) -> oldValue + newValue);
|
||||
//the sequence being read
|
||||
String currentSequence = cell[sIndex];
|
||||
//skip dropout sequences, which have value -1
|
||||
if (!"-1".equals(currentSequence)) {
|
||||
//keep rereading the sequence until the read depth is reached
|
||||
for (int j = 0; j < readDepth; j++) {
|
||||
//The sequence is misread
|
||||
if (rand.nextDouble() < readErrorRate) {
|
||||
//The sequence hasn't been read or misread before
|
||||
if (!sequencesAndMisreads.containsKey(currentSequence)) {
|
||||
sequencesAndMisreads.put(currentSequence, new ArrayList<>());
|
||||
}
|
||||
//The specific misread hasn't happened before
|
||||
if (rand.nextDouble() >= errorCollisionRate || sequencesAndMisreads.get(currentSequence).size() == 0) {
|
||||
//The misread doesn't collide with a real sequence already on the plate and some sequences have already been read
|
||||
if(rand.nextDouble() >= realSequenceCollisionRate || !sequenceMap.isEmpty()){
|
||||
StringBuilder spurious = new StringBuilder(currentSequence);
|
||||
for (int k = 0; k <= sequencesAndMisreads.get(currentSequence).size(); k++) {
|
||||
spurious.append("*");
|
||||
}
|
||||
//New sequence record for the spurious sequence
|
||||
SequenceRecord tmp = new SequenceRecord(spurious.toString(), sequenceTypes[sIndex]);
|
||||
tmp.addRead(well);
|
||||
sequenceMap.put(spurious.toString(), tmp);
|
||||
//add spurious sequence to list of misreads for the real sequence
|
||||
sequencesAndMisreads.get(currentSequence).add(spurious.toString());
|
||||
}
|
||||
//The misread collides with a real sequence already read from plate
|
||||
else {
|
||||
String wrongSequence;
|
||||
do{
|
||||
//get a random real sequence that's been read from the plate before
|
||||
int index = rand.nextInt(realSequences.length);
|
||||
wrongSequence = realSequences[index];
|
||||
//make sure it's not accidentally the *right* sequence
|
||||
//Also that it's not a wrong sequence already in the misread list
|
||||
} while(currentSequence.equals(wrongSequence) || sequencesAndMisreads.get(currentSequence).contains(wrongSequence));
|
||||
//update the SequenceRecord for wrongSequence
|
||||
sequenceMap.get(wrongSequence).addRead(well);
|
||||
//add wrongSequence to the misreads for currentSequence
|
||||
sequencesAndMisreads.get(currentSequence).add(wrongSequence);
|
||||
}
|
||||
}
|
||||
}
|
||||
//The sequence is read correctly
|
||||
else {
|
||||
//the sequence hasn't been read before
|
||||
if (!sequenceMap.containsKey(currentSequence)) {
|
||||
//create new record for the sequence
|
||||
SequenceRecord tmp = new SequenceRecord(currentSequence, sequenceTypes[sIndex]);
|
||||
//add this read to the sequence record
|
||||
tmp.addRead(well);
|
||||
//add the sequence and its record to the sequence map
|
||||
sequenceMap.put(currentSequence, tmp);
|
||||
//add the sequence to the sequences and misreads map
|
||||
sequencesAndMisreads.put(currentSequence, new ArrayList<>());
|
||||
}
|
||||
//the sequence has been read before
|
||||
else {
|
||||
//get the sequence's record and add this read to it
|
||||
sequenceMap.get(currentSequence).addRead(well);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return sequenceMap;
|
||||
}
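A hypothetical call to the method above (the plate variable, read depth, and rates are illustrative only):

Map<String, SequenceRecord> reads = plate.countSequences(
        10, 0.01, 0.5, 0.25,
        SequenceType.CDR3_ALPHA.ordinal(), SequenceType.CDR3_BETA.ordinal());
for (SequenceRecord rec : reads.values()) {
    System.out.println(rec.getSequence() + ": " + rec.getOccupancy()
            + " wells, " + rec.getReadCount() + " total reads");
}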
|
||||
|
||||
private HashSet<String> assayWells(int[] indices) {
|
||||
HashSet<String> allSequences = new HashSet<>();
|
||||
for (List<String[]> well: wells) {
|
||||
for (String[] cell: well) {
|
||||
for(int index: indices) {
|
||||
allSequences.add(cell[index]);
|
||||
}
|
||||
}
|
||||
}
|
||||
return allSequences;
|
||||
}
|
||||
|
||||
public String getSourceFileName() {
|
||||
return sourceFile;
|
||||
}
|
||||
|
||||
public String getFilename() { return filename; }
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ import java.util.regex.Pattern;
|
||||
|
||||
public class PlateFileReader {
|
||||
|
||||
private List<List<Integer[]>> wells = new ArrayList<>();
|
||||
private List<List<String[]>> wells = new ArrayList<>();
|
||||
private String filename;
|
||||
|
||||
public PlateFileReader(String filename){
|
||||
@@ -32,17 +32,17 @@ public class PlateFileReader {
|
||||
CSVParser parser = new CSVParser(reader, plateFileFormat);
|
||||
){
|
||||
for(CSVRecord record: parser.getRecords()) {
|
||||
List<Integer[]> well = new ArrayList<>();
|
||||
List<String[]> well = new ArrayList<>();
|
||||
for(String s: record) {
|
||||
if(!"".equals(s)) {
|
||||
String[] intString = s.replaceAll("\\[", "")
|
||||
String[] sequences = s.replaceAll("\\[", "")
|
||||
.replaceAll("]", "")
|
||||
.replaceAll(" ", "")
|
||||
.split(",");
|
||||
//System.out.println(intString);
|
||||
Integer[] arr = new Integer[intString.length];
|
||||
for (int i = 0; i < intString.length; i++) {
|
||||
arr[i] = Integer.valueOf(intString[i]);
|
||||
//System.out.println(sequences);
|
||||
String[] arr = new String[sequences.length];
|
||||
for (int i = 0; i < sequences.length; i++) {
|
||||
arr[i] = sequences[i];
|
||||
}
|
||||
well.add(arr);
|
||||
}
|
||||
@@ -56,11 +56,8 @@ public class PlateFileReader {
|
||||
|
||||
}
|
||||
|
||||
public List<List<Integer[]>> getWells() {
|
||||
return wells;
|
||||
public Plate getSamplePlate() {
|
||||
return new Plate(filename, wells);
|
||||
}
|
||||
|
||||
public String getFilename() {
|
||||
return filename;
|
||||
}
|
||||
}
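Illustrative usage of the reader (file name hypothetical; the reader is assumed to parse the CSV when constructed):

PlateFileReader reader = new PlateFileReader("plate1.csv");
Plate plate = reader.getSamplePlate();
System.out.println("Wells: " + plate.getSize()
        + ", reconstructed dropout rate: " + plate.getError());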
|
||||
@@ -7,19 +7,19 @@ import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.StandardOpenOption;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
public class PlateFileWriter {
|
||||
private int size;
|
||||
private List<List<Integer[]>> wells;
|
||||
private List<List<String[]>> wells;
|
||||
private double stdDev;
|
||||
private double lambda;
|
||||
private double zipfExponent;
|
||||
private DistributionType distributionType;
|
||||
private Double error;
|
||||
private String filename;
|
||||
private String sourceFileName;
|
||||
private String[] headers;
|
||||
private List<Integer> concentrations;
|
||||
private boolean isExponential = false;
|
||||
private Integer[] populations;
|
||||
|
||||
|
||||
public PlateFileWriter(String filename, Plate plate) {
|
||||
if(!filename.matches(".*\\.csv")){
|
||||
@@ -28,27 +28,32 @@ public class PlateFileWriter {
|
||||
this.filename = filename;
|
||||
this.sourceFileName = plate.getSourceFileName();
|
||||
this.size = plate.getSize();
|
||||
this.isExponential = plate.isExponential();
|
||||
if(isExponential) {
|
||||
this.distributionType = plate.getDistributionType();
|
||||
switch(distributionType) {
|
||||
case POISSON, GAUSSIAN -> {
|
||||
this.stdDev = plate.getStdDev();
|
||||
}
|
||||
case EXPONENTIAL -> {
|
||||
this.lambda = plate.getLambda();
|
||||
}
|
||||
else{
|
||||
this.stdDev = plate.getStdDev();
|
||||
case ZIPF -> {
|
||||
this.zipfExponent = plate.getZipfExponent();
|
||||
}
|
||||
}
|
||||
this.error = plate.getError();
|
||||
this.wells = plate.getWells();
|
||||
this.concentrations = Arrays.asList(plate.getConcentrations());
|
||||
concentrations.sort(Comparator.reverseOrder());
|
||||
this.populations = plate.getPopulations();
|
||||
Arrays.sort(populations);
|
||||
}
|
||||
|
||||
public void writePlateFile(){
|
||||
Comparator<List<Integer[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
||||
Comparator<List<String[]>> listLengthDescending = Comparator.comparingInt(List::size);
|
||||
wells.sort(listLengthDescending.reversed());
|
||||
int maxLength = wells.get(0).size();
|
||||
List<List<String>> wellsAsStrings = new ArrayList<>();
|
||||
for (List<Integer[]> w: wells){
|
||||
for (List<String[]> w: wells){
|
||||
List<String> tmp = new ArrayList<>();
|
||||
for(Integer[] c: w) {
|
||||
for(String[] c: w) {
|
||||
tmp.add(Arrays.toString(c));
|
||||
}
|
||||
wellsAsStrings.add(tmp);
|
||||
@@ -59,28 +64,32 @@ public class PlateFileWriter {
|
||||
}
|
||||
}
|
||||
|
||||
//this took forever
|
||||
List<List<String>> rows = new ArrayList<>();
|
||||
List<String> tmp = new ArrayList<>();
|
||||
for(int i = 0; i < wellsAsStrings.size(); i++){//List<Integer[]> w: wells){
|
||||
tmp.add("well " + (i+1));
|
||||
}
|
||||
rows.add(tmp);
|
||||
for(int row = 0; row < maxLength; row++){
|
||||
tmp = new ArrayList<>();
|
||||
for(List<String> c: wellsAsStrings){
|
||||
tmp.add(c.get(row));
|
||||
}
|
||||
rows.add(tmp);
|
||||
}
|
||||
//build string of well concentrations
|
||||
StringBuilder concen = new StringBuilder();
|
||||
for(Integer i: concentrations){
|
||||
concen.append(i.toString());
|
||||
concen.append(" ");
|
||||
}
|
||||
String concenString = concen.toString();
|
||||
// //this took forever and I don't use it
|
||||
// //if I wanted to use it, I'd replace printer.printRecords(wellsAsStrings) with printer.printRecords(rows)
|
||||
// List<List<String>> rows = new ArrayList<>();
|
||||
// List<String> tmp = new ArrayList<>();
|
||||
// for(int i = 0; i < wellsAsStrings.size(); i++){//List<Integer[]> w: wells){
|
||||
// tmp.add("well " + (i+1));
|
||||
// }
|
||||
// rows.add(tmp);
|
||||
// for(int row = 0; row < maxLength; row++){
|
||||
// tmp = new ArrayList<>();
|
||||
// for(List<String> c: wellsAsStrings){
|
||||
// tmp.add(c.get(row));
|
||||
// }
|
||||
// rows.add(tmp);
|
||||
// }
|
||||
|
||||
//make string out of populations array
|
||||
StringBuilder populationsStringBuilder = new StringBuilder();
|
||||
populationsStringBuilder.append(populations[0].toString());
|
||||
for(int i = 1; i < populations.length; i++){
|
||||
populationsStringBuilder.append(", ");
|
||||
populationsStringBuilder.append(populations[i].toString());
|
||||
}
|
||||
String wellPopulationsString = populationsStringBuilder.toString();
|
||||
|
||||
//set CSV format
|
||||
CSVFormat plateFileFormat = CSVFormat.Builder.create()
|
||||
.setCommentMarker('#')
|
||||
.build();
|
||||
@@ -91,13 +100,24 @@ public class PlateFileWriter {
|
||||
printer.printComment("Cell source file name: " + sourceFileName);
|
||||
printer.printComment("Each row represents one well on the plate.");
|
||||
printer.printComment("Plate size: " + size);
|
||||
printer.printComment("Well populations: " + wellPopulationsString);
|
||||
printer.printComment("Error rate: " + error);
|
||||
printer.printComment("Concentrations: " + concenString);
|
||||
if(isExponential){
|
||||
printer.printComment("Lambda: " + lambda);
|
||||
switch (distributionType) {
|
||||
case POISSON -> {
|
||||
printer.printComment("Cell frequency distribution: POISSON");
|
||||
}
|
||||
case GAUSSIAN -> {
|
||||
printer.printComment("Cell frequency distribution: GAUSSIAN");
|
||||
printer.printComment("--Standard deviation: " + stdDev);
|
||||
}
|
||||
case EXPONENTIAL -> {
|
||||
printer.printComment("Cell frequency distribution: EXPONENTIAL");
|
||||
printer.printComment("--Lambda: " + lambda);
|
||||
}
|
||||
case ZIPF -> {
|
||||
printer.printComment("Cell frequency distribution: ZIPF");
|
||||
printer.printComment("--Exponent: " + zipfExponent);
|
||||
}
|
||||
else {
|
||||
printer.printComment("Std. dev.: " + stdDev);
|
||||
}
|
||||
printer.printRecords(wellsAsStrings);
|
||||
} catch(IOException ex){
|
||||
|
||||
src/main/java/SequenceRecord.java (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
/*
|
||||
Class to represent individual sequences, holding their well occupancy and read count information.
|
||||
Will make a map of these keyed to the sequences themselves.
|
||||
Ideally, I'll be able to construct both the Vertices and the weights matrix from this map.
|
||||
|
||||
*/
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.*;
|
||||
|
||||
public class SequenceRecord implements Serializable {
|
||||
private final String sequence;
|
||||
private final SequenceType type;
|
||||
//keys are well numbers, values are read count in that well
|
||||
private final Map<Integer, Integer> wells;
|
||||
|
||||
public SequenceRecord (String sequence, SequenceType type) {
|
||||
this.sequence = sequence;
|
||||
this.type = type;
|
||||
this.wells = new LinkedHashMap<>();
|
||||
}
|
||||
|
||||
//this shouldn't be necessary, since the sequence will be the map key, but it is kept as a convenience
|
||||
public String getSequence() {
|
||||
return sequence;
|
||||
}
|
||||
|
||||
public SequenceType getSequenceType(){
|
||||
return type;
|
||||
}
|
||||
|
||||
//use this to update the record for each new read
|
||||
public void addRead(Integer wellNumber) {
|
||||
wells.merge(wellNumber,1, Integer::sum);
|
||||
}
|
||||
|
||||
//don't know if I'll ever need this
|
||||
public void addWellData(Integer wellNumber, Integer readCount) {
|
||||
wells.put(wellNumber, readCount);
|
||||
}
|
||||
|
||||
//Method to remove a well from the occupancy map.
|
||||
//Useful for cases where one sequence is misread as another sequence that isn't actually present in the well
|
||||
//This can reveal itself as an anomalously low read count in that well.
|
||||
public void deleteWell(Integer wellNumber) { wells.remove(wellNumber); }
|
||||
|
||||
public Set<Integer> getWells() {
|
||||
return wells.keySet();
|
||||
}
|
||||
|
||||
public Map<Integer, Integer> getWellOccupancies() { return wells;}
|
||||
|
||||
public boolean isInWell(Integer wellNumber) {
|
||||
return wells.containsKey(wellNumber);
|
||||
}
|
||||
|
||||
public Integer getOccupancy() {
|
||||
return wells.size();
|
||||
}
|
||||
|
||||
//read count for whole plate
|
||||
public Integer getReadCount(){
|
||||
return wells.values().stream().mapToInt(Integer::valueOf).sum();
|
||||
}
|
||||
|
||||
//read count in a specific well
|
||||
public Integer getReadCount(Integer wellNumber) {
|
||||
return wells.get(wellNumber);
|
||||
}
|
||||
}
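A short illustration of how a record accumulates plate data (sequence value hypothetical):

SequenceRecord rec = new SequenceRecord("12", SequenceType.CDR3_ALPHA);
rec.addRead(0);   // read twice in well 0
rec.addRead(0);
rec.addRead(5);   // read once in well 5
// rec.getOccupancy() == 2, rec.getReadCount() == 3, rec.getReadCount(0) == 2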
|
||||
src/main/java/SequenceType.java (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
//enum for tagging types of sequences
|
||||
//Listed in the order they appear in a cell array, so the ordinal() method returns the correct index
|
||||
public enum SequenceType {
|
||||
CDR3_ALPHA,
|
||||
CDR3_BETA,
|
||||
CDR1_ALPHA,
|
||||
CDR1_BETA
|
||||
}
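Since the constants mirror the cell-array layout, ordinal() doubles as the column index, e.g.:

int betaIndex = SequenceType.CDR3_BETA.ordinal();   // == 1, the second entry in each cell array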
|
||||
File diff suppressed because it is too large
@@ -1,694 +0,0 @@
|
||||
import org.apache.commons.cli.*;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Scanner;
|
||||
import java.util.InputMismatchException;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
//
|
||||
public class UserInterface {
|
||||
|
||||
final static Scanner sc = new Scanner(System.in);
|
||||
static int input;
|
||||
static boolean quit = false;
|
||||
|
||||
public static void main(String[] args) {
|
||||
//for now, commenting out all the command line argument stuff.
|
||||
// Refactoring to output files of graphs, so it would all need to change anyway.
|
||||
|
||||
// if(args.length != 0){
|
||||
// //These command line options are a big mess
|
||||
// //Really, I don't think command line tools are expected to work in this many different modes
|
||||
// //making cells, making plates, and matching are the sort of thing that UNIX philosophy would say
|
||||
// //should be three separate programs.
|
||||
// //There might be a way to do it with option parameters?
|
||||
//
|
||||
// Options mainOptions = new Options();
|
||||
// Option makeCells = Option.builder("cells")
|
||||
// .longOpt("make-cells")
|
||||
// .desc("Makes a file of distinct cells")
|
||||
// .build();
|
||||
// Option makePlate = Option.builder("plates")
|
||||
// .longOpt("make-plates")
|
||||
// .desc("Makes a sample plate file")
|
||||
// .build();
|
||||
// Option matchCDR3 = Option.builder("match")
|
||||
// .longOpt("match-cdr3")
|
||||
// .desc("Match CDR3s. Requires a cell sample file and any number of plate files.")
|
||||
// .build();
|
||||
// OptionGroup mainGroup = new OptionGroup();
|
||||
// mainGroup.addOption(makeCells);
|
||||
// mainGroup.addOption(makePlate);
|
||||
// mainGroup.addOption(matchCDR3);
|
||||
// mainGroup.setRequired(true);
|
||||
// mainOptions.addOptionGroup(mainGroup);
|
||||
//
|
||||
// //Reuse clones of this for other options groups, rather than making it lots of times
|
||||
// Option outputFile = Option.builder("o")
|
||||
// .longOpt("output-file")
|
||||
// .hasArg()
|
||||
// .argName("filename")
|
||||
// .desc("Name of output file")
|
||||
// .build();
|
||||
// mainOptions.addOption(outputFile);
|
||||
//
|
||||
// //Options cellOptions = new Options();
|
||||
// Option numCells = Option.builder("nc")
|
||||
// .longOpt("num-cells")
|
||||
// .desc("The number of distinct cells to generate")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .build();
|
||||
// mainOptions.addOption(numCells);
|
||||
// Option cdr1Freq = Option.builder("d")
|
||||
// .longOpt("peptide-diversity-factor")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("Number of distinct CDR3s for every CDR1")
|
||||
// .build();
|
||||
// mainOptions.addOption(cdr1Freq);
|
||||
// //Option cellOutput = (Option) outputFile.clone();
|
||||
// //cellOutput.setRequired(true);
|
||||
// //mainOptions.addOption(cellOutput);
|
||||
//
|
||||
// //Options plateOptions = new Options();
|
||||
// Option inputCells = Option.builder("c")
|
||||
// .longOpt("cell-file")
|
||||
// .hasArg()
|
||||
// .argName("file")
|
||||
// .desc("The cell sample file used for filling wells")
|
||||
// .build();
|
||||
// mainOptions.addOption(inputCells);
|
||||
// Option numWells = Option.builder("w")
|
||||
// .longOpt("num-wells")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("The number of wells on each plate")
|
||||
// .build();
|
||||
// mainOptions.addOption(numWells);
|
||||
// Option numPlates = Option.builder("np")
|
||||
// .longOpt("num-plates")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("The number of plate files to output")
|
||||
// .build();
|
||||
// mainOptions.addOption(numPlates);
|
||||
// //Option plateOutput = (Option) outputFile.clone();
|
||||
// //plateOutput.setRequired(true);
|
||||
// //plateOutput.setDescription("Prefix for plate output filenames");
|
||||
// //mainOptions.addOption(plateOutput);
|
||||
// Option plateErr = Option.builder("err")
|
||||
// .longOpt("drop-out-rate")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("Well drop-out rate. (Probability between 0 and 1)")
|
||||
// .build();
|
||||
// mainOptions.addOption(plateErr);
|
||||
// Option plateConcentrations = Option.builder("t")
|
||||
// .longOpt("t-cells-per-well")
|
||||
// .hasArgs()
|
||||
// .argName("number 1, number 2, ...")
|
||||
// .desc("Number of T cells per well for each plate section")
|
||||
// .build();
|
||||
// mainOptions.addOption(plateConcentrations);
|
||||
//
|
||||
////different distributions, mutually exclusive
|
||||
// OptionGroup plateDistributions = new OptionGroup();
|
||||
// Option plateExp = Option.builder("exponential")
|
||||
// .desc("Sample from distinct cells with exponential frequency distribution")
|
||||
// .build();
|
||||
// plateDistributions.addOption(plateExp);
|
||||
// Option plateGaussian = Option.builder("gaussian")
|
||||
// .desc("Sample from distinct cells with gaussain frequency distribution")
|
||||
// .build();
|
||||
// plateDistributions.addOption(plateGaussian);
|
||||
// Option platePoisson = Option.builder("poisson")
|
||||
// .desc("Sample from distinct cells with poisson frequency distribution")
|
||||
// .build();
|
||||
// plateDistributions.addOption(platePoisson);
|
||||
// mainOptions.addOptionGroup(plateDistributions);
|
||||
//
|
||||
// Option plateStdDev = Option.builder("stddev")
|
||||
// .desc("Standard deviation for gaussian distribution")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .build();
|
||||
// mainOptions.addOption(plateStdDev);
|
||||
//
|
||||
// Option plateLambda = Option.builder("lambda")
|
||||
// .desc("Lambda for exponential distribution")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .build();
|
||||
// mainOptions.addOption(plateLambda);
|
||||
//
|
||||
//
|
||||
//
|
||||
////
|
||||
//// String cellFile, String filename, Double stdDev,
|
||||
//// Integer numWells, Integer numSections,
|
||||
//// Integer[] concentrations, Double dropOutRate
|
||||
////
|
||||
//
|
||||
// //Options matchOptions = new Options();
|
||||
// inputCells.setDescription("The cell sample file to be used for matching.");
|
||||
// mainOptions.addOption(inputCells);
|
||||
// Option lowThresh = Option.builder("low")
|
||||
// .longOpt("low-threshold")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("Sets the minimum occupancy overlap to attempt matching")
|
||||
// .build();
|
||||
// mainOptions.addOption(lowThresh);
|
||||
// Option highThresh = Option.builder("high")
|
||||
// .longOpt("high-threshold")
|
||||
// .hasArg()
|
||||
// .argName("number")
|
||||
// .desc("Sets the maximum occupancy overlap to attempt matching")
|
||||
// .build();
|
||||
// mainOptions.addOption(highThresh);
|
||||
// Option occDiff = Option.builder("occdiff")
|
||||
// .longOpt("occupancy-difference")
|
||||
// .hasArg()
|
||||
// .argName("Number")
|
||||
// .desc("Maximum difference in alpha/beta occupancy to attempt matching")
|
||||
// .build();
|
||||
// mainOptions.addOption(occDiff);
|
||||
// Option overlapPer = Option.builder("ovper")
|
||||
// .longOpt("overlap-percent")
|
||||
// .hasArg()
|
||||
// .argName("Percent")
|
||||
// .desc("Minimum overlap percent to attempt matching (0 -100)")
|
||||
// .build();
|
||||
// mainOptions.addOption(overlapPer);
|
||||
// Option inputPlates = Option.builder("p")
|
||||
// .longOpt("plate-files")
|
||||
// .hasArgs()
|
||||
// .desc("Plate files to match")
|
||||
// .build();
|
||||
// mainOptions.addOption(inputPlates);
|
||||
//
|
||||
//
|
||||
//
|
||||
// CommandLineParser parser = new DefaultParser();
|
||||
// try {
|
||||
// CommandLine line = parser.parse(mainOptions, args);
|
||||
// if(line.hasOption("match")){
|
||||
// //line = parser.parse(mainOptions, args);
|
||||
// String cellFile = line.getOptionValue("c");
|
||||
// Integer lowThreshold = Integer.valueOf(line.getOptionValue(lowThresh));
|
||||
// Integer highThreshold = Integer.valueOf(line.getOptionValue(highThresh));
|
||||
// Integer occupancyDifference = Integer.valueOf(line.getOptionValue(occDiff));
|
||||
// Integer overlapPercent = Integer.valueOf(line.getOptionValue(overlapPer));
|
||||
// for(String plate: line.getOptionValues("p")) {
|
||||
// matchCDR3s(cellFile, plate, lowThreshold, highThreshold, occupancyDifference, overlapPercent);
|
||||
// }
|
||||
// }
|
||||
// else if(line.hasOption("cells")){
|
||||
// //line = parser.parse(mainOptions, args);
|
||||
// String filename = line.getOptionValue("o");
|
||||
// Integer numDistCells = Integer.valueOf(line.getOptionValue("nc"));
|
||||
// Integer freq = Integer.valueOf(line.getOptionValue("d"));
|
||||
// makeCells(filename, numDistCells, freq);
|
||||
// }
|
||||
// else if(line.hasOption("plates")){
|
||||
// //line = parser.parse(mainOptions, args);
|
||||
// String cellFile = line.getOptionValue("c");
|
||||
// String filenamePrefix = line.getOptionValue("o");
|
||||
// Integer numWellsOnPlate = Integer.valueOf(line.getOptionValue("w"));
|
||||
// Integer numPlatesToMake = Integer.valueOf(line.getOptionValue("np"));
|
||||
// String[] concentrationsToUseString = line.getOptionValues("t");
|
||||
// Integer numSections = concentrationsToUseString.length;
|
||||
//
|
||||
// Integer[] concentrationsToUse = new Integer[numSections];
|
||||
// for(int i = 0; i <numSections; i++){
|
||||
// concentrationsToUse[i] = Integer.valueOf(concentrationsToUseString[i]);
|
||||
// }
|
||||
// Double dropOutRate = Double.valueOf(line.getOptionValue("err"));
|
||||
// if(line.hasOption("exponential")){
|
||||
// Double lambda = Double.valueOf(line.getOptionValue("lambda"));
|
||||
// for(int i = 1; i <= numPlatesToMake; i++){
|
||||
// makePlateExp(cellFile, filenamePrefix + i, lambda, numWellsOnPlate,
|
||||
// concentrationsToUse,dropOutRate);
|
||||
// }
|
||||
// }
|
||||
// else if(line.hasOption("gaussian")){
|
||||
// Double stdDev = Double.valueOf(line.getOptionValue("std-dev"));
|
||||
// for(int i = 1; i <= numPlatesToMake; i++){
|
||||
// makePlate(cellFile, filenamePrefix + i, stdDev, numWellsOnPlate,
|
||||
// concentrationsToUse,dropOutRate);
|
||||
// }
|
||||
//
|
||||
// }
|
||||
// else if(line.hasOption("poisson")){
|
||||
// for(int i = 1; i <= numPlatesToMake; i++){
|
||||
// makePlatePoisson(cellFile, filenamePrefix + i, numWellsOnPlate,
|
||||
// concentrationsToUse,dropOutRate);
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// catch (ParseException exp) {
|
||||
// System.err.println("Parsing failed. Reason: " + exp.getMessage());
|
||||
// }
|
||||
// }
|
||||
// else {
|
||||
while (!quit) {
|
||||
System.out.println();
|
||||
System.out.println("--------BiGPairSEQ SIMULATOR--------");
|
||||
System.out.println("ALPHA/BETA T CELL RECEPTOR MATCHING");
|
||||
System.out.println(" USING WEIGHTED BIPARTITE GRAPHS ");
|
||||
System.out.println("------------------------------------");
|
||||
System.out.println("Please select an option:");
|
||||
System.out.println("1) Generate a population of distinct cells");
|
||||
System.out.println("2) Generate a sample plate of T cells");
|
||||
System.out.println("3) Generate CDR3 alpha/beta occupancy data and overlap graph");
|
||||
System.out.println("4) Simulate bipartite graph CDR3 alpha/beta matching (BiGpairSEQ)");
|
||||
//Need to re-do the CDR3/CDR1 matching to correspond to new pattern
|
||||
//System.out.println("5) Generate CDR3/CDR1 occupancy graph");
|
||||
//System.out.println("6) Simulate CDR3/CDR1 T cell matching");
|
||||
System.out.println("9) About/Acknowledgments");
|
||||
System.out.println("0) Exit");
|
||||
try {
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> makeCells();
|
||||
case 2 -> makePlate();
|
||||
case 3 -> makeCDR3Graph();
|
||||
case 4 -> matchCDR3s();
|
||||
//case 6 -> matchCellsCDR1();
|
||||
case 9 -> acknowledge();
|
||||
case 0 -> quit = true;
|
||||
default -> throw new InputMismatchException("Invalid input.");
|
||||
}
|
||||
} catch (InputMismatchException | IOException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
}
|
||||
sc.close();
|
||||
// }
|
||||
}
|
||||
|
||||
private static void makeCells() {
|
||||
String filename = null;
|
||||
Integer numCells = 0;
|
||||
Integer cdr1Freq = 1;
|
||||
try {
|
||||
System.out.println("\nSimulated T-Cells consist of integer values representing:\n" +
|
||||
"* a pair of alpha and beta CDR3 peptides (unique within simulated population)\n" +
|
||||
"* a pair of alpha and beta CDR1 peptides (not necessarily unique).");
|
||||
System.out.println("\nThe cells will be written to a CSV file.");
|
||||
System.out.print("Please enter a file name: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nCDR3 sequences are more diverse than CDR1 sequences.");
|
||||
System.out.println("Please enter the factor by which distinct CDR3s outnumber CDR1s: ");
|
||||
cdr1Freq = sc.nextInt();
|
||||
System.out.print("\nPlease enter the number of T-cells to generate: ");
|
||||
numCells = sc.nextInt();
|
||||
if(numCells <= 0){
|
||||
throw new InputMismatchException("Number of cells must be a positive integer.");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
CellSample sample = Simulator.generateCellSample(numCells, cdr1Freq);
|
||||
assert filename != null;
|
||||
CellFileWriter writer = new CellFileWriter(filename, sample);
|
||||
writer.writeCellsToFile();
|
||||
System.gc();
|
||||
}
|
||||
|
||||
// //for calling from command line
|
||||
// private static void makeCells(String filename, Integer numCells, Integer cdr1Freq){
|
||||
// CellSample sample = Simulator.generateCellSample(numCells, cdr1Freq);
|
||||
// CellFileWriter writer = new CellFileWriter(filename, sample);
|
||||
// writer.writeCellsToFile();
|
||||
// }
|
||||
//
|
||||
// private static void makePlateExp(String cellFile, String filename, Double lambda,
|
||||
// Integer numWells, Integer[] concentrations, Double dropOutRate){
|
||||
// CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
// Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
||||
// samplePlate.fillWellsExponential(cellReader.getFilename(), cellReader.getCells(), lambda);
|
||||
// PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
||||
// writer.writePlateFile();
|
||||
// }
|
||||
//
|
||||
// private static void makePlatePoisson(String cellFile, String filename, Integer numWells,
|
||||
// Integer[] concentrations, Double dropOutRate){
|
||||
// CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
// Double stdDev = Math.sqrt(cellReader.getCellCount());
|
||||
// Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
||||
// samplePlate.fillWells(cellReader.getFilename(), cellReader.getCells(), stdDev);
|
||||
// PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
||||
// writer.writePlateFile();
|
||||
// }
|
||||
//
|
||||
// private static void makePlate(String cellFile, String filename, Double stdDev,
|
||||
// Integer numWells, Integer[] concentrations, Double dropOutRate){
|
||||
// CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
// Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
||||
// samplePlate.fillWells(cellReader.getFilename(), cellReader.getCells(), stdDev);
|
||||
// PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
||||
// writer.writePlateFile();
|
||||
// }
|
||||
|
||||
//Output a CSV of sample plate
|
||||
private static void makePlate() {
|
||||
String cellFile = null;
|
||||
String filename = null;
|
||||
Double stdDev = 0.0;
|
||||
Integer numWells = 0;
|
||||
Integer numSections;
|
||||
Integer[] concentrations = {1};
|
||||
Double dropOutRate = 0.0;
|
||||
boolean poisson = false;
|
||||
boolean exponential = false;
|
||||
double lambda = 1.5;
|
||||
try {
|
||||
System.out.println("\nSimulated sample plates consist of:");
|
||||
System.out.println("* a number of wells");
|
||||
System.out.println(" * separated into one or more sections");
|
||||
System.out.println(" * each of which has a set quantity of cells per well");
|
||||
System.out.println(" * selected from a statistical distribution of distinct cells");
|
||||
System.out.println(" * with a set dropout rate for individual sequences within a cell");
|
||||
System.out.println("\nMaking a sample plate requires a population of distinct cells");
|
||||
System.out.print("Please enter name of an existing cell sample file: ");
|
||||
cellFile = sc.next();
|
||||
System.out.println("\nThe sample plate will be written to a CSV file");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nSelect T-cell frequency distribution function");
|
||||
System.out.println("1) Poisson");
|
||||
System.out.println("2) Gaussian");
|
||||
System.out.println("3) Exponential");
|
||||
System.out.println("(Note: approximate distribution in original paper is exponential, lambda = 0.6)");
|
||||
System.out.println("(lambda value approximated from slope of log-log graph in figure 4c)");
|
||||
System.out.println("(Note: wider distributions are more memory intensive to match)");
|
||||
System.out.print("Enter selection value: ");
|
||||
input = sc.nextInt();
|
||||
switch (input) {
|
||||
case 1 -> poisson = true;
|
||||
case 2 -> {
|
||||
System.out.println("How many distinct T-cells within one standard deviation of peak frequency?");
|
||||
System.out.println("(Note: wider distributions are more memory intensive to match)");
|
||||
stdDev = sc.nextDouble();
|
||||
if (stdDev <= 0.0) {
|
||||
throw new InputMismatchException("Value must be positive.");
|
||||
}
|
||||
}
|
||||
case 3 -> {
|
||||
exponential = true;
|
||||
System.out.println("Please enter lambda value for exponential distribution.");
|
||||
lambda = sc.nextDouble();
|
||||
if (lambda <= 0.0) {
|
||||
throw new InputMismatchException("Value must be positive.");
|
||||
}
|
||||
}
|
||||
default -> {
|
||||
System.out.println("Invalid input. Defaulting to exponential.");
|
||||
exponential = true;
|
||||
}
|
||||
}
|
||||
System.out.print("\nNumber of wells on plate: ");
|
||||
numWells = sc.nextInt();
|
||||
if(numWells < 1){
|
||||
throw new InputMismatchException("No wells on plate");
|
||||
}
|
||||
System.out.println("\nThe plate can be evenly sectioned to allow multiple concentrations of T-cells/well");
|
||||
System.out.println("How many sections would you like to make (minimum 1)?");
|
||||
numSections = sc.nextInt();
|
||||
if(numSections < 1) {
|
||||
throw new InputMismatchException("Too few sections.");
|
||||
}
|
||||
else if (numSections > numWells) {
|
||||
throw new InputMismatchException("Cannot have more sections than wells.");
|
||||
}
|
||||
int i = 1;
|
||||
concentrations = new Integer[numSections];
|
||||
while(numSections > 0) {
|
||||
System.out.print("Enter number of T-cells per well in section " + i +": ");
|
||||
concentrations[i - 1] = sc.nextInt();
|
||||
i++;
|
||||
numSections--;
|
||||
}
|
||||
System.out.println("\nErrors in amplification can induce a well dropout rate for sequences");
|
||||
System.out.print("Enter well dropout rate (0.0 to 1.0): ");
|
||||
dropOutRate = sc.nextDouble();
|
||||
if(dropOutRate < 0.0 || dropOutRate > 1.0) {
|
||||
throw new InputMismatchException("The well dropout rate must be in the range [0.0, 1.0]");
|
||||
}
|
||||
}catch(InputMismatchException ex){
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||
assert cellFile != null;
|
||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
if(exponential){
|
||||
Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
||||
samplePlate.fillWellsExponential(cellReader.getFilename(), cellReader.getCells(), lambda);
|
||||
PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
||||
writer.writePlateFile();
|
||||
}
|
||||
else {
|
||||
if (poisson) {
|
||||
stdDev = Math.sqrt(cellReader.getCellCount()); //gaussian with square root of elements approximates poisson
|
||||
}
|
||||
Plate samplePlate = new Plate(numWells, dropOutRate, concentrations);
|
||||
samplePlate.fillWells(cellReader.getFilename(), cellReader.getCells(), stdDev);
|
||||
assert filename != null;
|
||||
PlateFileWriter writer = new PlateFileWriter(filename, samplePlate);
|
||||
System.out.println("Writing Sample Plate to file");
|
||||
writer.writePlateFile();
|
||||
System.out.println("Sample Plate written to file: " + filename);
|
||||
System.gc();
|
||||
}
|
||||
}
|
||||
|
||||
//Output serialized binary of GraphAndMapData object
|
||||
private static void makeCDR3Graph() {
|
||||
String filename = null;
|
||||
String cellFile = null;
|
||||
String plateFile = null;
|
||||
|
||||
try {
|
||||
String str = "\nGenerating bipartite weighted graph encoding occupancy overlap data ";
|
||||
str = str.concat("\nrequires a cell sample file and a sample plate file.");
|
||||
System.out.println(str);
|
||||
System.out.print("\nPlease enter name of an existing cell sample file: ");
|
||||
cellFile = sc.next();
|
||||
System.out.print("\nPlease enter name of an existing sample plate file: ");
|
||||
plateFile = sc.next();
|
||||
System.out.println("\nThe graph and occupancy data will be written to a serialized binary file.");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
System.out.println("Reading Cell Sample file: " + cellFile);
|
||||
assert cellFile != null;
|
||||
CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
System.out.println("Reading Sample Plate file: " + plateFile);
|
||||
assert plateFile != null;
|
||||
PlateFileReader plateReader = new PlateFileReader(plateFile);
|
||||
Plate plate = new Plate(plateReader.getFilename(), plateReader.getWells());
|
||||
if (cellReader.getCells().size() == 0){
|
||||
System.out.println("No cell sample found.");
|
||||
System.out.println("Returning to main menu.");
|
||||
}
|
||||
else if(plate.getWells().size() == 0 || plate.getConcentrations().length == 0){
|
||||
System.out.println("No sample plate found.");
|
||||
System.out.println("Returning to main menu.");
|
||||
}
|
||||
else{
|
||||
List<Integer[]> cells = cellReader.getCells();
|
||||
GraphWithMapData data = Simulator.makeGraph(cells, plate, true);
|
||||
assert filename != null;
|
||||
GraphDataObjectWriter dataWriter = new GraphDataObjectWriter(filename, data);
|
||||
System.out.println("Writing graph and occupancy data to file. This may take some time.");
|
||||
System.out.println("File I/O time is not included in results.");
|
||||
dataWriter.writeDataToFile();
|
||||
System.out.println("Graph and Data file written to: " + filename);
|
||||
System.gc();
|
||||
}
|
||||
}
|
||||
|
||||
//Simulate matching and output CSV file of results
|
||||
private static void matchCDR3s() throws IOException {
|
||||
String filename = null;
|
||||
String dataFilename = null;
|
||||
Integer lowThreshold = 0;
|
||||
Integer highThreshold = Integer.MAX_VALUE;
|
||||
Integer maxOccupancyDiff = Integer.MAX_VALUE;
|
||||
Integer minOverlapPercent = 0;
|
||||
try {
|
||||
System.out.println("\nBiGpairSEQ simulation requires an occupancy data and overlap graph file");
|
||||
System.out.println("Please enter name of an existing graph and occupancy data file: ");
|
||||
dataFilename = sc.next();
|
||||
System.out.println("The matching results will be written to a file.");
|
||||
System.out.print("Please enter a name for the output file: ");
|
||||
filename = sc.next();
|
||||
System.out.println("\nWhat is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
lowThreshold = sc.nextInt();
|
||||
if(lowThreshold < 1){
|
||||
throw new InputMismatchException("Minimum value for low threshold set to 1");
|
||||
}
|
||||
System.out.println("\nWhat is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
highThreshold = sc.nextInt();
|
||||
System.out.println("\nWhat is the maximum difference in alpha/beta occupancy to attempt matching?");
|
||||
maxOccupancyDiff = sc.nextInt();
|
||||
System.out.println("\nWell overlap percentage = pair overlap / sequence occupancy");
|
||||
System.out.println("What is the minimum well overlap percentage to attempt matching? (0 to 100)");
|
||||
minOverlapPercent = sc.nextInt();
|
||||
if (minOverlapPercent < 0 || minOverlapPercent > 100) {
|
||||
throw new InputMismatchException("Value outside range. Minimum percent set to 0");
|
||||
}
|
||||
} catch (InputMismatchException ex) {
|
||||
System.out.println(ex);
|
||||
sc.next();
|
||||
}
|
||||
//read object data from file
|
||||
System.out.println("Reading graph data from file. This may take some time");
|
||||
System.out.println("File I/O time is not included in results");
|
||||
assert dataFilename != null;
|
||||
GraphDataObjectReader dataReader = new GraphDataObjectReader(dataFilename);
|
||||
GraphWithMapData data = dataReader.getData();
|
||||
//set source file name
|
||||
data.setSourceFilename(dataFilename);
|
||||
//simulate matching
|
||||
MatchingResult results = Simulator.matchCDR3s(data, dataFilename, lowThreshold, highThreshold, maxOccupancyDiff,
|
||||
minOverlapPercent, true);
|
||||
//write results to file
|
||||
assert filename != null;
|
||||
MatchingFileWriter writer = new MatchingFileWriter(filename, results);
|
||||
System.out.println("Writing results to file");
|
||||
writer.writeResultsToFile();
|
||||
System.out.println("Results written to file: " + filename);
|
||||
System.gc();
|
||||
}
|
||||
|
||||
///////
|
||||
//Rewrite this to fit new matchCDR3 method with file I/O
|
||||
///////
|
||||
// public static void matchCellsCDR1(){
|
||||
// /*
|
||||
// The idea here is that we'll get the CDR3 alpha/beta matches first. Then we'll try to match CDR3s to CDR1s by
|
||||
// looking at the top two matches for each CDR3. If CDR3s in the same cell simply swap CDR1s, we assume a correct
|
||||
// match
|
||||
// */
|
||||
// String filename = null;
|
||||
// String preliminaryResultsFilename = null;
|
||||
// String cellFile = null;
|
||||
// String plateFile = null;
|
||||
// Integer lowThresholdCDR3 = 0;
|
||||
// Integer highThresholdCDR3 = Integer.MAX_VALUE;
|
||||
// Integer maxOccupancyDiffCDR3 = 96; //no filtering if max difference is all wells by default
|
||||
// Integer minOverlapPercentCDR3 = 0; //no filtering if min percentage is zero by default
|
||||
// Integer lowThresholdCDR1 = 0;
|
||||
// Integer highThresholdCDR1 = Integer.MAX_VALUE;
|
||||
// boolean outputCDR3Matches = false;
|
||||
// try {
|
||||
// System.out.println("\nSimulated experiment requires a cell sample file and a sample plate file.");
|
||||
// System.out.print("Please enter name of an existing cell sample file: ");
|
||||
// cellFile = sc.next();
|
||||
// System.out.print("Please enter name of an existing sample plate file: ");
|
||||
// plateFile = sc.next();
|
||||
// System.out.println("The matching results will be written to a file.");
|
||||
// System.out.print("Please enter a name for the output file: ");
|
||||
// filename = sc.next();
|
||||
// System.out.println("What is the minimum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
// lowThresholdCDR3 = sc.nextInt();
|
||||
// if(lowThresholdCDR3 < 1){
|
||||
// throw new InputMismatchException("Minimum value for low threshold is 1");
|
||||
// }
|
||||
// System.out.println("What is the maximum number of CDR3 alpha/beta overlap wells to attempt matching?");
|
||||
// highThresholdCDR3 = sc.nextInt();
|
||||
// System.out.println("What is the maximum difference in CDR3 alpha/beta occupancy to attempt matching?");
|
||||
// maxOccupancyDiffCDR3 = sc.nextInt();
|
||||
// System.out.println("What is the minimum CDR3 overlap percentage to attempt matching? (0 - 100)");
|
||||
// minOverlapPercentCDR3 = sc.nextInt();
|
||||
// if (minOverlapPercentCDR3 < 0 || minOverlapPercentCDR3 > 100) {
|
||||
// throw new InputMismatchException("Value outside range. Minimum percent set to 0");
|
||||
// }
|
||||
// System.out.println("What is the minimum number of CDR3/CDR1 overlap wells to attempt matching?");
|
||||
// lowThresholdCDR1 = sc.nextInt();
|
||||
// if(lowThresholdCDR1 < 1){
|
||||
// throw new InputMismatchException("Minimum value for low threshold is 1");
|
||||
// }
|
||||
// System.out.println("What is the maximum number of CDR3/CDR1 overlap wells to attempt matching?");
|
||||
// highThresholdCDR1 = sc.nextInt();
|
||||
// System.out.println("Matching CDR3s to CDR1s requires first matching CDR3 alpha/betas.");
|
||||
// System.out.println("Output a file for CDR3 alpha/beta match results as well?");
|
||||
// System.out.print("Please enter y/n: ");
|
||||
// String ans = sc.next();
|
||||
// Pattern pattern = Pattern.compile("(?:yes|y)", Pattern.CASE_INSENSITIVE);
|
||||
// Matcher matcher = pattern.matcher(ans);
|
||||
// if(matcher.matches()){
|
||||
// outputCDR3Matches = true;
|
||||
// System.out.println("Please enter filename for CDR3 alpha/beta match results");
|
||||
// preliminaryResultsFilename = sc.next();
|
||||
// System.out.println("CDR3 alpha/beta matches will be output to file");
|
||||
// }
|
||||
// else{
|
||||
// System.out.println("CDR3 alpha/beta matches will not be output to file");
|
||||
// }
|
||||
// } catch (InputMismatchException ex) {
|
||||
// System.out.println(ex);
|
||||
// sc.next();
|
||||
// }
|
||||
// CellFileReader cellReader = new CellFileReader(cellFile);
|
||||
// PlateFileReader plateReader = new PlateFileReader(plateFile);
|
||||
// Plate plate = new Plate(plateReader.getFilename(), plateReader.getWells());
|
||||
// if (cellReader.getCells().size() == 0){
|
||||
// System.out.println("No cell sample found.");
|
||||
// System.out.println("Returning to main menu.");
|
||||
// }
|
||||
// else if(plate.getWells().size() == 0){
|
||||
// System.out.println("No sample plate found.");
|
||||
// System.out.println("Returning to main menu.");
|
||||
//
|
||||
// }
|
||||
// else{
|
||||
// if(highThresholdCDR3 >= plate.getSize()){
|
||||
// highThresholdCDR3 = plate.getSize() - 1;
|
||||
// }
|
||||
// if(highThresholdCDR1 >= plate.getSize()){
|
||||
// highThresholdCDR1 = plate.getSize() - 1;
|
||||
// }
|
||||
// List<Integer[]> cells = cellReader.getCells();
|
||||
// MatchingResult preliminaryResults = Simulator.matchCDR3s(cells, plate, lowThresholdCDR3, highThresholdCDR3,
|
||||
// maxOccupancyDiffCDR3, minOverlapPercentCDR3, true);
|
||||
// MatchingResult[] results = Simulator.matchCDR1s(cells, plate, lowThresholdCDR1,
|
||||
// highThresholdCDR1, preliminaryResults);
|
||||
// MatchingFileWriter writer = new MatchingFileWriter(filename + "_FirstPass", results[0]);
|
||||
// writer.writeResultsToFile();
|
||||
// writer = new MatchingFileWriter(filename + "_SecondPass", results[1]);
|
||||
// writer.writeResultsToFile();
|
||||
// if(outputCDR3Matches){
|
||||
// writer = new MatchingFileWriter(preliminaryResultsFilename, preliminaryResults);
|
||||
// writer.writeResultsToFile();
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
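// NOTE: The following helper is an illustrative sketch, not part of the original source.
// The disabled block above repeats the same prompt-and-validate pattern for every threshold:
// print a question, read an int with Scanner, and reject values outside the allowed range.
// The method name, parameters, and bounds below are assumptions for illustration only;
// java.util.Scanner and java.util.InputMismatchException are assumed to be imported, since
// the disabled code already relies on both.
private static int promptBoundedInt(Scanner sc, String prompt, int min, int max) {
    System.out.println(prompt);
    int value = sc.nextInt();                       // may itself throw InputMismatchException
    if (value < min || value > max) {
        throw new InputMismatchException("Value must be between " + min + " and " + max);
    }
    return value;
}
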
private static void acknowledge(){
    System.out.println("This program simulates BiGpairSEQ, a graph theory based adaptation");
    System.out.println("of the pairSEQ algorithm for pairing T cell receptor sequences.");
    System.out.println();
    System.out.println("For full documentation, view the readme.md file distributed with this code");
    System.out.println("or visit https://gitea.ejsf.synology.me/efischer/BiGpairSEQ.");
    System.out.println();
    System.out.println("pairSEQ citation:");
    System.out.println("Howie, B., Sherwood, A. M., et al.");
    System.out.println("High-throughput pairing of T cell receptor alpha and beta sequences.");
    System.out.println("Sci. Transl. Med. 7, 301ra131 (2015)");
    System.out.println();
    System.out.println("BiGpairSEQ_Sim by Eugene Fischer, 2021-2022");
}
}

@@ -1,17 +1,85 @@
-public class Vertex {
-    private final Integer peptide;
-    private final Integer occupancy;
-
-    public Vertex(Integer peptide, Integer occupancy) {
-        this.peptide = peptide;
-        this.occupancy = occupancy;
-    }
-
-    public Integer getPeptide() {
-        return peptide;
-    }
-
-    public Integer getOccupancy() {
-        return occupancy;
-    }
-}
+import org.jheaps.AddressableHeap;
+
+import java.io.Serializable;
+import java.util.Map;
+
+public class Vertex implements Serializable, Comparable<Vertex> {
+    private SequenceRecord record;
+    private Integer vertexLabel;
+    private Double potential;
+    private AddressableHeap queue;
+
+    public Vertex(SequenceRecord record, Integer vertexLabel) {
+        this.record = record;
+        this.vertexLabel = vertexLabel;
+    }
+
+    public SequenceRecord getRecord() { return record; }
+
+    public SequenceType getType() { return record.getSequenceType(); }
+
+    public Integer getVertexLabel() {
+        return vertexLabel;
+    }
+
+    public String getSequence() {
+        return record.getSequence();
+    }
+
+    public Integer getOccupancy() {
+        return record.getOccupancy();
+    }
+
+    public Integer getReadCount() { return record.getReadCount(); }
+
+    public Integer getReadCount(Integer well) { return record.getReadCount(well); }
+
+    public Map<Integer, Integer> getWellOccupancies() { return record.getWellOccupancies(); }
+
+    @Override //adapted from JGraphT example code
+    public int hashCode()
+    {
+        return (this.getSequence() == null) ? 0 : this.getSequence().hashCode();
+    }
+
+    @Override //adapted from JGraphT example code
+    public boolean equals(Object obj)
+    {
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;
+        Vertex other = (Vertex) obj;
+        if (this.getSequence() == null) {
+            return other.getSequence() == null;
+        } else {
+            return this.getSequence().equals(other.getSequence());
+        }
+    }
+
+    @Override //adapted from JGraphT example code
+    public String toString()
+    {
+        StringBuilder sb = new StringBuilder();
+        sb.append("(").append(vertexLabel)
+                .append(", Type: ").append(this.getType().name())
+                .append(", Sequence: ").append(this.getSequence())
+                .append(", Occupancy: ").append(this.getOccupancy()).append(")");
+        return sb.toString();
+    }
+
+    @Override
+    public int compareTo(Vertex other) {
+        return this.vertexLabel - other.getVertexLabel();
+    }
+
+    public Double getPotential() {
+        return potential;
+    }
+
+    public void setPotential(Double potential) {
+        this.potential = potential;
+    }
+}
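
The rewritten Vertex above keys equals() and hashCode() on the underlying sequence string and exposes the well occupancies needed for overlap scoring. The code below is a minimal usage sketch, not taken from the repository: it assumes the JGraphT library (org.jgrapht) is on the classpath, since the class comments reference JGraphT example code, and the class name VertexGraphSketch, the method pairGraph, and the edge weight are illustrative assumptions. Because identity is sequence-based, two Vertex objects wrapping records with the same sequence collapse into a single graph vertex when added.

    import org.jgrapht.Graph;
    import org.jgrapht.graph.DefaultWeightedEdge;
    import org.jgrapht.graph.SimpleWeightedGraph;

    // Hypothetical helper class, for illustration only.
    public class VertexGraphSketch {

        // Builds a tiny weighted graph over two pre-built Vertex objects (one alpha, one beta).
        // Construction of SequenceRecord instances is omitted because that constructor is not
        // shown in this diff; callers are assumed to supply ready-made Vertex objects.
        static Graph<Vertex, DefaultWeightedEdge> pairGraph(Vertex alpha, Vertex beta, double overlapWeight) {
            Graph<Vertex, DefaultWeightedEdge> g = new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
            g.addVertex(alpha);
            g.addVertex(beta);
            DefaultWeightedEdge e = g.addEdge(alpha, beta); // returns null if the edge already exists
            g.setEdgeWeight(e, overlapWeight);              // e.g. a well-overlap score for the pair
            return g;
        }
    }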