Skip to content
Snippets Groups Projects
Commit 9dac4f98 authored by Holger Brandl's avatar Holger Brandl
Browse files

integrated cutadapt log parser into workflow

parent f5a99481
No related branches found
No related tags found
No related merge requests found
...@@ -11,7 +11,7 @@ export PATH=$DGE_HOME:$DGE_HOME/../misc/:$PATH ...@@ -11,7 +11,7 @@ export PATH=$DGE_HOME:$DGE_HOME/../misc/:$PATH
######################################################################################################################## ########################################################################################################################
### QC ### QC
dge_fastqc *fastq.gz & dge_fastqc $(ls *fastq.gz) &
######################################################################################################################## ########################################################################################################################
...@@ -21,7 +21,7 @@ mcdir $baseDir/trimmed ...@@ -21,7 +21,7 @@ mcdir $baseDir/trimmed
dge_cutadapt $(ls $baseDir/treps_pooled/*fastq.gz) 2>&1 | tee cutadapt.log dge_cutadapt $(ls $baseDir/treps_pooled/*fastq.gz) 2>&1 | tee cutadapt.log
dge_fastqc $(ls *fastq.gz) dge_fastqc $(ls *fastq.gz) &
######################################################################################################################## ########################################################################################################################
......
...@@ -82,6 +82,9 @@ for fastqFile in $* ; do ...@@ -82,6 +82,9 @@ for fastqFile in $* ; do
done done
wait4jobs .cajobs wait4jobs .cajobs
spin.R $DGE_HOME/cutadapt_summary.R .
ziprm cutadapt_logs ${project}__ca__*.log ziprm cutadapt_logs ${project}__ca__*.log
......
...@@ -49,7 +49,10 @@ readCount <- function(statsFile){ ...@@ -49,7 +49,10 @@ readCount <- function(statsFile){
readCounts <- fastqDataFiles %>% ldply(readCount) %>% print_head() readCounts <- fastqDataFiles %>% ldply(readCount) %>% print_head()
require.auto(scales) require.auto(scales)
ggplot(readCounts, aes(run, num_reads)) + geom_bar(stat="identity") + coord_flip() + scale_y_continuous(labels=comma) gg <- ggplot(readCounts, aes(run, num_reads)) + geom_bar(stat="identity") + coord_flip() + scale_y_continuous(labels=comma) + ggtitle("read counts")
#+ results='asis'
gg %>% ggsave2(width=15, height = round(nrow(readCounts)/4), limitsize=FALSE) %>% paste0("<img src='", ., "'><br>") %>% cat()
### Create a fail/pass matrix ### Create a fail/pass matrix
...@@ -68,10 +71,14 @@ readSummary <- function(statsFile){ ...@@ -68,10 +71,14 @@ readSummary <- function(statsFile){
qcSummary <- list.files(path=baseDir, pattern="^summary.txt", full.names=TRUE, recursive=T) %>% ldply(readSummary) qcSummary <- list.files(path=baseDir, pattern="^summary.txt", full.names=TRUE, recursive=T) %>% ldply(readSummary)
#+ fig.height=20 #+ fig.height=20
qcSummary %>% ggplot(aes(score, run, fill=tolower(flag))) + gg <- qcSummary %>% ggplot(aes(score, run, fill=tolower(flag))) +
geom_tile() + geom_tile() +
rotXlab() + rotXlab() +
scale_fill_manual(values = c(fail="darkred", pass="darkgreen", warn="orange")) scale_fill_manual(values = c(fail="darkred", pass="darkgreen", warn="orange")) +
ggtitle("fastqc passcodes")
#+ results='asis'
gg %>% ggsave2(width=15, height = round(nrow(readCounts)/4), limitsize=FALSE) %>% paste0("<img src='", ., "'><br>") %>% cat()
#' # Base Quality Distribution Summary #' # Base Quality Distribution Summary
...@@ -98,7 +105,6 @@ baseQualities <- fastqDataFiles %>% ldply(readBaseQualDist) ...@@ -98,7 +105,6 @@ baseQualities <- fastqDataFiles %>% ldply(readBaseQualDist)
#with(baseQualities, as.data.frame(table(run))) #with(baseQualities, as.data.frame(table(run)))
#+ fig.height=20
baseQualities %>% ggplot(aes(reorder(Base, base_order), Mean, group=run, color=run)) + geom_line() + scale_y_continuous(limits=c(2, 40)) baseQualities %>% ggplot(aes(reorder(Base, base_order), Mean, group=run, color=run)) + geom_line() + scale_y_continuous(limits=c(2, 40))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment