-
Notifications
You must be signed in to change notification settings - Fork 2
/
combined.sh
executable file
·244 lines (199 loc) · 8.66 KB
/
combined.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
#!/bin/bash
# This is the script to run all Fragpipe tools in a parallelized manner on a HPC.
# By Patrick van Zalm (patrick.vanzalm@childrens.harvard.edu / patrickvanzalm@gmail.com)
# Steen lab, Boston Children's Hospital, Boston, Massachusetts, United States of America
#### NOTE ####
# A user should ALWAYS first fill in the required settings in the settings.sh file
# before running the script below.
# Script requires five inputs:
# 1. Input Directory
# 2. Output Directory
# 3. FASTA file
# 4. Fragger.params file
# 5. Location of all the scripts
# Fail fast: abort on unhandled command errors, unset variables and pipeline failures.
set -euo pipefail
# Default each argument to empty when absent so validation below reports the
# problem instead of set -u aborting with an "unbound variable" message.
inputdirectory=${1:-}
outputdirectory=${2:-}
fastaFile=${3:-}
fraggerparamsFile=${4:-}
ScriptsLocation=${5:-}
#check if inputdirectory is a directory. If not: abort with a non-zero status.
#(exit 1 replaces the former 'kill $$': a signal to ourselves reports status 143
# and would not stop the script at all if SIGTERM were trapped or ignored)
if [[ ! -d "$inputdirectory" ]]
then
  echo "ERROR" >&2
  echo "Input argument 1(inputdirectory) is not a directory" >&2
  echo "Please make sure you refer to an existing directory" >&2
  exit 1
fi
#Determine the number of .d bruker directories in the inputdirectory
numberFiles=$(find "$inputdirectory" -maxdepth 1 -name "*.d" | wc -l)
#If numberFiles is 0, it means there is no bruker data and we abort
if (( numberFiles == 0 ))
then
  echo "ERROR" >&2
  echo "Directory found, but it seems there are no bruker(.d) files in the directory" >&2
  echo "Please make sure you refer to a directory with bruker .d directories" >&2
  exit 1
fi
#if the outputdirectory does not exist we make it ourselves.
if [[ ! -d "$outputdirectory" ]]
then
  echo "output directory not found. will make it"
  mkdir -p "$outputdirectory"
fi
#Check if FASTA file is correct. The file has to end with .fas or .fasta
#(left unquoted deliberately: the RHS of == inside [[ ]] is a glob pattern)
if [[ ($fastaFile == *.fas) || ($fastaFile == *.fasta) ]]
then
  echo "Fasta file correct. Will proceed."
else
  echo "Fasta file does not seem to be correct. It did not end with .fas or .fasta" >&2
  echo "Please make sure. Will kill script now." >&2
  exit 1
fi
# If this is all correct we will design the outputdirectory
# We make a directory for the results, logs and settings
mkdir -p "$outputdirectory/logs"
mkdir -p "$outputdirectory/settings"
mkdir -p "$outputdirectory/results"
#copy settings.sh, fraggerparams and fasta to settings directory
cp -r "$ScriptsLocation"/* "$outputdirectory/settings/"
cp "$fraggerparamsFile" "$outputdirectory/settings/"
cp "$fastaFile" "$outputdirectory/settings/"
#chmod the scripts
chmod 770 "$outputdirectory"/settings/*
#all other scripts will take info from the settings.sh script. We append some info there as given by user
{
  echo -e "\n\n# USER INPUT SETTINGS"
  echo "ScriptsLocation=$ScriptsLocation"
  echo "inputdirectory=$inputdirectory"
  echo "outputdirectory=$outputdirectory"
  echo "fastaFile=$outputdirectory/settings/$(basename "$fastaFile")"
  echo "fraggerParamsNetworkPath=$outputdirectory/settings/$(basename "$fraggerparamsFile")"
} >> "$outputdirectory/settings/settings.sh"
#We can now source those settings.
#NOTE(review): this re-points fastaFile and fraggerParamsNetworkPath at the
#copies in the settings directory, and is expected to define
#numberOfFilesPerBatch, fragpipeDirectory and philosopherNetworkPath —
#confirm settings.sh actually provides these.
source "$outputdirectory/settings/settings.sh"
#Alter the FASTA file location in the fragger.params file
#(assumes 'database_name = ...' is the FIRST line of fragger.params — TODO confirm)
databaseTemp="database_name = "
sed -i "1s|.*|$databaseTemp$fastaFile|" "$fraggerParamsNetworkPath"
#chmod the tools for the networkpath usage
#(mode placed after -R: 'chmod 777 -R' relies on GNU option reordering)
chmod -R 777 "$fragpipeDirectory"
chmod u+x "$philosopherNetworkPath"
######################
## Prepare Batching ##
######################
#Make array with the bruker files.
#NOTE(review): word-splitting the find output means .d paths containing
#whitespace would break; the batch file format below is space-delimited by
#design, so this limitation is inherent to the pipeline.
arraybruker=($(find "$inputdirectory" -maxdepth 1 -name "*.d"))
#Check if requested batch size is not bigger than the number of files.
if (( numberFiles < numberOfFilesPerBatch ))
then
  echo "ERROR" >&2
  echo "Your requested batch size is bigger than the number of samples in the directory" >&2
  echo "Please make sure that it is smaller (n-1)." >&2
  exit 1
fi
#Calculate the number of batches. This would be the total number of files, divided by the numberOfFilesPerBatch.
#Then we round down, because the last batch will also include the left over ones (if applicable)
numberOfBatches=$(( numberFiles / numberOfFilesPerBatch ))
#Calculate the modulo. We do this so that the last batch will also include the "left over" ones.
moduloOfSamples=$(( numberFiles % numberOfFilesPerBatch ))
#Number of files excluding the modulo, minus one: upper bound for seq so the
#loop never starts a small left-over batch on its own.
numberFilesNoModulo=$(( numberFiles - moduloOfSamples - 1 ))
#Build the ARRAY of batches: each entry is one space-separated list of .d paths.
#The LAST batch also absorbs the modulo samples, so no tiny batch (e.g. 1 sample) is run.
declare -a jobArray=()
number=0
for i in $(seq 0 "$numberOfFilesPerBatch" "$numberFilesNoModulo")
do
  number=$(( number + 1 ))
  if (( number == numberOfBatches ))
  then
    #last batch: take the regular slice plus the left-over samples
    numberOfFilesPerBatchAndModulo=$(( numberOfFilesPerBatch + moduloOfSamples ))
    files="${arraybruker[@]:$i:$numberOfFilesPerBatchAndModulo}"
  else
    #regular batch of numberOfFilesPerBatch samples
    files="${arraybruker[@]:$i:$numberOfFilesPerBatch}"
  fi
  jobArray+=("$files")
done
#Write the array to a temp file (one batch per line) so the Sbatch script can read it
for j in "${jobArray[@]}"
do
  echo "$j"
done > "$outputdirectory/settings/write_MZbin.txt"
#################
## WRITE mzBIN ##
#################
#Set up array number for sbatch (total number of items in array MINUS 1)
msfraggerArrayNumber=$(( ${#jobArray[@]} - 1 ))
#determine number of mzBIN
numberFilesmzBIN=$(find "$inputdirectory" -maxdepth 1 -name "*.mzBIN" | wc -l)
#If the mzBIN count matches the .d count we can go straight to MSFragger;
#otherwise run the write-mzBIN array job first to parallelize the writing.
#(a space before each continuation backslash keeps the options separate words;
# -W makes sbatch block until the array job completes)
if (( numberFiles != numberFilesmzBIN ))
then
  echo "Uneven number of .d files and mzBIN files observed: will write all mzBIN first"
  sbatch --array=0-"$msfraggerArrayNumber" \
    --output="$outputdirectory/logs/write_mzBIN_%A_%a.log" \
    -W \
    "$outputdirectory/settings/MSFragger/Sbatch_write_mzBIN.sh" "$outputdirectory"
  echo "Writing of mzBIN finished."
fi
#################
##  MSFRAGGER  ##
#################
# mzBIN should be written. We can run MSFragger now.
# (-W blocks until the job finishes so the next stage sees its output)
echo "Run MSFragger"
sbatch --output="$outputdirectory/logs/MSFragger_%A.log" \
  -W \
  "$outputdirectory/settings/MSFragger/Sbatch_MSFragger.sh" "$outputdirectory"
echo "MSFragger finished"
####################
## peptideProphet ##
####################
#Run peptideprophet in parallel. Use the same batching (array size) as for writing mzBIN
echo "Run batched peptideProphet"
sbatch --array=0-"$msfraggerArrayNumber" \
  --output="$outputdirectory/logs/peptideProphet_%A_%a.log" \
  -W \
  "$outputdirectory/settings/Philosopher/Sbatch_peptideProphet.sh" "$outputdirectory"
echo "peptideProphet finished"
####################
## proteinProphet ##
####################
#All are single threaded processes that require little computational power
echo "Run ProteinProphet"
sbatch --output="$outputdirectory/logs/ProteinProphet_et_al_%A.log" \
  -W \
  "$outputdirectory/settings/Philosopher/Sbatch_proteinProphet.sh" "$outputdirectory"
echo "ProteinProphet finished"
######################
## WRITE quantindex ##
######################
#determine number of quantindex files
echo "Checking if all quantindex files are written...."
numberFilesquantindex=$(find "$inputdirectory" -maxdepth 1 -name "*.quantindex" | wc -l)
#If the quantindex count matches the .d count we can go straight to IonQuant;
#otherwise run the write-quantindex array job first to parallelize the writing.
if (( numberFiles != numberFilesquantindex ))
then
  echo "Uneven number of .d files and quantindex files observed: will write all quantindex first"
  sbatch --array=0-"$msfraggerArrayNumber" \
    --output="$outputdirectory/logs/write_quantindex_%A_%a.log" \
    -W \
    "$outputdirectory/settings/IonQuant/Sbatch_write_quantindex.sh" "$outputdirectory"
  echo "Writing of quantindex finished."
fi
echo "All quantindex are written and/or found."
#################
##  IonQuant   ##
#################
# quantindex should be written. We can run IonQuant now.
# (progress echo restored — it was commented out, unlike every other stage)
echo "Run IonQuant"
sbatch --output="$outputdirectory/logs/IonQuant_%A.log" \
  -W \
  "$outputdirectory/settings/IonQuant/Sbatch_IonQuant.sh" "$outputdirectory"
echo "IonQuant finished"