
Commit

fixed error in scaling for measurements
leobrowning92 committed Apr 30, 2018
1 parent a62dde3 commit 7e0502f
Showing 3 changed files with 22 additions and 11 deletions.
15 changes: 14 additions & 1 deletion README.md
@@ -42,6 +42,19 @@ Using a kdtree with a sorted length list and length dependent search radius the

### Running remotely

To get the data from baptiste's grid, run:

scp -r leo@10.30.128.49:/home/leo/gitrepos/networksim-cntfet/data /home/leo/Desktop/

### Running on heisenberg
Put the srun script into a separate directory for the runtime data,
then edit the number of cores passed to measure_perc.py to match the number of tasks requested in the SBATCH section (see the sketch below),
and run:

sbatch srun.sh

To check on the job, use:

squeue -o "%.18i %.9P %.8j %.8u %.2t %.8M %.5D %.4C %R"

Usage of `squeue` is documented [here](https://slurm.schedmd.com/squeue.html).
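
For the core-matching step above, a trimmed sketch of how the two numbers line up, using the values from this commit's srun.sh (full diff further down); the paths and figures are illustrative of this setup only:

#!/bin/bash
#SBATCH -N 1     # nodes requested
#SBATCH -n 32    # tasks requested: one core per task

# --cores must equal the -n value above so the Pool gets one worker per allocated core
python3 ~/gitrepos/networksim-cntfet/measure_perc.py -s --cores 32 --start 36000 --step 0 --number 32 --scaling 60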
9 changes: 5 additions & 4 deletions measure_perc.py
@@ -11,7 +11,7 @@ def checkdir(directoryname):
pass


def measure_fullnet(n,scaling=60, l='exp', save=False, v=True ,remote=False):
def measure_fullnet(n,scaling, l='exp', save=False, v=True ,remote=False):
start = timer()
data=pd.DataFrame(columns = ['sticks', 'size', 'density', 'nclust', 'maxclust', 'ion', 'ioff','ioff_totaltop', 'ioff_partialtop', 'runtime', 'fname'])
try:
@@ -94,11 +94,11 @@ def measure_number_series_compareL(remote=True):
pool = Pool(os.cpu_count()-1)
pool.map(n_vary_expL_remote, nexp)
pool.map(n_vary_066L_remote, nconst)
def measure_async(cores,start,step,number,save=False):
def measure_async(cores,start,step,number,scaling,save=False):
starttime = timer()
nrange=[start+i*step for i in range(number)]
pool=Pool(cores)
results=[pool.apply_async(measure_fullnet,args=(n,5,'exp',save)) for n in nrange]
results=[pool.apply_async(measure_fullnet,args=(n,scaling,'exp',save)) for n in nrange]
output=[res.get() for res in results]
endtime = timer()
runtime=endtime - starttime
@@ -114,9 +114,10 @@ def measure_async(cores,start,step,number,save=False):
parser.add_argument("--start",type=int)
parser.add_argument("--step",type=int,default=0)
parser.add_argument("--number",type=int)
parser.add_argument("--scaling",type=int,default=5)
args = parser.parse_args()
checkdir('data')
if args.test:
measure_async(2,500,0,10,args.scaling,save=True)
else:
measure_async(args.cores, args.start, args.step, args.number, args.save)
measure_async(args.cores, args.start, args.step, args.number,args.scaling, args.save)
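
With this change the scaling value has to be supplied all the way down to measure_fullnet, so a run is now invoked with the new --scaling flag; a hedged usage sketch (the argument values are only illustrative, and -s is assumed to be the save flag as it is used in srun.sh):

python3 measure_perc.py -s --cores 2 --start 500 --step 0 --number 4 --scaling 5
# leaving out --scaling falls back to the argparse default of 5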
9 changes: 3 additions & 6 deletions srun.sh
@@ -1,12 +1,9 @@
#!/bin/bash

#SBATCH -N 1 # nodes requested
#SBATCH -n 2 # tasks requested
#SBATCH -n 32 # tasks requested. default is one core per task
#SBATCH -o outfile # send stdout to outfile
#SBATCH -e errfile # send stderr to errfile
#SBATCH -t 0:01:00 # time requested in hour:minute:second
#SBATCH -t 10:00:00 # time requested in hour:minute:second
#SBATCH --mem-per-cpu=1024
#SBATCH --mail-user=$USER@localhost

python3 ~/gitrepos/networksim-cntfet/measure_perc.py -t

python3 ~/gitrepos/networksim-cntfet/measure_perc.py -s --cores 32 --start 36000 --step 0 --number 32 --scaling 60
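
Once submitted, stdout and stderr go to the files named by the -o and -e directives above; a small monitoring sketch using standard SLURM and shell tools (nothing here is specific to this repository):

sbatch srun.sh             # submit the job
squeue -u $USER            # list this user's queued and running jobs
tail -f outfile errfile    # follow stdout/stderr while the run progresses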
