Skip to content

Commit

Permalink
updated c code to work with new g++ compiler and updated optic_tract …
Browse files Browse the repository at this point in the history
…seeds for Donald 60 and 150
  • Loading branch information
Tom Close committed Jun 20, 2013
1 parent c931b8e commit 4d7e7ad
Show file tree
Hide file tree
Showing 7 changed files with 19 additions and 15 deletions.
2 changes: 1 addition & 1 deletion bash/subtract_optic
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
# Subtracts the signal modelled by the corpus callosum from the fornix image

REFERENCE_DIR="$HOME/fouts/params/image/reference"
REFERENCE_DIR="$HOME/fouts/params/image/reference/"

if [ $# -lt 1 ]; then
echo "The directory to sort must be provided as an argument"
Expand Down
4 changes: 2 additions & 2 deletions params/image/reference/donald/optic_tract.150.seed.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
23 19 0
-22 18 0
19 47 0
-14 43.5 2
4 changes: 2 additions & 2 deletions params/image/reference/donald/optic_tract.60.seed.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
21 27.5 -8.5
-26 26.5 -8.5
19 61 1
-24.5 58.5 -4.5
14 changes: 9 additions & 5 deletions python/hpc/invivo_sampling.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
prior_aux_freq, prior_density_low, prior_density_high, prior_hook, prior_thin,
like_snr, init_name, samples_name, last_name):
like_snr, init_name, samples_name, last_name, num_iterations):
if args.like_noise_map:
noise_option = '--like_noise_map {}'.format(os.path.join(work_dir, 'params', 'image',
'reference', args.like_noise_map))
Expand All @@ -38,7 +38,7 @@ def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
time metropolis {dataset_path} {work_dir}/output/{init_name}.tct \
{work_dir}/output/{samples_name}.tst {noise_option} \
-exp_interp_extent {args.assumed_interp_extent} \
-walk_step_scale {args.step_scale} -num_iter {args.num_iterations} \
-walk_step_scale {args.step_scale} -num_iter {num_iterations} \
-sample_period {args.sample_period} -seed {random_seed} \
-diff_encodings_location {work_dir}/params/diffusion/encoding_60.b \
-prior_freq {prior_freq} {prior_aux_freq} \
Expand All @@ -59,7 +59,7 @@ def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
samples_name=samples_name, last_name=last_name, random_seed=random_seed + 1,
prior_freq=prior_freq, prior_aux_freq=prior_aux_freq, prior_density_low=prior_density_low,
prior_density_high=prior_density_high, prior_hook=prior_hook, prior_thin=prior_thin,
noise_option=noise_option, last_sample=(args.num_iterations // args.sample_period) - 1)
noise_option=noise_option, last_sample=(args.num_iterations // args.sample_period) - 1, num_iterations=num_iterations)
return cmd


Expand All @@ -76,6 +76,9 @@ def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
parser.add_argument('--num_iterations', default=75000, type=int,
help="The number of iterations in the metropolis sampling "
"(default: %(default)s)")
parser.add_argument('--num_after_split_iterations', default=50000, type=int,
help="The number of iterations in the metropolis sampling after the split "
"(default: %(default)s)")
parser.add_argument('--sample_period', default=250, type=int,
help="The sample period of the metropolis sampling "
"(default: %(default)s)")
Expand Down Expand Up @@ -242,7 +245,8 @@ def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
prior_density_high=prior_density_high, prior_hook=prior_hook,
prior_thin=prior_thin, like_snr=like_snr, init_name='init',
samples_name='samples_first' if args.split else 'samples',
last_name='end_first' if args.split else 'last')
last_name='end_first' if args.split else 'last',
num_iterations=args.num_iterations)

if args.split:

Expand All @@ -260,7 +264,7 @@ def sampling_cmd(args, work_dir, dataset_path, random_seed, prior_freq,
prior_density_high=prior_density_high, prior_hook=prior_hook,
prior_thin=prior_thin, like_snr=like_snr,
init_name='init_second', samples_name='samples',
last_name='last')
last_name='last', num_iterations=args.num_after_split_iterations)

# Submit job to que
hpc.submit_job(SCRIPT_NAME, cmd_line, args.np, work_dir, output_dir,
Expand Down
6 changes: 3 additions & 3 deletions src/blossom5/MinCost/MinCost.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -222,17 +222,17 @@ void DualMinCost<CostType>::AddUnaryTerm(NodeId i, int objective_coef) {

template<typename CostType>
void DualMinCost<CostType>::SetLowerBound(NodeId i, CostType cmin) {
AddEdge(i, source, FLOW_INFTY, 0, -cmin);
this->AddEdge(i, source, FLOW_INFTY, 0, -cmin);
}

template<typename CostType>
void DualMinCost<CostType>::SetUpperBound(NodeId i, CostType cmax) {
AddEdge(source, i, FLOW_INFTY, 0, cmax);
this->AddEdge(source, i, FLOW_INFTY, 0, cmax);
}

template<typename CostType>
void DualMinCost<CostType>::AddConstraint(NodeId i, NodeId j, CostType cmax) {
AddEdge(i, j, FLOW_INFTY, 0, cmax);
this->AddEdge(i, j, FLOW_INFTY, 0, cmax);
}

template<typename CostType>
Expand Down
2 changes: 1 addition & 1 deletion src/bts/fibre/base/set_reader.cpp.h
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ namespace BTS {
throw Exception(
"No corresponding line found in extended properties file.");

properties = parse_props_line(this->prop_hdr, line);
properties = this->parse_props_line(this->prop_hdr, line);

set.extract_and_set_props(properties);
set.set_extend_props(properties);
Expand Down
2 changes: 1 addition & 1 deletion src/bts/fibre/base/set_writer.cpp.h
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ namespace BTS {

set[elem_i].insert_props(properties_row);

write_properties_row(this->elem_prop_hdr, properties_row, this->ext_elem_out);
this->write_properties_row(this->elem_prop_hdr, properties_row, this->ext_elem_out);

}

Expand Down

0 comments on commit 4d7e7ad

Please sign in to comment.