Commit 0f9d41a
Revert "going to old covariance method, mainly due to instabilities."
This reverts commit 48f4476.
corentinravoux committed Jan 17, 2025
1 parent 48f4476 · commit 0f9d41a
Showing 1 changed file with 46 additions and 20 deletions.
py/picca/pk1d/postproc_pk1d.py
@@ -565,27 +565,43 @@ def fill_average_pk(
     for icol, col in enumerate(p1d_table_cols):
         mean_p1d_table["mean" + col][index_mean[0]:index_mean[1]] = mean_array[icol]
 
-        variance_col = variance_array[icol]
-        if len(variance_col) == len(variance_col[np.isnan(variance_col)]):
-            error_col = np.sqrt(variance_col)
-        else:
-            if smooth_error:
-                # Savgol filter in the log variance.
-                window_filter = min(
-                    DEFAULT_ERROR_SMOOTHING_WINDOW,
-                    int(3 * len(variance_col) / 4),
-                )
-                error_col = np.sqrt(
-                    np.exp(
-                        savgol_filter(
-                            np.log(variance_col),
-                            window_filter,
-                            DEFAULT_ERROR_SMOOTHING_POLYNOMIAL,
-                        )
-                    )
-                )
-            else:
-                error_col = np.sqrt(variance_col)
+        if col == "Pk":
+            variance_col = variance_array[icol]
+            if len(variance_col) == len(variance_col[np.isnan(variance_col)]):
+                error_col = np.sqrt(variance_col)
+            elif len(variance_col) == len(variance_col[variance_col < 0.0]):
+                error_col = np.sqrt(variance_col)
+            else:
+                mask_negative_variance = variance_col < 0.0
+                variance_indices = np.arange(len(variance_col))
+                interp_func = interp1d(
+                    variance_indices[~mask_negative_variance],
+                    variance_col[~mask_negative_variance],
+                    kind="linear",
+                    fill_value="extrapolate",
+                )
+                variance_col_filled = interp_func(variance_indices)
+
+                if smooth_error:
+                    # Savgol filter in the log variance.
+                    window_filter = min(
+                        DEFAULT_ERROR_SMOOTHING_WINDOW,
+                        int(3 * len(variance_col_filled) / 4),
+                    )
+                    error_col = np.sqrt(
+                        np.exp(
+                            savgol_filter(
+                                np.log(variance_col_filled),
+                                window_filter,
+                                DEFAULT_ERROR_SMOOTHING_POLYNOMIAL,
+                            )
+                        )
+                    )
+                else:
+                    error_col = np.sqrt(variance_col_filled)
+        else:
+            error_col = np.sqrt(variance_array[icol])
 
         mean_p1d_table["error" + col][index_mean[0]:index_mean[1]] = error_col
         mean_p1d_table["min" + col][index_mean[0]:index_mean[1]] = min_array[icol]
@@ -872,7 +888,17 @@ def compute_average_pk_redshift(
                     / np.sum(weights_col) ** 2
                 )
             else:
-                variance = 1 / np.sum(weights_col)
+                # Taking JM estimator only for P1D,
+                # because it gives almost only negative values for other columns.
+                if col == "Pk":
+                    variance = ((np.sum(weights) ** 2 / np.sum(weights**2)) - 1) ** (
+                        -1
+                    ) * (
+                        (np.sum(weights**2 * data_values**2) / np.sum(weights**2))
+                        - (np.sum(weights * data_values) / np.sum(weights)) ** 2
+                    )
+                else:
+                    variance = 1 / np.sum(weights_col)
             if col == "Pk":
                 standard_dev = np.concatenate(
                     [
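
The estimator restored here is the variance of the weighted mean: with effective sample size n_eff = (sum w)^2 / sum w^2, the variance is (second moment - mean^2) / (n_eff - 1), which reduces to the familiar s^2 / n for equal weights. A standalone sketch of the same formula (the function name is mine; picca computes this inline):

import numpy as np

def weighted_mean_variance(values, weights):
    """Variance of the weighted mean, matching the restored estimator:

    var = [ (sum w)^2 / sum w^2 - 1 ]^-1
          * [ sum w^2 x^2 / sum w^2 - (sum w x / sum w)^2 ]
    """
    x = np.asarray(values, dtype=float)
    w = np.asarray(weights, dtype=float)
    n_eff = np.sum(w) ** 2 / np.sum(w**2)  # effective sample size
    second_moment = np.sum(w**2 * x**2) / np.sum(w**2)
    mean = np.sum(w * x) / np.sum(w)
    return (second_moment - mean**2) / (n_eff - 1)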
@@ -1180,7 +1206,7 @@ def compute_groups_for_one_forest(nbins_k, p1d_los):
         if number_in_bins != 0:
             weight = p1d_los["weight"][mask_ikbin][0]
             p1d_weights_id[ikbin] = weight
-            covariance_weights_id[ikbin] = np.sqrt(weight / number_in_bins)
+            covariance_weights_id[ikbin] = weight / number_in_bins
             p1d_groups_id[ikbin] = np.nansum(
                 p1d_los["pk"][mask_ikbin] * covariance_weights_id[ikbin]
             )
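
The one-line change above swaps the per-bin covariance weight from sqrt(weight / number_in_bins) back to weight / number_in_bins. A hypothetical standalone version of the grouping loop, for illustration only (the signature and array layout are mine, not picca's API):

import numpy as np

def group_forest_pk(pk, kbin_index, weights, nbins_k):
    """Collapse one line of sight's P1D modes into nbins_k k-bins.

    Hypothetical standalone version of the per-forest grouping above;
    the covariance weight per bin is w / n_modes (the reverted commit
    had used sqrt(w / n_modes) instead).
    """
    p1d_groups = np.zeros(nbins_k)
    p1d_weights = np.zeros(nbins_k)
    covariance_weights = np.zeros(nbins_k)
    for ikbin in range(nbins_k):
        mask = kbin_index == ikbin
        n_modes = int(np.sum(mask))
        if n_modes != 0:
            weight = weights[mask][0]  # one weight per forest and bin
            p1d_weights[ikbin] = weight
            covariance_weights[ikbin] = weight / n_modes
            p1d_groups[ikbin] = np.nansum(pk[mask] * covariance_weights[ikbin])
    return p1d_groups, p1d_weights, covariance_weights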