# Last edited on 2026-02-15 10:55:54 by stolfi
#
# The probabilities {Pr(H0|LA,LB)} and {Pr(H1|LA,LB)} are computed
# with Bayes's formula, using 50% a priori probabilities for each.
# Thus a result near 50% means that {LA,LB} are not
# sufficient evidence to decide between them.
#
# NOTE(review): the five statements below are the tail of a function whose
# {def} line is not visible in this file; they reference names ({LA}, {LB},
# {max_pos}, {dev_pert}, {prob_del}, {prob_add}) that must be parameters of
# that missing function.  Kept verbatim.
log_P0 = mismatch_sub_sub_match_H0(LA, max_pos)
log_P1 = mismatch_sub_sub_match_H1(LA, LB, max_pos, dev_pert, prob_del, prob_add)
# Bayes with equal priors: Pr(H1|data) = P1/(P0 + P1), computed in log space.
# NOTE(review): {log(exp(a)+exp(b))} can overflow/underflow; a stable
# alternative would be {numpy.logaddexp} -- TODO confirm.
log_den = log(exp(log_P0) + exp(log_P1))
log_H1 = log_P1 - log_den
return log_H1
# ----------------------------------------------------------------------

def mismatch_sub_sub_match_H0(LA, max_pos):
    # Log of the probability of {LA} being generated given {H0}.
    # NOTE(review): stub -- returns the constant 1.0, which is not a valid
    # log-probability (log-probs must be <= 0); presumably a placeholder
    # for the real computation.  TODO confirm.
    return 1.0
# ----------------------------------------------------------------------

def mismatch_sub_sub_match_H1(LA, LB, max_pos, dev_pert, prob_del, prob_add):
    # Log of probability of {LA} being generated from {LB} by the process described
    # in {mismatch_sub_sub_match}.
    # NOTE(review): stub -- bare {return} yields {None}; any caller that
    # applies {exp()} to the result will raise.  TODO implement.
    return

#
# NOTE(review): the section below is an exact duplicate of the code above
# (same orphaned function tail, same two stub definitions); at import time
# the later {def}s shadow the earlier ones.  Kept verbatim; the duplicate
# should eventually be deleted.
log_P0 = mismatch_sub_sub_match_H0(LA, max_pos)
log_P1 = mismatch_sub_sub_match_H1(LA, LB, max_pos, dev_pert, prob_del, prob_add)
log_den = log(exp(log_P0) + exp(log_P1))
log_H1 = log_P1 - log_den
return log_H1
# ----------------------------------------------------------------------

def mismatch_sub_sub_match_H0(LA, max_pos):
    # Log of the probability of {LA} being generated given {H0}.
    # NOTE(review): duplicate stub; see the note on the first copy above.
    return 1.0
# ----------------------------------------------------------------------

def mismatch_sub_sub_match_H1(LA, LB, max_pos, dev_pert, prob_del, prob_add):
    # Log of probability of {LA} being generated from {LB} by the process described
    # in {mismatch_sub_sub_match}.
    # NOTE(review): duplicate stub; returns {None}.
    return
# ----------------------------------------------------------------------

def log_prob_sub_sub_match_H1_IA_IB(LA, LB, ???IA, ???IB, max_pos, dev_pert):
    # Computes the probability of {LA} having been derived from {LB} by
    # deleting the elements listed in {???IA}, applying an arbitrary shift,
    # and inserting new elements in the positions listed in {???IB}.  Assumes
    # that the probabilities of matched elements to be given by Gaussian
    # bell functions of the discrepancies, with deviation {dev_pert}.
    #
    # NOTE(review): unfinished work in progress -- {???IA}/{???IB} are
    # editor placeholders (not valid Python); the body calls a function with
    # this same name but a DIFFERENT signature, so this outer definition is
    # probably meant to be named something like {log_prob_sub_sub_match_H1};
    # and {shift} is used below without ever being defined.
    nA = len(LA)
    nB = len(LB)
    nP = nB - len(???IA) # Number of elements of {LB} that supposedly were NOT deleted.
    assert nP == nA - len(???IB)
    # Build list {IB} of indices of the NON-deleted elems of {LB}:
    IB = []; iB = 0
    for d in ???IA + (nB,):
        # Collect every index strictly below the next deleted index {d},
        # then skip {d} itself (the sentinel {nB} flushes the tail):
        while iB < d: IB.append(iB); iB += 1
        iB += 1
    assert len(IB) == nP
    # Build list {IA} of the corresponding indices in {LA}:
    IA = []; iA = 0
    for k in ???IB + (nA,):
        while iA < k: IA.append(iA); iA += 1
        iA += 1
    assert len(IA) == nP
    log_prob = log_prob_sub_sub_match_H1_IA_IB(LA, IA, LB, IB, shift, max_pos, dev_pert)
    return log_prob
# ----------------------------------------------------------------------

def least_squares_system(nb, basis, nd, sample, weight):
    # NOTE(review): unfinished fragment -- the body references {M}, {ny},
    # {nx}, {rmax} and {nmin_q}, none of which are parameters or locals;
    # it calls {add_weights_to_displacements}, which does not match the
    # name of the helper defined below
    # ({add_apodizing_weights_to_displacements}); and the per-quadrant
    # results {smps_q}/{smpsw_q} are computed but never accumulated or
    # returned.  Kept verbatim.
    if M is not None:
        assert numpy.shape(M) == (ny,nx,), "mask/image shape mismatch"
        for iy in range(ny):
            for ix in range(nx):
                smps = [] # List of pairs {(dx, dy)}.
                for q in range(4):
                    smps_q = collect_pixels_in_quadrant(M, ix, iy, q, rmax, nmin_q)
                    smpsw_q = add_weights_to_displacements(smps_q, rmax)

def add_apodizing_weights_to_displacements(smps, rmax):
    # Takes a list of index displacement vectors {(dx,dy)}.
    # Appends to each pair a weight
    # that is 1 for the nearest pixels and decreases smoothly to zero
    # at distance {rmax}.
    smpsw = []
    # NOTE(review): the next two lines index {smps[0]}/{smps[-1]} BEFORE the
    # emptiness test below, so an empty {smps} raises {IndexError}; they also
    # silently overwrite the {rmax} parameter.  Assumes {smps} is sorted by
    # distance (as produced by {collect_pixels_in_quadrant}) -- TODO confirm.
    rmin = hypot(smps[0][0], smps[0][1])
    rmax = hypot(smps[-1][0], smps[-1][1])
    if len(smps > 0):
        # NOTE(review): {len(smps > 0)} is a bug -- surely intended to be
        # {len(smps) > 0}; as written it applies {len} to the result of
        # comparing a list with an integer.
        # Get distance of nearest and farthest pixels.
        # Note: these may be equal.
        assert rmax > 0 and rmin <= rmax
        rmid = (rmin + rmax)/2;
        # Compute edge-smoothing weights for this quadrant:
        for (dx, dy) in smps:
            if rmin == rmax:
                w = 0
            else:
                r = hypot(dx,dy)
                assert r >= rmin and r <= rmax
                if r >= rmax:
                    w = 0.0
                elif r <= rmid:
                    w = 1.0
                else:
                    # Smooth taper from 1 at {rmid} down to 0 at {rmax}.
                    t = (r - rmid)/(rmax - rmid)
                    w = (1-t) + 2*t*(t-0.5)*(t-1)
            if w > 0:
                smpsw.append((dx,dy,w))
    # NOTE(review): bare {return} discards {smpsw}; presumably should be
    # {return smpsw}.  TODO confirm.
    return
# ----------------------------------------------------------------------

def collect_pixels_in_quadrant(M, ix, iy, q, rmax, nmin_q):
    # Enumerates the pixels in quadrant {q} around pixel {M[iy,ix]}
    # with displacement components at most {rmax}, keeping those inside the
    # image whose mask value {M[jy,jx]} is nonzero; returns their
    # displacements {(dx,dy)} sorted by Euclidean distance, truncated to at
    # most {nmin_q} entries.
    # NOTE(review): the name {nmin_q} suggests a MINIMUM count, but the code
    # uses it as a maximum ({smps = smps[:nmin_q]}).  TODO confirm.
    ny, nx = numpy.shape(M)
    smps = []
    for dv in range(1,rmax+1):
        for du in range(rmax+1):
            # Rotate the base displacement {(du,dv)} into quadrant {q} by
            # successive 90-degree rotations; {du} may be 0 but {dv} starts
            # at 1, so the four quadrants do not overlap.
            dx = (+du, +dv, -du, -dv)[q]
            dy = (+dv, -du, -dv, +du)[q]
            jx = ix+dx; jy = iy+dy
            if jx >= 0 and jx < nx and jy >= 0 and jy < ny and M[jy,jx] != 0:
                smps.append((dx, dy))
    smps.sort(key = lambda d : hypot(d[0],d[1]))
    if len(smps) > nmin_q: smps = smps[:nmin_q]
    return smps
# ----------------------------------------------------------------------

# NOTE(review): the {created} string below looks like an accidental paste --
# a timestamp concatenated with the function name
# "html_report_funcs.html_subpage_link".  TODO confirm intended contents.
created = "created on " + str(datetime.now(tz = timezone.utc)) + "html_report_funcs.html_subpage_link "
imgfile = f"{clip_dir}/annotated.png" # Image file.
titfile = f"{clip_dir}/title.txt" # File with title of figure.
legfile = f"{clip_dir}/legend.txt" # File with plain text caption.
disfile = f"{clip_dir}/discussion.txt" # File with plain text discussion.
# Obtain the figure's dimensions:
imgsize = h.get_image_size(imgfile)

# Title of the figure, cleaned up for HTML insertion:
title = h.get_text_from_file(titfile)
title = h.protect_html(title)
title = h.simple_markup(title)

# Width of the thumbnail in the parent page (0 means no thumbnail):
if thumb:
    thumb_width = 80*st['text_width']/100
else:
    thumb_width = 0

# NOTE(review): {assert} disappears under {python -O}; kept as an assert to
# preserve the caller-visible failure mode, but an explicit raise would be
# more robust.
assert link_text is not None, "must specify a link text"
link_to_sub = h.make_link(st, subfile, link_text, thumb_width, imgfile)

# Create the subsidiary page in memory:
st_sub = h.new_doc(title, "#eeffdd", 800)

# Caption paragraph(s) from the legend file:
caption = h.get_text_from_file(legfile)
caption = h.protect_html(caption)
caption = h.simple_markup(caption)
caption = h.make_parags(caption, align = "left", width = "80%")

imgfile_sub = f"annotated.png" # Name of image file rel to subpage.
# NOTE(review): {imgtag_sub} is an empty string -- presumably an <img> tag
# referencing {imgfile_sub} was lost here; confirm what {h.figure} expects.
imgtag_sub = f""
h.figure(st_sub, imgtag_sub, caption, centered = True)

# Optional "Discussion" section.
# Fixed: this previously re-read {legfile} (the legend/caption file);
# the discussion text lives in {disfile}, declared above.
discussion = h.get_text_from_file(disfile)
if discussion != "":
    discussion = h.protect_html(discussion)
    discussion = h.simple_markup(discussion)
    discussion = h.make_parags(discussion)
    h.section(st_sub, 2, "Discussion")
    h.parags(st_sub, discussion)

# Write out the subpage ({with} guarantees the file is closed even if
# {output_doc} raises; previously the handle leaked on error):
with open(subfile, "w") as wr_sub:
    h.output_doc(st_sub, wr_sub, 0, created)

# Scale each spectral component to standard deviation 1.0,
# to give it equal chances of contributing to the result:
# NOTE(review): disabled code kept for reference.
# sys.stderr.error(f"scaling band images to unit deviation ...\n")
# C = normalize_band_intensities_to_unit_std(C)

# NOTE(review): disabled mask-reading code kept for reference.
# tfile = f"/tmp/{os.getpid()}-read_mask_image.pnm"
# bash(f"convert {fname} -set colorspace LinearGray -colorspace LinearGray {tfile}")
# M = read_pnm_image(tfile, 0, 0)