Commit f34427d5 authored by Moreau Nicolas
Browse files

Revert "Merge branch 'colorbar' into develop"

This reverts commit bb2205d5, reversing
changes made to ad841690.
parent bb2205d5
......@@ -15,6 +15,8 @@ from astropy import constants as const
import numpy as np
import json
import copy
from io import BytesIO
import png
from PIL import Image, PngImagePlugin
import dask
import dask.array as da
......@@ -26,20 +28,17 @@ from datetime import datetime
import os
import re
from functools import reduce
from matplotlib import pyplot as plt
from matplotlib import pyplot as pp
import yt
import cv2
def cmap2palette (palette_name):
    """Return the matplotlib colormap *palette_name* as a list of RGB tuples.

    Each entry is a (r, g, b) tuple of integers in [0, 255], one per
    colormap bin (the colormap's ``N`` attribute gives the bin count).
    """
    # NOTE(review): the diff residue showed both `plt.get_cmap` and
    # `pp.get_cmap`; the retained import is `pp` (matplotlib.pyplot).
    cmap = pp.get_cmap(palette_name)
    # cmap(i) yields RGBA floats in [0, 1]; keep RGB only, rescale to 0-255.
    return [tuple(int(round(c * 255)) for c in cmap(i)[0:3]) for i in range(cmap.N)]
def getLegendFilePath(pngFilePath):
    """Return the path of the legend image associated with a PNG file.

    "foo.png" becomes "foo_legend.png": the "_legend" suffix is inserted
    before the trailing ".png" extension. A path without that extension is
    simply suffixed with "_legend.png".
    """
    # The previous `pngFilePath.split(".png")[0]` cut at the FIRST ".png"
    # occurrence, which is wrong for paths such as "dir.png/img.png";
    # only strip a trailing extension instead.
    if pngFilePath.endswith(".png"):
        base = pngFilePath[:-len(".png")]
    else:
        base = pngFilePath
    return base + "_legend.png"
from result import Result
from typing import Dict
......@@ -91,8 +90,7 @@ class DataBlock:
__SMOOTHFilePrefix = ""
__IMGFilePrefix = ""
__palette_names = ["afmhot", "gist_gray", "gist_heat", "gist_ncar", "gist_rainbow", "Greys", "hsv",
"inferno", "jet", "magma", "plasma","RdYlBu", "Spectral", "viridis"]
__palette_names = ["Greys", "RdYlBu", "hsv", "gist_ncar", "gist_rainbow", "gist_gray", "Spectral", "jet", "plasma", "inferno", "magma", "afmhot", "gist_heat"]
__palette = {palette_name: cmap2palette(palette_name) for palette_name in __palette_names}
__default_palette_name = "gist_rainbow"
......@@ -104,11 +102,11 @@ class DataBlock:
__renderingCapabilities = {
"itts": __transformation_names,
"default_itt_index": __transformation_names.index(__default_transformation_name),
"default_itt_index": 1,
"luts": __palette_names,
"default_lut_index": __palette_names.index(__default_palette_name),
"default_lut_index": 2,
"vmodes": __video_mode_names,
"default_vmode_index": __video_mode_names.index(__default_video_mode_name)
"default_vmode_index": 0
}
__dateTimeFormat = "%m/%d/%Y - %H:%M:%S"
......@@ -379,7 +377,7 @@ class DataBlock:
self.__data = da.from_array(hdu_list[data_index].data, chunks = (128, 128))
shp = self.__data.shape
if numDims > 2 :
self.__logger.debug("dimensions shp[1]: %s, shp[2]: %s" % (shp[1], shp[2]))
if shp[1] > shp[2]:
......@@ -395,6 +393,8 @@ class DataBlock:
self.__sliceMargin = da.from_array(np.nan * np.ones(shape=(shp[1] - shp[0], shp[1])), chunks=(128, 128))
else:
pass
#
# Header "normalization"
......@@ -509,6 +509,7 @@ class DataBlock:
if numDims == 4 and hdu_list[data_index].data.shape[0] > 1:
result.message=f"This dataset has NAXIS4 == {hdu_list[data_index].data.shape[0]} . yafits does not fully handle 4D data. As a default behaviour yafits will display the cube [0, :, : , :]"
self.__logger.debug(result.toJSON())
except Exception as e:
message = f"Error while opening file {relFITSFilePath}: {e}"
self.__logger.debug(message)
......@@ -618,6 +619,8 @@ class DataBlock:
else:
result = (self.__data[iDEC0:iDEC1+1, iRA0:iRA1+1]*cdelt)
self.__logger.debug("### value of CDELT")
self.__logger.debug(cdelt)
self.__collectStatistics("%d-%d"%(iFREQ0, iFREQ1), result.compute())
self.__logger.debug(f"Returns a {type(result)}")
......@@ -630,83 +633,88 @@ class DataBlock:
def __convertOneSlice2PNG (self, PNGPath, sliceData, transformation_name, palette_name, video_mode_name):
self.__logger.debug("__convertOneSlice2PNG: entering")
self.__logger.debug(f"sliceData is a {type(sliceData)}")
img_f = np.zeros((sliceData.shape[1], sliceData.shape[0]), dtype=np.float32)
img_i = np.zeros((sliceData.shape[1], sliceData.shape[0]), dtype=np.uint8)
data_steps = {}
palette = DataBlock.getPaletteFromName(palette_name)
N = len(palette)
p_low = None
p_high = None
if transformation_name == "percent98":
self.__logger.debug("A '%s' transformation will be applied " % "percent98")
self.__logger.debug("A '%s' transformation will be applied " % transformation_name)
img_f = np.array(sliceData)
p_high = self.__getPercentile(sliceData, 99.9)
p_low = self.__getPercentile(sliceData, 0.1)
img_i = sliceData
data_min, data_max = self.__min_max(sliceData)
shape = sliceData.shape
for irow in range(img_i.shape[1]//2):
img_i[irow,:], img_i[img_i.shape[1] - 1 - irow,:] = img_i[img_i.shape[1] - 1 - irow,:], img_i[irow,:].copy()
n_low_steps = 3
n_high_steps = 3
n_mid_steps = N - n_low_steps - n_high_steps
self.__writePNG(PNGPath, img_i, video_mode_name, palette_name, p_low, p_high)
self.__logger.debug("__convertOneSlice2PNG: exiting")
data_step = (p_low - data_min) / n_low_steps
for i in range (0, n_low_steps):
data_steps["%d_%d_%d"%(palette[i][0], palette[i][1], palette[i][2])] = data_min + i * data_step
def __writePNG(self, PNGPath, img_i, video_mode_name, palette_name, vmin=None, vmax=None):
"""Writes a PNG file and its color bar at given path
data_step = (p_high - p_low) / n_mid_steps
for i in range(0, n_mid_steps):
j = n_low_steps + i
data_steps["%d_%d_%d"%(palette[j][0], palette[j][1], palette[j][2])] = p_low + i * data_step
Parameters
----------
PNGPath : string
path where png file will be written on disk
img_i : array
data array
video_mode_name : string
name of video mode ( direct or inverse)
data_step = (data_max - p_high) / n_high_steps
for i in range(0, n_high_steps):
j = n_low_steps + n_mid_steps + i
data_steps["%d_%d_%d"%(palette[j][0], palette[j][1], palette[j][2])] = p_high + i * data_step
palette_name : string
name of color palette for color map
img_f_flat = img_f.flatten()
img_f_flat_N = np.zeros(img_f_flat.shape)
low_mask = img_f_flat < p_low
img_f_flat_N[low_mask] = n_low_steps * (img_f_flat[low_mask] - data_min) / (p_low - data_min)
vmin : float
minimum value for color bar
mid_mask = np.logical_and((img_f_flat >= p_low), (img_f_flat < p_high))
img_f_flat_N[mid_mask] = n_low_steps + n_mid_steps * (img_f_flat[mid_mask] - p_low) / ( p_high - p_low)
vmax : float
maximum value for colorbar
high_mask = img_f_flat >= p_high
img_f_flat_N[high_mask] = n_low_steps + n_mid_steps + n_high_steps * (img_f_flat[high_mask] - p_high) / (data_max - p_high) - 1
"""
img_f = img_f_flat_N.reshape(shape)
plt.clf()
plt.axis('off')
palette = ""
if video_mode_name == "direct":
palette = palette_name
elif video_mode_name == "inverse":
palette = palette + "_r"
else:
raise Exception("Unknown video mode name {}".format(video_mode_name))
self.__logger.debug("A '%s' transformation will be applied " % "minmax")
data_min, data_max = self.__min_max(sliceData)
if vmin is not None and vmax is not None:
plt.imshow(img_i, cmap=palette, vmin=vmin, vmax=vmax)
else :
plt.imshow(img_i, cmap=palette)
data_step = (data_max - data_min) / (N - 1)
for i in range(0, N):
data_steps["%d_%d_%d"%(palette[i][0], palette[i][1], palette[i][2])] = data_min + i * data_step
if os.path.isfile(PNGPath):
os.remove(PNGPath)
data_range = data_max - data_min
img_f = ((N - 1) * (np.array(sliceData) - data_min) / data_range)
legend_path = getLegendFilePath(PNGPath)
img_i = img_f.astype(np.uint8)
if os.path.isfile(legend_path):
os.remove(legend_path)
self.__logger.debug("video_mode_name = %s" % video_mode_name)
if (video_mode_name == "inverse"):
self.__logger.debug("Display in inverse mode ")
img_i = (N - 1) - img_i
else:
self.__logger.debug("Display in direct mode")
plt.savefig(PNGPath, format="png", bbox_inches="tight", pad_inches=0)
mpb = plt.pcolormesh(img_i,cmap=palette, vmin=vmin, vmax=vmax)
# draw a new figure and replot the colorbar there
fig,ax = plt.subplots()
plt.colorbar(mpb,ax=ax, orientation="horizontal")
ax.remove()
for irow in range(img_i.shape[1]//2):
img_i[irow,:], img_i[img_i.shape[1] - 1 - irow,:] = img_i[img_i.shape[1] - 1 - irow,:], img_i[irow,:].copy()
plt.savefig( legend_path, format="png", bbox_inches="tight", pad_inches=0)
# np.flipud(img_i)
f = open(PNGPath, 'wb')
w = png.Writer(sliceData.shape[1], sliceData.shape[0],palette=palette)
self.__logger.debug (f'type of img = ${type(img_i)}')
w.write(f, img_i.tolist())
f.close()
self.__logger.debug("__convertOneSlice2PNG: exiting")
return data_steps
def __convertSummedSliceRange2PNG(self, iFREQ0, iFREQ1, sliceData, transformation_name, palette_name, video_mode_name):
self.__logger.debug("__convertSummedSliceRange2PNG: entering")
......@@ -714,7 +722,7 @@ class DataBlock:
result = self.__convertOneSlice2PNG(PNGPath, sliceData, transformation_name, palette_name, video_mode_name)
self.__logger.debug("__convertSummedSliceRange2PNG: exiting")
return result
def __min_max(self, dataArray):
# data_max = np.nanmax(dataArray).astype(float).compute()
# data_min = np.nanmin(dataArray).astype(float).compute()
......@@ -786,6 +794,10 @@ class DataBlock:
#"\n CD2_2 = " +str(headerInfos["CDELT2"])+
rotation)
im.save(absPNGFilePath, "PNG", pnginfo=info)
self.__logger.debug("__addWCStoPNG: exiting")
......@@ -1424,9 +1436,13 @@ class DataBlock:
else:
x = self.__getSlice(iFREQ, step)
self.__logger.debug(f"type self.__data = {type(self.__data)}, type x = {type(x)}")
self.__logger.debug("Avant isnan")
self.__logger.debug(x.shape)
x = x.compute()
b = np.isnan(x)
self.__logger.debug(b.shape)
x[b]=None
self.__logger.debug("Apres isnan")
tmp={}
tmp["slice"]=x.tolist()
tmp["result"]["statistics"]=self.__statistics["%d"%iFREQ]
......@@ -1454,7 +1470,7 @@ class DataBlock:
return Result().ok(result)
def getPixelValueAtiRAiDEC(self, iRA: int, iDEC: int) -> Result:
self.__logger.debug("getPixelValueAtiRAiDEC: entering")
self.__logger.debug("getPixelValueAtiRAiDEC: entering");
if len(self.__data.shape) != 2:
self.__logger.debug("getPixelsValueAtiRAiDEC: exiting")
return Result().wrong("Data have an inappropriate shape '{}' for a 2 request ".format(self.__data.shape))
......@@ -1464,7 +1480,7 @@ class DataBlock:
except Exception as e:
result = Result().wrong("{0} - {1}".format(type(e), e.args))
self.__logger.debug("getPixelValueAtiRAiDEC: exiting")
self.__logger.debug("getPixelValueAtiRAiDEC: exiting");
return result
......@@ -1501,6 +1517,7 @@ class DataBlock:
return result
def getAverageSpectrum(self, iDEC0=None, iDEC1=None, iRA0=None, iRA1=None) -> Result:
self.__logger.debug("### getAverageSpectrum: entering")
if iRA0 == None:
iRA0 = 0
if iRA1 == None:
......@@ -1521,9 +1538,16 @@ class DataBlock:
with_dask=True
if with_dask:
if self.__header["BUNIT"] == "Jy/beam":
self.__logger.debug("### Jy/beam")
averageSpectrum = (dask.array.nansum(dask.array.nan_to_num(self.__data[:, iDEC0:iDEC1, iRA0:iRA1]), (1,2)) / self.__convert * self.__cdelt).compute().tolist()
averageSpectrumFits = self.__createFITSSpectrumFromData(iRA0,iRA1,"averageSpectrum",iDEC0,iDEC1,averageSpectrum, "Jy")
elif self.__header["BUNIT"].startswith("K"):
elif self.__header["BUNIT"].startswith("K"):
self.__logger.debug("### K")
self.__logger.debug(self.__data)
self.__logger.debug("self.__convert")
self.__logger.debug(self.__convert)
self.__logger.debug("self.__cdelt")
self.__logger.debug(self.__cdelt)
# print pour verifier que convert et cdelt == 1
# afficher IDEC0, IDEC1, iRA0, iRA1
# spectre scalé à 1.5 au dessus
......@@ -1532,15 +1556,18 @@ class DataBlock:
averageSpectrum = (dask.array.nanmean(dask.array.nan_to_num(self.__data[:, iDEC0:iDEC1, iRA0:iRA1]), (1,2)) / self.__convert * self.__cdelt).compute().tolist()
averageSpectrumFits = self.__createFITSSpectrumFromData(iRA0,iRA1,"averageSpectrum",iDEC0,iDEC1,averageSpectrum, self.__header["BUNIT"])
else:
self.__logger.debug("### else")
averageSpectrum = (dask.array.nanmean(dask.array.nan_to_num(self.__data[:, iDEC0:iDEC1, iRA0:iRA1]), (1,2)) / self.__convert * self.__cdelt).compute().tolist()
#averageSpectrumFits = self.__createFITSSpectrumFromData(iRA0,iRA1,"averageSpectrum",iDEC0,iDEC1,averageSpectrum, self.__header["BUNIT"])
averageSpectrumFits = None
else:
self.__logger.debug("### no dask")
averageSpectrum = np.nansum(np.nan_to_num(self.__data[:, iDEC0:iDEC1, iRA0:iRA1]), (1,2)) / self.__convert * self.__cdelt
averageSpectrumFits = self.__createFITSSpectrumFromData(iRA0,iRA1,"averageSpectrum",iDEC0,iDEC1,averageSpectrum, self.__header["BUNIT"])
#averageSpectrumFits = self.__createFITSSpectrumFromData(iRA0,iRA1,"averageSpectrum",iDEC0,iDEC1,averageSpectrum, "Jy")
result0 = {"averageSpectrum": averageSpectrum, "averageSpectrumFits": averageSpectrumFits}
self.__logger.debug(result0)
result = Result().ok(result0)
self.__logger.debug("getAverageSpectrum: exiting")
return result
......@@ -1616,7 +1643,8 @@ class DataBlock:
try:
# Realize the dask array as a numpy array.
squaredSliceData = self.__squareSliceData(sliceData).compute()
self.__convertOneSlice2PNG(absPNGFilePath, squaredSliceData, ittName, lutName, vmName)
data_steps = self.__convertOneSlice2PNG(
absPNGFilePath, squaredSliceData, ittName, lutName, vmName)
h = self.__header
if "CDELT1" in h and "CDELT2" in h:
......@@ -1644,7 +1672,7 @@ class DataBlock:
return Result().wrong("Problem while creating the PNG file: '%s'" % e)
result = {"path_to_png": relPNGFilePath, "path_to_legend_png": getLegendFilePath(relPNGFilePath),
result = {"data_steps": data_steps, "path_to_png": relPNGFilePath,
"statistics": self.__statistics["%d" % iFREQ]}
self.__logger.debug("getOneSliceAsPNG: exiting.")
......@@ -1681,7 +1709,7 @@ class DataBlock:
squaredData = self.__squareSliceData(summedSliceRangeData).compute()
self.__logger.debug(f"Squared data are now stored in a {type(squaredData)}")
self.__convertSummedSliceRange2PNG(iFREQ0, iFREQ1, squaredData, ittName, lutName, vmName)
data_steps = self.__convertSummedSliceRange2PNG(iFREQ0, iFREQ1, squaredData, ittName, lutName, vmName)
h = self.__header
if "CDELT1" in h and "CDELT2" in h:
x = {"BITPIX": h["BITPIX"],
......@@ -1703,8 +1731,7 @@ class DataBlock:
self.__logger.debug("getSummedSliceRangeAsPNG: exiting.")
return Result().wrong("Problem while creating the PNG file: '%s'" % e)
result = {"path_to_png": relPNGFilePath, "path_to_legend_png": getLegendFilePath(relPNGFilePath),
"statistics": self.__statistics["%d-%d"%(iFREQ0, iFREQ1)]}
result = {"data_steps": data_steps, "path_to_png": relPNGFilePath, "statistics": self.__statistics["%d-%d"%(iFREQ0, iFREQ1)]}
self.__logger.debug(f"result = {result}")
self.__logger.debug("getSummedSliceRangeAsPNG: exiting.")
return Result().ok(result)
......@@ -1968,3 +1995,4 @@ class DataBlock:
#
# End of DataBlock class definition.
#
/**
 * This module centralizes some functions to manipulate elements of the page
 *
 */
class DOMAccessor{
/**
* Returns the velocity input field
* @returns {Element}
*/
static getVelocityField(){
return document.getElementById("lines-velocity");
}
/**
* Returns the redshift input field
* @returns {Element}
*/
static getRedshiftField(){
return document.getElementById("lines-redshift");
}
/**
* Returns the title of the average spectrum
* @returns {Element}
*/
static getAverageSpectrumTitle(){
return document.getElementById("chart-title");
}
/**
 * Fetch the input field in which the user types the velocity.
 * @returns {Element} the element with id "lines-velocity"
 */
function getVelocityField() {
    const velocityInput = document.getElementById("lines-velocity");
    return velocityInput;
}
/**
* Returns element displaying the position of the mouse on the single slice
* @returns {Element}
*/
static getSingleSliceMousePosition(){
return document.getElementById('external-mouse-position-1');
}
/**
 * Fetch the input field in which the user types the redshift.
 * @returns {Element} the element with id "lines-redshift"
 */
function getRedshiftField() {
    const redshiftInput = document.getElementById("lines-redshift");
    return redshiftInput;
}
static getHnotField(){
return document.getElementById("lines-hnot");
}
/**
 * Fetch the element holding the title of the average spectrum chart.
 * @returns {Element} the element with id "chart-title"
 */
function getAverageSpectrumTitle() {
    const titleElement = document.getElementById("chart-title");
    return titleElement;
}
static getOmegaMField(){
return document.getElementById("lines-omegam");
}
/**
 * Fetch the element displaying the mouse position over the single slice.
 * @returns {Element} the element with id 'external-mouse-position-1'
 */
function getSingleSliceMousePosition() {
    const positionElement = document.getElementById('external-mouse-position-1');
    return positionElement;
}
static getSearchRadiusField(){
return document.getElementById("ned-search-radius");
}
/**
 * Fetch the input field for the Hubble constant (H0).
 * @returns {Element} the element with id "lines-hnot"
 */
function getHnotField() {
    const hnotInput = document.getElementById("lines-hnot");
    return hnotInput;
}
static getDlField(){
return document.getElementById("lines-dl");
}
/**
 * Fetch the input field for the Omega_M cosmology parameter.
 * @returns {Element} the element with id "lines-omegam"
 */
function getOmegaMField() {
    const omegaMInput = document.getElementById("lines-omegam");
    return omegaMInput;
}
/**
 * Fetch the input field for the NED search radius.
 * @returns {Element} the element with id "ned-search-radius"
 */
function getSearchRadiusField() {
    const radiusInput = document.getElementById("ned-search-radius");
    return radiusInput;
}
/**
* Returns element displaying the position of the mouse on the summed slice
* @returns {Element}
*/
static getSummedSliceMousePosition(){
return document.getElementById('external-mouse-position-2');
}
/**
 * Fetch the input field for the luminosity distance (DL).
 * @returns {Element} the element with id "lines-dl"
 */
function getDlField() {
    const dlInput = document.getElementById("lines-dl");
    return dlInput;
}
static setSliceChannel(text){
document.getElementById("slice-channel").textContent = text;
}
static setSliceRMS(text){
document.getElementById("slice-rms").textContent = text;
}
/**
 * Fetch the element displaying the mouse position over the summed slice.
 * @returns {Element} the element with id 'external-mouse-position-2'
 */
function getSummedSliceMousePosition() {
    const positionElement = document.getElementById('external-mouse-position-2');
    return positionElement;
}
static setSummedSliceRMS(text){
document.getElementById("summedslice-rms").textContent = text;
}
/**
 * Write the given text into the slice channel indicator.
 * @param {string} text the text to display
 */
function setSliceChannel(text) {
    const target = document.getElementById("slice-channel");
    target.textContent = text;
}
static updateSingleSliceColorBar(path){
document.getElementById("slice-colormap").innerHTML=`<img src="${path}" alt="colorbar"></img>`;
}
/**
 * Write the given text into the single-slice RMS indicator.
 * @param {string} text the text to display
 */
function setSliceRMS(text) {
    const target = document.getElementById("slice-rms");
    target.textContent = text;
}
static updateSingleSummedColorBar(path){
document.getElementById("summed-slice-colormap").innerHTML=`<img src="${path}" alt="colorbar"></img>`;
}
/**
 * Write the given text into the summed-slice RMS indicator.
 * @param {string} text the text to display
 */
function setSummedSliceRMS(text) {
    const target = document.getElementById("summedslice-rms");
    target.textContent = text;
}
/**
* Returns the loading image
* @returns
*/
static getLoading(){
return document.getElementById('loading');
}
/**
 * Fetch the loading image element.
 * @returns {Element} the element with id 'loading'
 */
function getLoading() {
    const loader = document.getElementById('loading');
    return loader;
}
/**
* Toggles the loading image
* @param {boolean} isVisible
*/
static showLoaderAction(isVisible) {
if (isVisible) {
this.getLoading().style.display = 'block';
} else {
this.getLoading().style.display = 'none';
}
/**
 * Toggle the visibility of the loading image.
 * @param {boolean} isVisible true to show the loader, false to hide it
 */
function showLoaderAction(isVisible) {
    const display = isVisible ? 'block' : 'none';
    getLoading().style.display = display;
}
/**
* Returns the field containing the status of the SAMP connection
* The status is written in text and must be parsed before use.
* @returns {Element}
*/
static getSampConnectionStatus(){
return document.getElementById("withSamp");
}
/**
 * Fetch the field containing the status of the SAMP connection.
 * The status is written in text and must be parsed before use.
 * @returns {Element} the element with id "withSamp"
 */
function getSampConnectionStatus() {
    const statusField = document.getElementById("withSamp");
    return statusField;
}
/**
* Returns the field containing the root url of the service
* @returns {Element}
*/
static getUrlRoot(){
return document.getElementById("urlRoot");
}
/**
 * Fetch the field containing the root URL of the service.
 * @returns {Element} the element with id "urlRoot"
 */
function getUrlRoot() {
    const urlRootField = document.getElementById("urlRoot");
    return urlRootField;
}
/**
* Returns the field containing the path of the currently opened FITS file
* @returns {Element}
*/
static getFitsFilePath(){
return document.getElementById("relFITSFilePath");
}
/**
 * Fetch the field containing the path of the currently opened FITS file.
 * @returns {Element} the element with id "relFITSFilePath"
 */
function getFitsFilePath() {
    const pathField = document.getElementById("relFITSFilePath");
    return pathField;
}
/**