Skip to content

Commit

Permalink
Update quinine and agent
Browse files Browse the repository at this point in the history
  • Loading branch information
XPD Operator committed Mar 12, 2024
1 parent c77b32c commit 5ef9d2e
Show file tree
Hide file tree
Showing 7 changed files with 181 additions and 49 deletions.
4 changes: 3 additions & 1 deletion scripts/_data_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,9 +222,11 @@ def _1peak_fit_good_PL(x0, y0, fit_function, peak=False, maxfev=100000, fit_boun
if dummy_test:
x = x0[w1:w2]
y = y0[w1:w2]
x_low_bnd = x0[w1]
else:
x = x0[w2:w3]
y = y0[w2:w3]
x_low_bnd = x0[w2]

mean = sum(x * y) / sum(y)
sigma = np.sqrt(sum(abs(y) * (x - mean) ** 2) / sum(y))
Expand All @@ -237,7 +239,7 @@ def _1peak_fit_good_PL(x0, y0, fit_function, peak=False, maxfev=100000, fit_boun


try:
bnd = ((0,200,0),(y.max()*1.15,1000, np.inf))
bnd = ((0, x_low_bnd, 0),(y.max()*1.15, 1000, np.inf))
popt, pcov = curve_fit(fit_function, x, y, p0=initial_guess, bounds=bnd, maxfev=maxfev)
except (RuntimeError, ValueError):
bnd = (-np.inf, np.inf)
Expand Down
41 changes: 39 additions & 2 deletions scripts/_data_export.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,6 +95,38 @@ def _auto_name_sample2(pump_list, prefix=None):
return sample



def _exprot_rate_agent(metadata_dic, rate_label_dic, agent_data):
    """Record each precursor's infusion rate (in ul/min) into ``agent_data``.

    For every chemical key in ``rate_label_dic`` (e.g. 'CsPb', 'Br'), find the
    single matching entry in ``metadata_dic['precursors']``, convert its
    infusion rate to ul/min, zero it out when the pump is idle, and store it
    under the corresponding agent-facing label.

    Parameters
    ----------
    metadata_dic : dict
        Must provide 'pump_status', 'precursors', 'infuse_rate' and
        'infuse_rate_unit' entries (parallel per-pump sequences).
    rate_label_dic : dict
        Maps a precursor substring to the key used in ``agent_data``.
    agent_data : dict
        Mutated in place with one rate entry per label.

    Returns
    -------
    dict
        The same ``agent_data`` object, updated.

    Raises
    ------
    ValueError
        If a key matches more than one precursor.
    """
    # 1 while a pump reports 'Infusing', 0 while 'Idle'; any other status is
    # skipped, so the flags stay aligned with the recognized pumps only.
    infusing_flags = [1 if status == 'Infusing' else 0
                      for status in metadata_dic['pump_status']
                      if status in ('Infusing', 'Idle')]

    for key, label in rate_label_dic.items():
        # Indices of precursors whose name contains this chemical key.
        matches = np.flatnonzero(
            np.asarray([key in precursor for precursor in metadata_dic['precursors']]))

        if matches.size > 1:
            raise ValueError(f'Check key: {key} of rate_label_dic. More than one precursor was found.')

        if matches.size == 0:
            # Chemical not present in this run: rate is zero.
            agent_data[label] = 0
            continue

        idx = matches[0]
        try:
            ruc = rate_unit_converter(r0=metadata_dic["infuse_rate_unit"][idx], r1='ul/min')
            agent_data[label] = metadata_dic["infuse_rate"][idx] * ruc * infusing_flags[idx]
        except IndexError:
            # Rate/status sequences shorter than the precursor list: treat as off.
            agent_data[label] = 0

    return agent_data



def _readable_time(unix_time):
from datetime import datetime
dt = datetime.fromtimestamp(unix_time)
Expand Down Expand Up @@ -132,6 +164,7 @@ def read_qepro_by_stream(uid, stream_name='primary', data_agent='tiled'):
except NameError:
import databroker
catalog = databroker.catalog['xpd-ldrd20-31']
# catalog = databroker.catalog['xpd']
run = catalog[uid]
meta = run.metadata

Expand All @@ -141,6 +174,8 @@ def read_qepro_by_stream(uid, stream_name='primary', data_agent='tiled'):
except NameError:
from tiled.client import from_profile
tiled_client = from_profile("xpd-ldrd20-31")
# tiled_client = from_profile("xpd")

run = tiled_client[uid]
meta = run.metadata

Expand Down Expand Up @@ -456,7 +491,8 @@ def read_qepro_from_tiled(uid):
from_profile
except NameError:
from tiled.client import from_profile
tiled_client = from_profile("xpd-ldrd20-31")
# tiled_client = from_profile("xpd-ldrd20-31")
tiled_client = from_profile("xpd")

run = tiled_client[uid]
ds = run.primary.read()
Expand Down Expand Up @@ -485,7 +521,8 @@ def read_qepro_from_db(uid):
db
except NameError:
import databroker
db = databroker.Broker.named('xpd-ldrd20-31')
# db = databroker.Broker.named('xpd-ldrd20-31')
db = databroker.Broker.named('xpd')

# full_uid = db[uid].start['uid']
# unix_time = db[uid].start['time']
Expand Down
4 changes: 2 additions & 2 deletions scripts/_plot_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def plot_average_good(self, x, y, color=None, label=None, clf_limit=10):



def plot_CsPbX3(self, x, y, wavelength, wavelength_range=[420, 660], label=None, clf_limit=10):
def plot_CsPbX3(self, x, y, wavelength, label=None, clf_limit=10):

# import palettable.colorbrewer.diverging as pld
# palette = pld.RdYlGn_4_r
Expand Down Expand Up @@ -273,7 +273,7 @@ def plot_fitting(self, x, y, popt_list, single_f, fill_between=True, num_var=3):



def color_idx_map_halides(peak_wavelength, halide_w_range=[420, 520, 660]):
def color_idx_map_halides(peak_wavelength, halide_w_range=[400, 520, 660]):

from matplotlib.colors import LinearSegmentedColormap
colors = [
Expand Down
29 changes: 9 additions & 20 deletions scripts/kafka_consumer_iterate_qserver.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,11 +116,15 @@

use_good_bad = True
USE_AGENT_iterate = True
peak_target = 580
peak_target = 590

write_agent_data = True
# rate_label = ['infusion_rate_CsPb', 'infusion_rate_Br', 'infusion_rate_Cl', 'infusion_rate_I2']
rate_label = ['infusion_rate_CsPb', 'infusion_rate_Br', 'infusion_rate_I2', 'infusion_rate_Cl']
# rate_label = ['infusion_rate_CsPb', 'infusion_rate_Br', 'infusion_rate_I2', 'infusion_rate_Cl']
rate_label_dic = {'CsPb':'infusion_rate_CsPb',
'Br':'infusion_rate_Br',
'ZnI':'infusion_rate_I2',
'ZnCl':'infusion_rate_Cl'}

if USE_AGENT_iterate:

Expand Down Expand Up @@ -166,7 +170,7 @@ def print_kafka_messages(beamline_acronym, csv_path=csv_path,
pump_list=pump_list, sample=sample, precursor_list=precursor_list,
mixer=mixer, dummy_test=dummy_kafka, plqy=PLQY, prefix=prefix,
agent_data_path=agent_data_path, peak_target=peak_target,
rate_label=rate_label):
rate_label_dic=rate_label_dic):

print(f"Listening for Kafka messages for {beamline_acronym}")
print(f'Defaul parameters:\n'
Expand Down Expand Up @@ -373,30 +377,15 @@ def print_message(consumer, doctype, doc,
plqy_dic = {'PL_integral':PL_integral_s, 'Absorbance_365':absorbance_s, 'plqy': plqy}

optical_property = {'Peak': peak_emission, 'FWHM':fwhm, 'PLQY':plqy}


## Check if pump is Infusing or Idle
is_infusing = []
for pump_status in metadata_dic['pump_status']:
if pump_status == 'Infusing':
is_infusing.append(1)
elif pump_status == 'Idle':
is_infusing.append(0)

if write_agent_data:
agent_data = {}

agent_data.update(optical_property)
agent_data.update({k:v for k, v in metadata_dic.items() if len(np.atleast_1d(v)) == 1})
# agent_data.update({k:v for k, v in metadata_dic.items()})

for i in range(len(rate_label)):
try:
ruc = da.rate_unit_converter(r0 = metadata_dic["infuse_rate_unit"][i], r1 = 'ul/min')
agent_data[rate_label[i]] = metadata_dic["infuse_rate"][i]*ruc*is_infusing[i]
except IndexError:
agent_data[rate_label[i]] = 0.0

agent_data = de._exprot_rate_agent(metadata_dic, rate_label_dic, agent_data)

with open(f"{agent_data_path}/{data_id}.json", "w") as f:
json.dump(agent_data, f)

Expand Down
31 changes: 10 additions & 21 deletions scripts/kafka_consumer_publisher.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from _plot_helper import plot_uvvis
import _data_analysis as da

from bluesky_queueserver.manager.comms import zmq_single_request
# from bluesky_queueserver.manager.comms import zmq_single_request

# db = databroker.Broker.named('xpd-ldrd20-31')
# catalog = databroker.catalog['xpd-ldrd20-31']
Expand All @@ -39,7 +39,8 @@

## Input variables: read from inputs_qserver_kafka.xlsx
xlsx = '/home/xf28id2/Documents/ChengHung/inputs_kafka_single.xlsx'
input_dic = de._read_input_xlsx(xlsx)
input_dic = de._read_input_xlsx(xlsx, sheet_name='inputs_quinine')
# input_dic = de._read_input_xlsx(xlsx, sheet_name='inputs')

##################################################################
# Define namespace for tasks in Qserver and Kafa
Expand Down Expand Up @@ -73,13 +74,16 @@

write_agent_data = True
# rate_label = ['infusion_rate_CsPb', 'infusion_rate_Br', 'infusion_rate_Cl', 'infusion_rate_I2']
rate_label = ['infusion_rate_CsPb', 'infusion_rate_Br', 'infusion_rate_I2', 'infusion_rate_Cl']
rate_label_dic = {'CsPb':'infusion_rate_CsPb',
'Br':'infusion_rate_Br',
'ZnI':'infusion_rate_I2',
'ZnCl':'infusion_rate_Cl'}


def print_kafka_messages(beamline_acronym, csv_path=csv_path,
key_height=key_height, height=height, distance=distance,
dummy_test=dummy_kafka, plqy=PLQY,
agent_data_path=agent_data_path, rate_label=rate_label):
agent_data_path=agent_data_path, rate_label_dic=rate_label_dic):

print(f"Listening for Kafka messages for {beamline_acronym}")
print(f'Defaul parameters:\n'
Expand Down Expand Up @@ -286,30 +290,15 @@ def print_message(consumer, doctype, doc,
plqy_dic = {'PL_integral':PL_integral_s, 'Absorbance_365':absorbance_s, 'plqy': plqy}

optical_property = {'Peak': peak_emission, 'FWHM':fwhm, 'PLQY':plqy}


## Check if pump is Infusing or Idle
is_infusing = []
for pump_status in metadata_dic['pump_status']:
if pump_status == 'Infusing':
is_infusing.append(1)
elif pump_status == 'Idle':
is_infusing.append(0)

if write_agent_data:
agent_data = {}

agent_data.update(optical_property)
agent_data.update({k:v for k, v in metadata_dic.items() if len(np.atleast_1d(v)) == 1})
# agent_data.update({k:v for k, v in metadata_dic.items()})

for i in range(len(rate_label)):
try:
ruc = da.rate_unit_converter(r0 = metadata_dic["infuse_rate_unit"][i], r1 = 'ul/min')
agent_data[rate_label[i]] = metadata_dic["infuse_rate"][i]*ruc*is_infusing[i]
except IndexError:
agent_data[rate_label[i]] = 0.0

agent_data = de._exprot_rate_agent(metadata_dic, rate_label_dic, agent_data)

with open(f"{agent_data_path}/{data_id}.json", "w") as f:
json.dump(agent_data, f)

Expand Down
6 changes: 3 additions & 3 deletions scripts/prepare_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,11 @@ def build_agen(peak_target=660, peak_tolerance=5):
agent_data_path = '/home/xf28id2/data_halide'


if peak_target > 525:
if peak_target > 515:
I_up_limit = 200
Cl_up_limit = 0

elif peak_target < 515:
elif peak_target < 500:
I_up_limit = 0
Cl_up_limit = 200

Expand Down Expand Up @@ -79,7 +79,7 @@ def build_agen(peak_target=660, peak_tolerance=5):
with open(fp, "r") as f:
data = json.load(f)

r_2_min = 0.85
r_2_min = 0.05
try:
if data['r_2'] < r_2_min:
print(f'Skip because "r_2" of {os.path.basename(fp)} is {data["r_2"]:.2f} < {r_2_min}.')
Expand Down
115 changes: 115 additions & 0 deletions scripts/prepare_agent_select.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@

import os
import json
import glob
import sys
import numpy as np
from tqdm import tqdm
# sys.path.insert(0, "/home/xf28id2/src/blop")

from blop import Agent, DOF, Objective


def build_agen2(peak_target=660, peak_tolerance=5):
    """Build a blop ``Agent`` for the halide-composition search, trained only
    on stored measurements whose composition matches the targeted perovskite.

    Parameters
    ----------
    peak_target : float, optional
        Desired emission-peak wavelength in nm. > 515 selects the iodide
        branch (CsPbI3), < 500 the chloride branch (CsPbCl3), otherwise the
        pure-bromide branch (CsPbBr3).
    peak_tolerance : float, optional
        Half-width of the acceptable window around ``peak_target`` (nm).

    Returns
    -------
    blop.Agent
        Agent with DOFs/objectives configured and models trained on the
        matching JSON records under ``agent_data_path``.
    """
    agent_data_path = '/home/xf28id2/data_halide'

    # Choose which halide channel is allowed to flow, and the composition
    # label used to filter the stored training data.
    if peak_target > 515:
        I_up_limit = 200
        Cl_up_limit = 0
        CsPbX3 = 'CsPbI3'
    elif peak_target < 500:
        I_up_limit = 0
        Cl_up_limit = 200
        CsPbX3 = 'CsPbCl3'
    else:
        # 500 <= peak_target <= 515: pure bromide, both extra halides off.
        I_up_limit = 0
        Cl_up_limit = 0
        CsPbX3 = 'CsPbBr3'

    dofs = [
        DOF(description="CsPb(oleate)3", name="infusion_rate_CsPb", units="uL/min", search_bounds=(8, 110)),
        DOF(description="TOABr", name="infusion_rate_Br", units="uL/min", search_bounds=(50, 200)),
        DOF(description="ZnI2", name="infusion_rate_I2", units="uL/min", search_bounds=(0, I_up_limit)),
        DOF(description="ZnCl2", name="infusion_rate_Cl", units="uL/min", search_bounds=(0, Cl_up_limit)),
    ]

    objectives = [
        Objective(description="Peak emission", name="Peak", target=(peak_target-peak_tolerance, peak_target+peak_tolerance), weight=1, max_noise=0.25),
        Objective(description="Peak width", name="FWHM", target="min", log=True, weight=1., max_noise=0.25),
        Objective(description="Quantum yield", name="PLQY", target="max", log=True, weight=1., max_noise=0.25),
    ]

    agent = Agent(dofs=dofs, objectives=objectives, db=None, verbose=True)
    # NOTE(review): a pre-built init file (e.g. init_240122_01.h5) could be
    # loaded via agent.load_data(...) instead of replaying the JSONs below.

    metadata_keys = ["time", "uid", "r_2"]
    r_2_min = 0.05  # minimum goodness-of-fit for a record to be used

    filepaths = sorted(glob.glob(f"{agent_data_path}/*.json"))
    for fp in tqdm(filepaths):
        with open(fp, "r") as f:
            data = json.load(f)

        # Treat a missing infusion-rate key as zero flow instead of crashing
        # (bug fix: the original indexed these keys directly, outside any
        # try/except, so an incomplete record raised an uncaught KeyError).
        i_rate = data.get('infusion_rate_I2', 0)
        cl_rate = data.get('infusion_rate_Cl', 0)
        _filter = {'CsPbBr3': i_rate == 0 and cl_rate == 0,
                   'CsPbI3': i_rate != 0 and cl_rate == 0,
                   'CsPbCl3': i_rate == 0 and cl_rate != 0}

        if not _filter[CsPbX3]:
            # Composition does not match the targeted perovskite: skip record.
            continue

        try:
            if data['r_2'] < r_2_min:
                print(f'Skip because "r_2" of {os.path.basename(fp)} is {data["r_2"]:.2f} < {r_2_min}.')
            else:
                x = {k: [data[k]] for k in agent.dofs.names}
                y = {k: [data[k]] for k in agent.objectives.names}
                metadata = {k: [data.get(k, None)] for k in metadata_keys}
                # Defer model training until all records are ingested.
                agent.tell(x=x, y=y, metadata=metadata, train=False, update_models=False)
        except KeyError:
            print(f'{os.path.basename(fp)} has no "r_2".')

    agent._construct_all_models()
    agent._train_all_models()

    print(f'The target of the emission peak is {peak_target} nm.')

    return agent

# print(agent.ask("qei", n=1))
# print(agent.ask("qr", n=36))

0 comments on commit 5ef9d2e

Please sign in to comment.