Now we will run a die aggregation analysis using the device analyses we triggered in the device analysis notebook. Make sure all the analyses we triggered are finished (i.e. make sure the last cell in that notebook has finished running)!
As before, make sure you have the following environment variables set or added to a .env
file:
GDSFACTORY_HUB_API_URL="https://{org}.gdsfactoryhub.com"
GDSFACTORY_HUB_QUERY_URL="https://query.{org}.gdsfactoryhub.com"
GDSFACTORY_HUB_KEY="<your-gdsfactoryplus-api-key>"
# Scope everything to a per-user project so parallel runs don't collide.
username = getpass.getuser()
project_id = f"rings-{username}"

# Credentials are read from the GDSFACTORY_HUB_* environment variables above.
client = gfh.create_client_from_env(project_id=project_id)
api = client.api()      # handle for uploads / triggering analyses
query = client.query()  # handle for reading results back
You can aggregate any metric for a die analysis. For example, we had three different ring radii, each of which will have a different FSR.
# Local example run: aggregate the per-device "fsr" results for the
# r=20 µm rings on this die into a single "fsr_mean" output.
aggregation_kwargs = {
    "device_data_function_id": "fsr",
    "output_key": "fsr_mean",
    "device_attributes": {"radius_um": 20},
}
aggregate_device_analyses.run(die_pkey=die_pkey, **aggregation_kwargs)
# Validate the aggregation function against a single test die before
# uploading it, then inspect the outcome with the summary plot.
validation_kwargs = {
    "device_data_function_id": "fsr",
    "output_key": "fsr_mean",
    "device_attributes": {"radius_um": 20},
}
result = api.validate_function(
    function_id="aggregate_device_analyses",
    target_model="die",
    test_target_model_pk=die_pkey,
    file=gfh.get_module_path(aggregate_device_analyses),
    test_kwargs=validation_kwargs,
)
result.summary_plot()
# Register the aggregation function on the hub. suppress_api_error swallows
# API errors (presumably e.g. a duplicate upload — confirm) so this cell
# stays re-runnable.
module_path = gfh.get_module_path(aggregate_device_analyses)
with gfh.suppress_api_error():
    result = api.upload_function(
        function_id="aggregate_device_analyses",
        target_model="die",
        file=module_path,
    )
# Trigger one aggregation analysis per die. start_analysis only enqueues the
# task (it does not wait for completion), so we collect the task ids to
# check on them later.
task_ids = []
progress = tqdm(die_pkeys)
for pk in progress:
    progress.set_postfix(die_pk=pk)
    tid = api.start_analysis(
        analysis_id=f"die_fsr_aggregation_r20um_{pk}",
        function_id="aggregate_device_analyses",
        target_model="die",
        target_model_pk=pk,
        kwargs={
            "device_data_function_id": "fsr",
            "output_key": "fsr_mean",
            "device_attributes": {"radius_um": 20},
        },
    )
    task_ids.append(tid)