"""Example script demonstrating a shell and fill example.
"""
import pandas as pd
import numpy as np
import os
import csv
from numpy.typing import ArrayLike
from pprint import pprint
from scipy.spatial.transform import Rotation as R
from metafold import MetafoldClient
from metafold.func_types import JSONEvaluator, FuncType, Vec3f, Mat4f
from metafold.func import *
# Project credentials.  NOTE(review): access_token is a blank placeholder —
# fill in a real token before running.
project_id = 5765
access_token = " "
client = MetafoldClient(access_token, project_id)

# Global sample-point source: a 200 x 70 x 120 box anchored at
# (-100, -35, -10), sampled on a 516 x 180 x 284 grid.
source = GenerateSamplePoints(
    {
        "offset": [-100, -35, -10],
        "size": [200, 70, 120],
        "resolution": [516, 180, 284],
    }
)
# (2) Check whether the STL we are interested in is already stored in the
# project.  If it is, refresh it in place; otherwise upload it fresh.
stl_filename = "rockerArm.stl"
existing = client.assets.list(q=f"filename:{stl_filename}")
if existing:
    stl_asset = client.assets.update(existing[0].id, stl_filename)
else:
    stl_asset = client.assets.create(stl_filename)

# Both update() and create() return a handle to the asset now in the cloud;
# its server-side name is available as stl_asset.filename.  Use it to run a
# sample_triangle_mesh() job that voxelizes the mesh.
stl_job = client.jobs.run(
    "sample_triangle_mesh",
    {
        "mesh_filename": stl_asset.filename,
        "resolution": [256, 256, 256],
    },
)
# The sample_triangle_mesh job creates new assets; pick out the .bin volume
# it produced by file extension.  Fail loudly if none exists — otherwise
# `bin_asset` would be unbound and the script would die later with a
# confusing NameError.
for asset in stl_job.assets:
    if asset.filename.endswith(".bin"):
        bin_asset = asset
        break
else:
    raise RuntimeError("sample_triangle_mesh job produced no .bin volume asset")
# With the .bin file in the cloud we can run a graph evaluation as usual.
# Bounding box and resolution of the sampled mesh volume.
mesh_volume = {
    "offset": [-97, -30.431371688842773, -1.1606040000915527],
    "size": [194, 60.86274337768555, 101.18431615829468],
    "resolution": [256, 256, 256],
}

# Reconstruct the part's field by sampling the raw volume asset over the
# mesh_volume bounding box.
Part = SampleVolume(
    PointSource,
    LoadVolume(
        volume_data={"file_type": "Raw", "path": bin_asset.filename},
        parameters={"resolution": mesh_volume["resolution"]},
    ),
    {
        "volume_size": mesh_volume["size"],
        "volume_offset": mesh_volume["offset"],
    },
)
# Hollow the part into a shell of thickness 2 (model units), offset by half
# the thickness relative to the redistanced part surface.
shell_thickness = 2
shell = Shell(
    Redistance(Part),
    {
        "thickness": shell_thickness,
        "offset": 0.5 * shell_thickness,
    },
)
# Strut radii and unit-cell scales swept over in the study at the bottom of
# the file.
section_radius = [0.1, 0.125, 0.15]
scale = [5, 10, 15]

# Body-centred-cubic (BCC) unit cell: eight corner nodes plus a centre node
# (index 7, at [0.5, 0.5, 0.5]), every corner connected to the centre.
bcc_params = {
    "lattice_data": {
        "edges": [
            [0, 7], [4, 7], [1, 7], [2, 7],
            [5, 7], [6, 7], [3, 7], [7, 8],
        ],
        "nodes": [
            [0, 0, 0], [0, 1, 0], [1, 0, 0],
            [1, 1, 0], [0, 0, 1], [0, 1, 1],
            [1, 0, 1], [0.5, 0.5, 0.5], [1, 1, 1],
        ],
    },
}
# BCC lattice field evaluated over the global sample points.
# NOTE(review): the sweep at the bottom of the file mutates bcc_params after
# this call — it assumes SampleLattice keeps a live reference to the dict
# rather than copying it; confirm against the metafold SDK.
bcc = SampleLattice(source, parameters=bcc_params)
# BCC cell plus vertical struts along the cube's z-edges ("BCCZ"): all eight
# corners connect to the centre node (index 8), and each bottom corner is
# joined to the corner directly above it.
bccz_params = {
    "lattice_data": {
        "edges": [
            [0, 8], [1, 8], [2, 8], [3, 8],
            [4, 8], [5, 8], [6, 8], [7, 8],
            [3, 7], [5, 1], [6, 2], [4, 0],
        ],
        "nodes": [
            [0, 0, 0], [1, 0, 0], [0, 1, 0],
            [1, 1, 0], [0, 0, 1], [1, 0, 1],
            [0, 1, 1], [1, 1, 1], [0.5, 0.5, 0.5],
        ],
    },
}
# BCCZ lattice field evaluated over the global sample points.
# NOTE(review): bccz_params is mutated by the sweep below after this call —
# assumes SampleLattice holds a reference, not a copy; confirm.
bccz = SampleLattice(source, parameters=bccz_params)
# Unit cell of 24 face-centred nodes: six square rings (one per cube face)
# joined by connecting struts between adjacent rings.  Presumably a
# Kelvin-style cell, hence the name "k" — TODO confirm.
k_params = {
    "lattice_data": {
        "edges": [
            # square rings, one per face
            [0, 1], [1, 2], [2, 3], [3, 0],
            [4, 7], [5, 7], [6, 5], [6, 4],
            [10, 9], [9, 8], [8, 11], [11, 10],
            [12, 14], [14, 15], [15, 13], [13, 12],
            [19, 16], [16, 18], [18, 17], [17, 19],
            [20, 22], [22, 21], [21, 23], [23, 20],
            # struts joining the rings
            [0, 16], [6, 18], [19, 15], [17, 8],
            [13, 11], [14, 3], [1, 4], [5, 9],
            [20, 2], [22, 12], [21, 10], [23, 7],
        ],
        "nodes": [
            [0, 0.5, 0.25], [0, 0.25, 0.5], [0, 0.5, 0.75], [0, 0.75, 0.5],
            [0.25, 0, 0.5], [0.75, 0, 0.5], [0.5, 0, 0.25], [0.5, 0, 0.75],
            [1, 0.5, 0.25], [1, 0.25, 0.5], [1, 0.5, 0.75], [1, 0.75, 0.5],
            [0.5, 1, 0.75], [0.75, 1, 0.5], [0.25, 1, 0.5], [0.5, 1, 0.25],
            [0.25, 0.5, 0], [0.75, 0.5, 0], [0.5, 0.25, 0], [0.5, 0.75, 0],
            [0.25, 0.5, 1], [0.75, 0.5, 1], [0.5, 0.75, 1], [0.5, 0.25, 1],
        ],
    },
}
# "k" lattice field evaluated over the global sample points.
# NOTE(review): k_params is mutated by the sweep below after this call —
# assumes SampleLattice holds a reference, not a copy; confirm.
k = SampleLattice(source, parameters=k_params)
designs = {
    "bcc": bcc,
    "bccz": bccz,
    "k": k,
}
# Parameter dict backing each SampleLattice node above.  NOTE(review): the
# sweep mutates these dicts after node creation — assumes SampleLattice keeps
# a live reference (the original script relied on the same behavior).
params_for = {
    "bcc": bcc_params,
    "bccz": bccz_params,
    "k": k_params,
}

# Sweep every (design, strut radius, cell scale) combination: evaluate the
# interior volume of the shell-plus-lattice-fill solid and export an STL of
# each variant.
densities = []
for design, sample_lattice in designs.items():
    for section_radius_value in section_radius:
        for scale_value in scale:
            # Re-parameterize only the active design's lattice; the other
            # dicts are set in their own iterations before being evaluated.
            params = params_for[design]
            params["section_radius"] = section_radius_value
            params["scale"] = [scale_value, scale_value, scale_value]

            # Lattice infill clipped to the part interior...
            latticefill = CSG(
                Part,
                sample_lattice,
                parameters={"smoothing": 0.1, "operation": "Intersect"}
            )
            # ...unioned with the outer shell, then thresholded to a solid.
            shape_func = Threshold(
                Redistance(
                    CSG(
                        shell,
                        latticefill,
                        parameters={"smoothing": 0.1, "operation": "Union"}
                    )
                ),
                {
                    "width": 0.03,
                },
            )

            # Serialize the graph and run the metrics job.
            evaluator = JSONEvaluator(source)
            shape_func(evaluator)
            graph_json = evaluator.json()
            job = client.jobs.run("evaluate_metrics", {"graph": graph_json, "point_source": 0})

            record = (design, scale_value, section_radius_value, job.meta["interior_volume"])
            pprint(record)
            densities.append(record)

            # Export an STL of this variant and download it locally.
            export_job = client.jobs.run("export_triangle_mesh", {
                "graph": graph_json,
                "point_source": 0,
            })
            export_asset = export_job.assets[0].id
            client.assets.download_file(export_asset, f"{design}_{scale_value}_{int(section_radius_value * 1000)}.stl")

# Convert to DataFrame.  Column order matches the tuple order appended above:
# (design, scale, section radius, volume).  The original script listed
# section_radius_value before scale_value, mislabeling every row of the CSV.
df = pd.DataFrame(densities, columns=["design", "scale_value", "section_radius_value", "interior_volume"])
print(df)
df.to_csv('shell_fill_sweep.csv', index=False)