"""Example script demonstrating a shell and fill example.
"""
import pandas as pd
import numpy as np
import os
import csv
from numpy.typing import ArrayLike
from pprint import pprint
from scipy.spatial.transform import Rotation as R
from metafold import MetafoldClient
from metafold.func_types import JSONEvaluator, FuncType, Vec3f, Mat4f
from metafold.func import *
# --- Client setup ---------------------------------------------------------
project_id = 5765
# NOTE(review): the access token was a hard-coded placeholder (" ").
# Read it from the environment instead; the placeholder remains the
# fallback so behavior is unchanged when the variable is unset.
access_token = os.environ.get("METAFOLD_ACCESS_TOKEN", " ")
client = MetafoldClient(access_token, project_id)

# (1) Define the sampling domain: an axis-aligned box given by its minimum
# corner ("offset") and extents ("size"), discretized at "resolution"
# samples per axis.
source = GenerateSamplePoints(
    {
        "offset": [-100, -35, -10],
        "size": [200, 70, 120],
        "resolution": [516, 180, 284],
    }
)
# (2) Using the API, check if the file you are interested in uploading is
# already stored in your project. If so, update it in place; otherwise
# upload it as a new asset.
stl_filename = "rockerArm.stl"
if existing := client.assets.list(q=f"filename:{stl_filename}"):
    stl_asset = client.assets.update(existing[0].id, stl_filename)
else:
    stl_asset = client.assets.create(stl_filename)
# *.update and *.create return a handle to the asset we just uploaded
# (a network asset that exists in the cloud). We can reference the filename
# of this asset using stl_asset.filename.

# (3) Use the .stl file in the cloud to run a sample_triangle_mesh() job,
# which voxelizes the mesh into a raw volume.
stl_params = {
    "mesh_filename": stl_asset.filename,
    "resolution": [256, 256, 256],
}
stl_job = client.jobs.run(
    "sample_triangle_mesh",
    stl_params,
)

# The job creates new assets; the sampled volume is the one with a .bin
# extension. Fail loudly if it is missing — the original loop left
# bin_asset unbound in that case, surfacing later as a confusing NameError.
bin_asset = next(
    (asset for asset in stl_job.assets if asset.filename.endswith(".bin")),
    None,
)
if bin_asset is None:
    raise RuntimeError("sample_triangle_mesh produced no .bin volume asset")
# Now with our .bin file in the cloud we can use it in a graph
# evaluation like normal.
# Bounding box and grid of the sampled mesh volume (matches the
# sample_triangle_mesh output).
mesh_volume = {
    "offset": [-97, -30.431371688842773, -1.1606040000915527],
    "size": [194, 60.86274337768555, 101.18431615829468],
    "resolution": [256, 256, 256],
}
# Load the raw volume asset produced by the sampling job...
loaded_volume = LoadVolume(
    volume_data={
        "file_type": "Raw",
        "path": bin_asset.filename,
    },
    parameters={"resolution": mesh_volume["resolution"]},
)
# ...and resample it over the point source to reconstruct the part field.
Part = SampleVolume(
    PointSource,
    loaded_volume,
    {
        "volume_size": mesh_volume["size"],
        "volume_offset": mesh_volume["offset"],
    },
)
# Build a shell of the part: a skin of the given thickness. The
# half-thickness offset centers the shell on the original surface.
shell_thickness = 2
shell = Shell(
    Redistance(Part),
    {"thickness": shell_thickness, "offset": shell_thickness / 2},
)
# Unit cell for the lattice: every listed node connects to the body-center
# node 7 at (0.5, 0.5, 0.5), plus one extra strut from the center to node 8
# at (1, 1, 1). Struts have a fixed section radius; the cell repeats at a
# 10-unit pitch along each axis.
bcc_params = {
    "lattice_data": {
        "edges": [
            [0, 7], [4, 7], [1, 7], [2, 7],
            [5, 7], [6, 7], [3, 7], [7, 8],
        ],
        "nodes": [
            [0, 0, 0],
            [0, 1, 0],
            [1, 0, 0],
            [1, 1, 0],
            [0, 0, 1],
            [0, 1, 1],
            [1, 0, 1],
            [0.5, 0.5, 0.5],
            [1, 1, 1],
        ],
    },
    "section_radius": 0.1,
    "scale": [10, 10, 10],
}
# Sample the lattice over the point grid, then intersect it with the part
# so the infill exists only inside the original geometry.
bcc = SampleLattice(source, parameters=bcc_params)
latticefill = CSG(
    Part,
    bcc,
    parameters={"smoothing": 0.1, "operation": "Intersect"},
)
# Union the shell with the lattice infill, redistance the combined field,
# and threshold it to obtain the final shape function.
shell_with_fill = CSG(
    shell,
    latticefill,
    parameters={"smoothing": 0.1, "operation": "Union"},
)
shape_func = Threshold(
    Redistance(shell_with_fill),
    {"width": 0.03},
)

# Serialize the graph to JSON by evaluating it against the point source.
evaluator = JSONEvaluator(source)
shape_func(evaluator)
graph_json = evaluator.json()

# Report the relative density of the final shape.
job = client.jobs.run("evaluate_metrics", {"graph": graph_json, "point_source": 0})
pprint(job.meta["relative_density"])

# Export a triangle mesh of the result and download it locally.
export_job = client.jobs.run("export_triangle_mesh", {
    "graph": graph_json,
    "point_source": 0,
})
client.assets.download_file(export_job.assets[0].id, "part_union.stl")