name: "face_warp"
|
||
backend: "python"
|
||
max_batch_size: 16
|
||
|
||
input [
  {
    name: "input"
    data_type: TYPE_FP32
    dims: [3, 160, 160]
  },
  {
    name: "score"
    data_type: TYPE_FP32
    dims: [1]
  },
  {
    name: "landmarks"
    data_type: TYPE_FP32
    dims: [5, 2]
  }
]

output [
  {
    name: "output"
    data_type: TYPE_FP32
    dims: [512]
  }
]

# The Python backend runs on CPU by default; it could be pinned to a GPU,
# but the OpenCV warp done here is CPU-bound, so a CPU instance is used.
instance_group [
  {
    kind: KIND_CPU
    count: 1
  }
]
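# (Note: `count` can be raised to run several CPU instances of this model
#  concurrently if the warp ever becomes a throughput bottleneck.)
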
# Enable dynamic batching; keep the same policy style as your other models.
dynamic_batching {
}
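# (The empty block uses Triton's defaults; a setting such as
#  `max_queue_delay_microseconds: 100` could be added inside it to trade a
#  little latency for larger batches; the value here is only illustrative.)
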
# Optional: default per-sample zoom factor, used when no `scale` input is provided.
parameters: {
  key: "scale_factor"
  value: { string_value: "1.0" }
}
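
For reference, a minimal sketch of the model.py this config would pair with, assuming the standard Triton Python-backend API. Only the tensor names, shapes, and the scale_factor parameter are taken from the config above; the warp/embedding logic is elided and the zero-filled output is a placeholder, not the actual implementation.

# model.py (sketch)
import json

import numpy as np
import triton_python_backend_utils as pb_utils


class TritonPythonModel:
    def initialize(self, args):
        # args["model_config"] is the JSON-serialized form of the config above.
        cfg = json.loads(args["model_config"])
        params = cfg.get("parameters", {})
        # Default zoom factor; falls back to 1.0 when the parameter is absent.
        self.scale_factor = float(
            params.get("scale_factor", {}).get("string_value", "1.0")
        )

    def execute(self, requests):
        responses = []
        for request in requests:
            # Shapes include the batch dimension implied by max_batch_size.
            faces = pb_utils.get_input_tensor_by_name(request, "input").as_numpy()          # [N, 3, 160, 160]
            scores = pb_utils.get_input_tensor_by_name(request, "score").as_numpy()         # [N, 1]
            landmarks = pb_utils.get_input_tensor_by_name(request, "landmarks").as_numpy()  # [N, 5, 2]

            # ... per-sample OpenCV warp using `landmarks`, `scores`, and
            # self.scale_factor, followed by whatever produces the 512-dim
            # vector, would go here ...
            out = np.zeros((faces.shape[0], 512), dtype=np.float32)  # placeholder

            responses.append(
                pb_utils.InferenceResponse(
                    output_tensors=[pb_utils.Tensor("output", out)]
                )
            )
        return responses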