我正在尝试个性化 Apple 在开发者网站上发布的示例:
用视觉检测动物身体姿势
https://developer.apple.com/documentation/vision/detecting-animal-body-poses-with-vision
经过一些测试,我发现只有在不忽略安全区域时,地标和连接图才能正确绘制;一旦忽略了安全区域(ignoresSafeArea),或者在 iPad 上运行该应用,骨架的绘制位置就不再正确。
在示例中,Apple 使用几何读取器来检测视图的大小:
...
ZStack {
GeometryReader { geo in
AnimalSkeletonView(animalJoint: animalJoint, size: geo.size)
}
}.frame(maxWidth: .infinity)
...
与:
/// Overlays the detected animal skeleton on top of the camera preview.
///
/// Each stroke is drawn only when *every* joint it connects was recognized,
/// matching the behavior of the original nested `if let` stanzas.
struct AnimalSkeletonView: View {
    // Get the animal joint locations.
    // NOTE(review): this view is also created with an externally supplied
    // detector (see the call site); `@ObservedObject` may be more appropriate
    // than `@StateObject` for a passed-in object — confirm ownership.
    @StateObject var animalJoint = AnimalPoseDetector()
    var size: CGSize

    var body: some View {
        DisplayView(animalJoint: animalJoint)
        if animalJoint.animalBodyParts.isEmpty == false {
            // Draw the skeleton of the animal by connecting the recognized
            // joints. Order matters only for stacking; it mirrors the original.
            ZStack {
                // Left side of the head.
                connection(.nose, .leftEye, color: .orange)
                connection(.leftEye, .leftEarBottom, color: .orange)
                connection(.leftEarBottom, .leftEarMiddle, .leftEarTop, color: .orange)
                // Right side of the head.
                connection(.nose, .rightEye, color: .orange)
                connection(.rightEye, .rightEarBottom, color: .orange)
                connection(.rightEarBottom, .rightEarMiddle, .rightEarTop, color: .orange)
                // Trunk - a line from the nose to the neck.
                connection(.nose, .neck, color: .yellow)
                // Spine - a line from the neck to the bottom of the tail.
                connection(.neck, .tailBottom, color: .green)
                // Left foreleg.
                connection(.neck, .leftFrontElbow, color: .purple)
                connection(.leftFrontElbow, .leftFrontKnee, .leftFrontPaw, color: .purple)
                // Right foreleg.
                connection(.neck, .rightFrontElbow, color: .purple)
                connection(.rightFrontElbow, .rightFrontKnee, .rightFrontPaw, color: .purple)
                // Left hindleg.
                connection(.tailBottom, .leftBackElbow, color: .blue)
                connection(.leftBackElbow, .leftBackKnee, .leftBackPaw, color: .blue)
                // Right hindleg.
                connection(.tailBottom, .rightBackElbow, color: .blue)
                connection(.rightBackElbow, .rightBackKnee, .rightBackPaw, color: .blue)
                // Tail joints.
                connection(.tailBottom, .tailMiddle, .tailTop, color: .orange)
            }
        }
    }

    /// Draws a polyline through the given joints, but only when every one of
    /// them was recognized — otherwise nothing is drawn, exactly like the
    /// original nested `if let` chains.
    /// - Parameters:
    ///   - joints: The body-part names to connect, in drawing order.
    ///   - color: The stroke color for this segment of the skeleton.
    @ViewBuilder
    private func connection(_ joints: VNAnimalBodyPoseObservation.JointName...,
                            color: Color) -> some View {
        let locations = joints.compactMap { animalJoint.animalBodyParts[$0]?.location }
        // Require all joints to be present; a partial match draws nothing.
        if locations.count == joints.count {
            Line(points: locations, size: size)
                .stroke(lineWidth: 5.0)
                .fill(color)
        }
    }
}
和画线的形状:
// Create a transform that converts the pose's normalized point.
struct Line: Shape {
var points: [CGPoint]
var size: CGSize
func path(in rect: CGRect) -> Path {
let pointTransform: CGAffineTransform =
.identity
.translatedBy(x: 0.0, y: -1.0)
.concatenating(.identity.scaledBy(x: 1.0, y: -1.0))
.concatenating(.identity.scaledBy(x: size.width, y: size.height))
var path = Path()
path.move(to: points[0])
for point in points {
path.addLine(to: point)
}
return path.applying(pointTransform)
}
}
网上查了一下,有人建议将属性
cameraView.previewLayer.videoGravity

cameraView.previewLayer.videoGravity = .resizeAspectFill
改为
cameraView.previewLayer.videoGravity = .resizeAspect
并修改
func updateUIViewController(_ uiViewController: UIViewControllerType, context: Context) {}
在 CameraViewController.swift 但它对我不起作用。
你能帮我理解我错在哪里吗?
谢谢!
编辑: 我更改为人体检测并删除了视图顶部的切换开关(我只需要身体轨迹,而不是表情符号覆盖)
示例图片:
我收到了苹果开发者论坛的答复。
有 2 种可能的解决方案:
第一种:在 CameraViewController 中设置
cameraView.previewLayer.videoGravity = .resize
但这样预览画面会被拉伸。
第二种:把 GeometryReader 移到 DisplayView 的 overlay 中,并约束其宽高比:
DisplayView(animalJoint: animalJoint)
.overlay {
GeometryReader { geo in
// draw skeleton
}.aspectRatio(..., contentMode: .fit)
}
}