Try selecting a different color for drawing, not black on white, and it will work. The image rendered from the PKDrawing has a transparent background, so black strokes are likely being lost when Vision rasterizes the image.
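For example, you can preselect a non-black ink when the canvas appears (a minimal sketch; the pen ink type, blue color, and width of 5 are arbitrary assumptions, not requirements):

// Inside the canvas's .onAppear, before becomeFirstResponder():
canvasView.tool = PKInkingTool(.pen, color: .systemBlue, width: 5)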
This code works:
import SwiftUI
import PencilKit
import Vision
struct HandwritingRecognizerView: View {
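// PKCanvasView and PKToolPicker are reference types; holding them in @State
// keeps the same instances alive across SwiftUI view updates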
@State private var canvasView = PKCanvasView()
@State private var toolPicker = PKToolPicker()
@State private var recognizedText = ""
@State private var isRecognizing = false
var body: some View {
VStack {
HStack {
Button("Recognize") {
recognizeHandwriting()
}
.padding()
.background(Color.blue)
.foregroundColor(.white)
.cornerRadius(8)
Button("Clear") {
canvasView.drawing = PKDrawing()
recognizedText = ""
}
.padding()
.background(Color.red)
.foregroundColor(.white)
.cornerRadius(8)
}
.padding()
Text(recognizedText)
.font(.headline)
.padding()
.frame(maxWidth: .infinity, alignment: .leading)
.background(Color.green.opacity(0.1))
.cornerRadius(8)
.padding(.horizontal)
PencilKitCanvasRepresentable(canvasView: $canvasView, toolPicker: $toolPicker)
.onAppear {
toolPicker.setVisible(true, forFirstResponder: canvasView)
toolPicker.addObserver(canvasView)
canvasView.becomeFirstResponder()
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
}
}
func recognizeHandwriting() {
isRecognizing = true
// Convert the PKDrawing to a UIImage (skip recognition if the canvas is empty)
let bounds = canvasView.drawing.bounds
guard !bounds.isEmpty else { isRecognizing = false; return }
let image = canvasView.drawing.image(from: bounds, scale: 1.0)
// Create a request handler
guard let cgImage = image.cgImage else {
print("Could not get CGImage from UIImage")
isRecognizing = false
return
}
// Create the text recognition request; it is configured below before being performed
let request = VNRecognizeTextRequest { (request, error) in
if let error = error {
    print("Error: \(error)")
    DispatchQueue.main.async { isRecognizing = false }
    return
}
guard let observations = request.results as? [VNRecognizedTextObservation] else {
    print("No text observations")
    DispatchQueue.main.async { isRecognizing = false }
    return
}
// Process the recognized text
let recognizedStrings = observations.compactMap { observation in
observation.topCandidates(1).first?.string
}
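// Publish the result on the main thread, since it updates SwiftUI state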
DispatchQueue.main.async {
self.recognizedText = recognizedStrings.joined(separator: " ")
self.isRecognizing = false
}
}
// Configure for handwritten text: the .accurate level is what makes
// handwriting recognition usable (.fast is tuned for printed text)
request.recognitionLevel = .accurate
request.recognitionLanguages = ["en-US"]
request.usesLanguageCorrection = true
request.customWords = ["o3Draw"] // Add custom words that might appear in your app
if #available(iOS 16.0, *) {
    // Pin the newer model revision and skip automatic language detection,
    // since the input language is fixed to English above
    request.automaticallyDetectsLanguage = false
    request.revision = VNRecognizeTextRequestRevision3
}
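// Perform the request off the main thread; recognition can take noticeable time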
DispatchQueue.global(qos: .userInitiated).async {
do {
let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
try requestHandler.perform([request])
} catch {
print("Failed to perform recognition: \(error.localizedDescription)")
DispatchQueue.main.async {
    self.recognizedText = "Recognition failed."
    self.isRecognizing = false
}
}
}
}
}
// PencilKit Canvas SwiftUI wrapper
struct PencilKitCanvasRepresentable: UIViewRepresentable {
@Binding var canvasView: PKCanvasView
@Binding var toolPicker: PKToolPicker
func makeUIView(context: Context) -> PKCanvasView {
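// One-time setup: accept both finger and Pencil input, and keep the
// background clear so surrounding SwiftUI content shows through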
canvasView.drawingPolicy = .anyInput
canvasView.alwaysBounceVertical = false
canvasView.backgroundColor = .clear
canvasView.isOpaque = false
return canvasView
}
func updateUIView(_ uiView: PKCanvasView, context: Context) {
// Updates happen through the binding
}
}
// Unused helper: exposes the iOS 16 recognition revision constant for reference
extension VNRecognizeTextRequest {
@available(iOS 16.0, *)
var revision3: Int {
return VNRecognizeTextRequestRevision3
}
}
#Preview {
HandwritingRecognizerView()
}