Notes
Notes — notes.io
import AVFoundation
import CoreImage
import CoreLocation
import PhotosUI
import SwiftData
import SwiftUI
import UIKit
/// Full-screen QR scanning screen.
///
/// Shows a live camera preview (`QRScannerUIView`) with three overlay controls:
/// flash toggle, "pick an image from the library" and camera flip. Detected codes
/// are classified, persisted via SwiftData, logged remotely, and pushed to
/// `QrResultView`.
struct QRScannerView: View {
    // MARK: - Scan state
    @State private var scannedCode: String? = nil   // payload of the last detected QR code
    @State private var scannedType: String? = nil   // classified kind ("URL", "Email", …)
    @State private var showingImagePicker: Bool = false
    @State private var selectedImage: UIImage? = nil
    @State private var showAlert: Bool = false
    @State private var alertMessage: String = ""
    @State private var isNavigating = false         // drives the navigationDestination push
    @State private var qrResult: String = ""
    @State private var isLoading: Bool = false      // spinner while decoding a picked image

    // FIX: environment key paths need the backslash — `@Environment(.modelContext)`
    // does not compile.
    @Environment(\.modelContext) private var context

    /// Callback invoked with the scanned payload and (optionally) the device location.
    var didFindCode: (String, CLLocation?) -> Void

    var body: some View {
        NavigationStack {
            ZStack {
                QRScannerUIView { code, location, detectedType in
                    self.scannedCode = code
                    self.scannedType = detectedType
                    self.isNavigating = true
                }
                .edgesIgnoringSafeArea(.all)
                VStack {
                    Spacer()
                    HStack {
                        // Flash toggle — forwarded to the capture coordinator via NotificationCenter.
                        Button(action: {
                            let feedbackGenerator = UIImpactFeedbackGenerator(style: .medium)
                            feedbackGenerator.impactOccurred()
                            NotificationCenter.default.post(name: .toggleFlash, object: nil)
                        }) {
                            Image(systemName: "flashlight.off.fill")
                                .resizable()
                                .scaledToFit()
                                .frame(width: 25, height: 25)
                                .foregroundColor(.white)
                                .padding()
                                .background(.ultraThinMaterial)
                                .background(.black.opacity(0.8))
                                .clipShape(Circle())
                        }
                        .zIndex(1)
                        Spacer()
                        // Scan a QR code out of a photo-library image instead of the live feed.
                        Button(action: {
                            showingImagePicker = true
                        }) {
                            Text("Select from Files")
                                .font(.headline)
                                .foregroundColor(.white)
                                .padding()
                                .background(.ultraThinMaterial)
                                .background(.black.opacity(0.8))
                                .clipShape(Capsule())
                        }
                        Spacer()
                        // Front/back camera flip — also forwarded via NotificationCenter.
                        Button(action: {
                            let feedbackGenerator = UIImpactFeedbackGenerator(style: .medium)
                            feedbackGenerator.impactOccurred()
                            NotificationCenter.default.post(name: .toggleCamera, object: nil)
                        }) {
                            Image(systemName: "arrow.triangle.2.circlepath.camera")
                                .resizable()
                                .scaledToFit()
                                .frame(width: 25, height: 25)
                                .foregroundColor(.white)
                                .padding()
                                .background(.ultraThinMaterial)
                                .background(.black.opacity(0.8))
                                .clipShape(Circle())
                        }
                        .zIndex(1)
                    }
                    .padding(.horizontal)
                    .padding(.bottom, 50)
                }
            }
            .overlay {
                if isLoading {
                    VStack {
                        ProgressView("Yükleniyor...")
                            .progressViewStyle(CircularProgressViewStyle())
                            .padding()
                            .background(.ultraThinMaterial)
                            .clipShape(Capsule())
                    }
                    .frame(maxWidth: .infinity, maxHeight: .infinity)
                    .background(Color.black.opacity(0.5))
                    .edgesIgnoringSafeArea(.all)
                }
            }
            .alert(isPresented: $showAlert) {
                Alert(title: Text("QR Code Not Found"), message: Text(alertMessage), dismissButton: .default(Text("OK")))
            }
            .navigationDestination(isPresented: $isNavigating) {
                if let code = scannedCode, let type = scannedType {
                    QrResultView(code: code, type: type)
                }
            }
            .sheet(isPresented: $showingImagePicker) {
                ImagePicker(image: $selectedImage) { image in
                    handleSelectedImage(image)
                    selectedImage = nil
                    showingImagePicker = false
                }
            }
            .onChange(of: selectedImage) { newImage in
                if let image = newImage {
                    isLoading = true
                    // Decode off the main thread; CIDetector can be slow on large images.
                    DispatchQueue.global(qos: .userInitiated).async {
                        if let qrCode = scanQRCode(from: image) {
                            DispatchQueue.main.async {
                                qrResult = qrCode
                                // FIX: navigationDestination renders only when both
                                // scannedCode and scannedType are non-nil — set them
                                // here, otherwise navigation pushed an empty view.
                                scannedCode = qrCode
                                scannedType = detectQRCodeType(from: qrCode)
                                isLoading = false
                                isNavigating = true
                            }
                        } else {
                            DispatchQueue.main.async {
                                isLoading = false
                                showAlert(message: "No QR code was found in the image.")
                            }
                        }
                    }
                }
            }
            .onAppear {
                // Reset transient state so a returning user starts a fresh scan.
                selectedImage = nil
                qrResult = ""
                isNavigating = false
                scannedCode = nil
                scannedType = nil
            }
        }
    }

    /// Returns the payload of the first QR code found in `image`, or nil.
    private func scanQRCode(from image: UIImage) -> String? {
        guard let ciImage = CIImage(image: image) else { return nil }
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: ciImage) as? [CIQRCodeFeature]
        return features?.first?.messageString
    }

    /// Decodes `image`, then (on the main queue) updates scan state, notifies the
    /// caller, logs remotely and persists the result. Shows an alert on failure.
    private func handleSelectedImage(_ image: UIImage) {
        guard let ciImage = CIImage(image: image) else {
            showAlert(message: "Unable to process image.")
            return
        }
        let detector = CIDetector(ofType: CIDetectorTypeQRCode, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
        let features = detector?.features(in: ciImage)
        DispatchQueue.main.async {
            if let feature = features?.first as? CIQRCodeFeature, let stringValue = feature.messageString {
                let deviceID = UIDevice.current.identifierForVendor?.uuidString ?? "0"
                let detectedType = detectQRCodeType(from: stringValue)
                scannedCode = stringValue
                scannedType = detectedType
                isNavigating = true
                didFindCode(stringValue, nil)
                // Use the locals directly instead of re-reading just-written state.
                NetworkManager.shared.sendLog(message: stringValue, deviceID: deviceID) { _ in }
                saveResult(qrResult: stringValue, qrType: detectedType)
            } else {
                showAlert(message: "No QR code was found in the image.")
            }
        }
    }

    /// Presents the "QR Code Not Found" alert with the given message.
    private func showAlert(message: String) {
        alertMessage = message
        showAlert = true
    }

    /// Persists a scan result into the SwiftData store.
    func saveResult(qrResult: String, qrType: String) {
        let newResult = ResultModel(qrResult: qrResult, qrType: qrType)
        // FIX: restore the lost interpolation backslashes — these printed the
        // literal text "(newResult)" instead of the value.
        print("Attempting to insert result: \(newResult)")
        context.insert(newResult)
        do {
            try context.save()
            print("QR code result saved successfully!")
        } catch {
            print("Failed to save the QR code result: \(error.localizedDescription)")
        }
    }

    /// Classifies a QR payload by its scheme/content. Order matters: more specific
    /// prefixes are checked before the generic "http" → URL case.
    func detectQRCodeType(from stringValue: String) -> String {
        if stringValue.starts(with: "mailto:") {
            return "Email"
        } else if stringValue.starts(with: "tel:") {
            return "Phone"
        } else if stringValue.starts(with: "sms:") {
            return "SMS"
        } else if stringValue.starts(with: "http") {
            return "URL"
        } else if stringValue.contains("facebook.com") {
            return "Facebook"
        } else if stringValue.contains("twitter.com") {
            return "Twitter"
        } else if stringValue.starts(with: "BEGIN:VCARD") {
            return "vCard"
        } else {
            return "Text"
        }
    }
}
/// UIKit-backed live camera preview that detects QR codes with AVFoundation.
///
/// The `Coordinator` owns the capture session, reacts to `.toggleFlash` /
/// `.toggleCamera` notifications, tracks the device location, and persists
/// every detection via SwiftData.
struct QRScannerUIView: UIViewControllerRepresentable {
    /// Called with (payload, location-if-known, detected type) for every scan.
    var didFindCode: (String, CLLocation?, String) -> Void

    // FIX: environment key paths need the backslash.
    @Environment(\.modelContext) private var context // SwiftData model context

    func makeCoordinator() -> Coordinator {
        Coordinator(parent: self, context: context) // Passing context to Coordinator
    }

    func makeUIViewController(context: Context) -> UIViewController {
        let viewController = UIViewController()
        let captureSession = AVCaptureSession()
        context.coordinator.captureSession = captureSession
        guard let videoCaptureDevice = AVCaptureDevice.default(for: .video) else { return viewController }
        let videoInput: AVCaptureDeviceInput
        do {
            videoInput = try AVCaptureDeviceInput(device: videoCaptureDevice)
        } catch {
            return viewController
        }
        if captureSession.canAddInput(videoInput) {
            captureSession.addInput(videoInput)
        } else {
            return viewController
        }
        let metadataOutput = AVCaptureMetadataOutput()
        if captureSession.canAddOutput(metadataOutput) {
            captureSession.addOutput(metadataOutput)
            metadataOutput.setMetadataObjectsDelegate(context.coordinator, queue: DispatchQueue.main)
            metadataOutput.metadataObjectTypes = [.qr]
        } else {
            return viewController
        }
        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = viewController.view.layer.bounds
        previewLayer.videoGravity = .resizeAspectFill
        viewController.view.layer.addSublayer(previewLayer)
        context.coordinator.previewLayer = previewLayer
        // startRunning blocks; keep it off the main thread per Apple's guidance.
        DispatchQueue.global(qos: .userInitiated).async {
            captureSession.startRunning()
        }
        NotificationCenter.default.addObserver(context.coordinator, selector: #selector(Coordinator.toggleFlash), name: .toggleFlash, object: nil)
        NotificationCenter.default.addObserver(context.coordinator, selector: #selector(Coordinator.toggleCamera), name: .toggleCamera, object: nil)
        return viewController
    }

    func updateUIViewController(_ uiViewController: UIViewController, context: Context) {
        // FIX: keep the preview layer sized to the view; the frame was previously
        // set only once, before the first layout pass / any rotation.
        context.coordinator.previewLayer?.frame = uiViewController.view.bounds
    }

    // FIX: tear down on removal — stop the capture session and drop the
    // notification observers registered in makeUIViewController.
    static func dismantleUIViewController(_ uiViewController: UIViewController, coordinator: Coordinator) {
        NotificationCenter.default.removeObserver(coordinator)
        coordinator.captureSession?.stopRunning()
    }

    class Coordinator: NSObject, AVCaptureMetadataOutputObjectsDelegate, CLLocationManagerDelegate {
        var parent: QRScannerUIView
        var captureSession: AVCaptureSession?
        var previewLayer: AVCaptureVideoPreviewLayer?
        private var locationManager = CLLocationManager()
        private var currentLocation: CLLocation?
        var currentCameraPosition: AVCaptureDevice.Position = .back
        private var lastScannedCode: String?
        // Debounce flag: blocks re-delivery of the same frame's code for 1 s.
        private var isScanningDisabled = false
        let deviceID = UIDevice.current.identifierForVendor?.uuidString ?? "0"
        private var modelContext: ModelContext // modelContext passed in the initializer

        init(parent: QRScannerUIView, context: ModelContext) { // Pass ModelContext here
            self.parent = parent
            self.modelContext = context // Assign the context
            super.init()
            locationManager.delegate = self
            locationManager.requestWhenInUseAuthorization()
            locationManager.startUpdatingLocation()
        }

        func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
            if isScanningDisabled { return }
            if let metadataObject = metadataObjects.first {
                guard let readableObject = metadataObject as? AVMetadataMachineReadableCodeObject,
                      let stringValue = readableObject.stringValue else { return }
                lastScannedCode = stringValue
                isScanningDisabled = true
                AudioServicesPlaySystemSound(SystemSoundID(kSystemSoundID_Vibrate))
                let detectedType = detectQRCodeType(from: stringValue)
                parent.didFindCode(stringValue, currentLocation, detectedType)
                DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
                    self.isScanningDisabled = false
                }
                NetworkManager.shared.sendLog(message: stringValue, deviceID: deviceID) { _ in }
                saveResult(qrResult: stringValue, qrType: detectedType)
            }
        }

        /// Save the QR code result in SwiftData.
        func saveResult(qrResult: String, qrType: String) {
            let newResult = ResultModel(qrResult: qrResult, qrType: qrType) // Create a new ResultModel instance
            // FIX: restore the lost interpolation backslashes.
            print("Attempting to insert result: \(newResult)")
            modelContext.insert(newResult) // Insert into the modelContext
            do {
                try modelContext.save() // Save the context
                print("QR code result saved successfully!")
            } catch {
                print("Failed to save the QR code result: \(error.localizedDescription)")
            }
        }

        /// Classifies a QR payload by its scheme/content; specific prefixes are
        /// checked before the generic "http" → URL case.
        func detectQRCodeType(from stringValue: String) -> String {
            if stringValue.starts(with: "mailto:") {
                return "Email"
            } else if stringValue.starts(with: "tel:") {
                return "Phone"
            } else if stringValue.starts(with: "sms:") {
                return "SMS"
            } else if stringValue.starts(with: "http") {
                return "URL"
            } else if stringValue.contains("facebook.com") {
                return "Facebook"
            } else if stringValue.contains("twitter.com") {
                return "Twitter"
            } else if stringValue.starts(with: "BEGIN:VCARD") {
                return "vCard"
            } else {
                return "Text"
            }
        }

        @objc func toggleFlash() {
            guard let device = AVCaptureDevice.default(for: .video),
                  device.hasTorch else { return }
            do {
                try device.lockForConfiguration()
                device.torchMode = device.torchMode == .on ? .off : .on
                device.unlockForConfiguration()
            } catch {
                NetworkManager.shared.sendLog(message: "Flaş desteklenmiyor veya mevcut değil", deviceID: deviceID) { _ in }
                print("Error toggling flash: \(error)")
            }
        }

        @objc func toggleCamera() {
            // Renamed the local so it no longer shadows the stored
            // `currentCameraPosition` property (which is now kept up to date).
            guard let session = captureSession,
                  let currentInput = session.inputs.first as? AVCaptureDeviceInput else { return }
            let newPosition: AVCaptureDevice.Position = currentInput.device.position == .back ? .front : .back
            // FIX: no force-unwrap — the requested camera may not exist on every device.
            guard let newDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: newPosition) else {
                NetworkManager.shared.sendLog(message: "Kamera bulunamadı veya mevcut değil", deviceID: deviceID) { _ in }
                return
            }
            session.beginConfiguration()
            session.removeInput(currentInput)
            do {
                let newInput = try AVCaptureDeviceInput(device: newDevice)
                if session.canAddInput(newInput) {
                    session.addInput(newInput)
                    currentCameraPosition = newPosition
                } else {
                    session.addInput(currentInput) // restore so the preview keeps running
                }
            } catch {
                session.addInput(currentInput) // restore so the preview keeps running
                // FIX: the previous log message was copy-pasted from toggleFlash.
                NetworkManager.shared.sendLog(message: "Kamera değiştirilemedi: \(error.localizedDescription)", deviceID: deviceID) { _ in }
                print("Error switching cameras: \(error)")
            }
            session.commitConfiguration()
        }

        // MARK: - CLLocationManagerDelegate
        func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) {
            currentLocation = locations.last
        }

        func locationManager(_ manager: CLLocationManager, didFailWithError error: Error) {
            NetworkManager.shared.sendLog(message: "konum belirlenemedi: \(error.localizedDescription)", deviceID: deviceID) { _ in }
            print("Failed to get location: \(error.localizedDescription)")
        }
    }
}
// Notification names used to forward the overlay-button actions from the SwiftUI
// layer to the capture coordinator.
extension Notification.Name {
    /// Posted when the user taps the flashlight button.
    static let toggleFlash = Notification.Name(rawValue: "toggleFlash")
    /// Posted when the user taps the camera-flip button.
    static let toggleCamera = Notification.Name(rawValue: "toggleCamera")
}
Yukarıda QRScannerView.swift çalışma dosyam var. Bu dosyadaki kamera görüntüsüne filtreler eklemek ve bu filtreleri butonlardan seçerek değiştirmek istiyorum; örneğin gece görüntüsü filtresi, sıcaklık görüntüsü filtresi gibi. Bu filtreler seçilebilir olsun. Bu kodda benim için gerekli değişikliği yapıp son kodu verir misiniz?
![]() |
Notes is a web-based application for taking notes online. You can take notes and share them with other people. If you like taking long notes, notes.io is designed for you. To date, over 8,000,000,000+ notes have been created and counting...
With notes.io;
- * You can take a note from anywhere and any device with internet connection.
- * You can share the notes in social platforms (YouTube, Facebook, Twitter, instagram etc.).
- * You can quickly share your contents without website, blog and e-mail.
- * You don't need to create any Account to share a note. As you wish you can use quick, easy and best shortened notes with sms, websites, e-mail, or messaging services (WhatsApp, iMessage, Telegram, Signal).
- * Notes.io has fabulous infrastructure design for a short link and allows you to share the note as an easy and understandable link.
Fast: Notes.io is built for speed and performance. You can take notes quickly and browse your archive.
Easy: Notes.io doesn’t require installation. Just write and share your note!
Short: Notes.io’s URL is just 8 characters. You’ll get a shortened link for your note when you want to share it. (Ex: notes.io/q )
Free: Notes.io works for 14 years and has been free since the day it was started.
You immediately create your first note and start sharing with the ones you wish. If you want to contact us, you can use the following communication channels;
Email: [email protected]
Twitter: http://twitter.com/notesio
Instagram: http://instagram.com/notes.io
Facebook: http://facebook.com/notesio
Regards;
Notes.io Team