
Wondering how I could use Claude to learn, I decided to update my existing filter app and made the following request.

"새로운 질문을 하지 swift 언어로 iOS 15 이상에서 카메라앱을 만들거야 동영상 촬영 앱이고 기능은 다음과 같아 1. 동영상 촬영 버튼을 누르면 촬영이 되고 10초뒤에 촬영이 자동 종료 되 2. 동영상 필터는 2개가 제공될거야 3. 필터는 흑백하고 카툰 필터 4. 녹화된 영상은 앱내에 저장하는데 사진앱으로 내보내기 기능을 제공 5. 녹화사에는 전면 카메라 녹화나 후면 카메라 녹화 둘중 하나만 되게 할거야 6. 고급 카메라는 PIP 기능으로 영상이 녹화되게 할거야 7. 카메라 촬영중에는 화면에서 10초 타이머가 보여지면서 촬영된 영상이 실시간으로 보이게 할거야 여기까지 "

When I asked this, what came back was a CIFilter-based implementation...

Since I already know CIFilter can't deliver the performance needed for real-time video filtering... I asked again.

"아 미안해... metal framework로 구현해줘 CIFilter는 퍼포먼스 안나와"

It came back with the following.

Looking at the code... it wasn't split into files, so I asked once more.

"파일명 별로 구분좀 해줄 수 있을까?"

 

As shown above, it also laid out the project structure.

 

The source code for each file is as follows.

// MARK: - Models/ShaderTypes.swift
struct Vertex {
    var position: SIMD3<Float>
    var textureCoordinate: SIMD2<Float>
}

enum FilterType {
    case normal
    case grayscale
    case cartoon
}

// MARK: - Shaders/CameraShaders.metal
"""
#include <metal_stdlib>
using namespace metal;

struct VertexIn {
    float3 position [[attribute(0)]];
    float2 textureCoordinate [[attribute(1)]];
};

struct VertexOut {
    float4 position [[position]];
    float2 textureCoordinate;
};

vertex VertexOut vertexShader(uint vertexID [[vertex_id]],
                             constant VertexIn *vertices [[buffer(0)]]) {
    VertexOut out;
    out.position = float4(vertices[vertexID].position, 1.0);
    out.textureCoordinate = vertices[vertexID].textureCoordinate;
    return out;
}

fragment float4 fragmentShader(VertexOut in [[stage_in]],
                             texture2d<float> texture [[texture(0)]],
                             constant int &filterType [[buffer(1)]]) {
    constexpr sampler textureSampler(mag_filter::linear,
                                   min_filter::linear);
    
    float4 color = texture.sample(textureSampler, in.textureCoordinate);
    
    // Normal
    if (filterType == 0) {
        return color;
    }
    // Grayscale
    else if (filterType == 1) {
        float gray = dot(color.rgb, float3(0.299, 0.587, 0.114));
        return float4(gray, gray, gray, color.a);
    }
    // Cartoon
    else if (filterType == 2) {
        float3 original = color.rgb;
        
        float2 texelSize = float2(1.0 / texture.get_width(),
                                1.0 / texture.get_height());
        
        float3 edge = float3(0.0);
        for(int i = -1; i <= 1; i++) {
            for(int j = -1; j <= 1; j++) {
                float2 offset = float2(float(i), float(j)) * texelSize;
                float3 sample = texture.sample(textureSampler,
                                            in.textureCoordinate + offset).rgb;
                edge += abs(sample - original);
            }
        }
        
        float3 quantized = floor(original * 5.0) / 5.0;
        float edgeMask = 1.0 - smoothstep(0.2, 0.3, length(edge));
        return float4(quantized * edgeMask, color.a);
    }
    
    return color;
}
"""

// MARK: - Controllers/MetalCameraController.swift
import AVFoundation
import CoreVideo
import Metal

class MetalCameraController: NSObject {
    private var captureSession: AVCaptureSession?
    private var videoOutput: AVCaptureMovieFileOutput?
    private var textureCache: CVMetalTextureCache?
    
    private let metalDevice: MTLDevice
    private let metalCommandQueue: MTLCommandQueue
    private let pipelineState: MTLRenderPipelineState
    private let vertexBuffer: MTLBuffer
    
    private var currentFilterType: FilterType = .normal
    
    private let vertices: [Vertex] = [
        Vertex(position: SIMD3<Float>(-1, -1, 0), textureCoordinate: SIMD2<Float>(0, 1)),
        Vertex(position: SIMD3<Float>(1, -1, 0), textureCoordinate: SIMD2<Float>(1, 1)),
        Vertex(position: SIMD3<Float>(-1, 1, 0), textureCoordinate: SIMD2<Float>(0, 0)),
        Vertex(position: SIMD3<Float>(1, 1, 0), textureCoordinate: SIMD2<Float>(1, 0))
    ]
    
    override init() {
// ... (same as the previous code)
    }
    
    private func setupTextureCache() {
// ... (same as the previous code)
    }
}
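
The init() and setupTextureCache() bodies were elided in the answer ("same as the previous code"). For reference, the CoreVideo side of this usually looks roughly like the sketch below: create a CVMetalTextureCache once, then turn each captured frame into an MTLTexture. The extension and function names are my own, and it assumes the camera output is configured for kCVPixelFormatType_32BGRA; this is not the code Claude produced.

import AVFoundation
import CoreVideo
import Metal

// Sketch only: the usual CVMetalTextureCache pattern, not the original answer.
extension MetalCameraController {
    // Typically called once during setup.
    func makeTextureCache(device: MTLDevice) -> CVMetalTextureCache? {
        var cache: CVMetalTextureCache?
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &cache)
        return cache
    }

    // Called for every frame delivered by the capture output.
    func makeTexture(from sampleBuffer: CMSampleBuffer,
                     cache: CVMetalTextureCache) -> MTLTexture? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        let width = CVPixelBufferGetWidth(pixelBuffer)
        let height = CVPixelBufferGetHeight(pixelBuffer)

        var cvTexture: CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                  cache,
                                                  pixelBuffer,
                                                  nil,
                                                  .bgra8Unorm,      // assumes 32BGRA camera output
                                                  width,
                                                  height,
                                                  0,
                                                  &cvTexture)
        guard let cvTexture = cvTexture else { return nil }
        return CVMetalTextureGetTexture(cvTexture)
    }
}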

// MARK: - Views/Components/CameraView.swift
import MetalKit
import UIKit

class CameraView: MTKView {
    var renderDelegate: CameraRenderDelegate?
    
    override init(frame frameRect: CGRect, device: MTLDevice?) {
        super.init(frame: frameRect, device: device)
        setupView()
    }
    
    required init(coder: NSCoder) {
        super.init(coder: coder)
        setupView()
    }
    
    private func setupView() {
        self.device = MTLCreateSystemDefaultDevice()
        self.backgroundColor = .black
        self.framebufferOnly = false
    }
}

// MARK: - Views/Components/CameraControls.swift
import UIKit

class CameraControls: UIView {
    var onRecordTap: (() -> Void)?
    var onFilterTap: (() -> Void)?
    
    private lazy var recordButton: UIButton = {
        let button = UIButton(type: .system)
        button.setTitle("녹화", for: .normal)
        button.backgroundColor = .red
        button.layer.cornerRadius = 25
        button.addTarget(self, action: #selector(recordButtonTapped), for: .touchUpInside)
        return button
    }()
    
    private lazy var filterButton: UIButton = {
        let button = UIButton(type: .system)
        button.setTitle("필터 변경", for: .normal)
        button.addTarget(self, action: #selector(filterButtonTapped), for: .touchUpInside)
        return button
    }()
    
    private lazy var timerLabel: UILabel = {
        let label = UILabel()
        label.textColor = .white
        label.font = .systemFont(ofSize: 24, weight: .bold)
        label.text = "10"
        label.textAlignment = .center
        return label
    }()
    
    @objc private func recordButtonTapped() {
        onRecordTap?()
    }
    
    @objc private func filterButtonTapped() {
        onFilterTap?()
    }
    
    // ... layout code
}
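
The layout code was also elided. One possible arrangement is a plain Auto Layout pass like the sketch below. This is my own guess, not the original answer, and it would need to live in the same file as CameraControls so it can see the private subviews.

// Sketch only: one possible layout for the controls above.
extension CameraControls {
    func setupLayout() {
        [recordButton, filterButton, timerLabel].forEach {
            addSubview($0)
            $0.translatesAutoresizingMaskIntoConstraints = false
        }
        NSLayoutConstraint.activate([
            recordButton.centerXAnchor.constraint(equalTo: centerXAnchor),
            recordButton.bottomAnchor.constraint(equalTo: safeAreaLayoutGuide.bottomAnchor, constant: -20),
            recordButton.widthAnchor.constraint(equalToConstant: 50),   // matches cornerRadius 25
            recordButton.heightAnchor.constraint(equalToConstant: 50),

            filterButton.centerYAnchor.constraint(equalTo: recordButton.centerYAnchor),
            filterButton.leadingAnchor.constraint(equalTo: recordButton.trailingAnchor, constant: 24),

            timerLabel.topAnchor.constraint(equalTo: safeAreaLayoutGuide.topAnchor, constant: 16),
            timerLabel.centerXAnchor.constraint(equalTo: centerXAnchor)
        ])
    }
}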

// MARK: - ViewControllers/MetalCameraViewController.swift
import Metal
import UIKit

class MetalCameraViewController: UIViewController {
    private var cameraView: CameraView!
    private var cameraControls: CameraControls!
    private var cameraController: MetalCameraController!
    
    private var currentFilter: FilterType = .normal
    private var recordingTimer: Timer?
    private var remainingTime: Int = 10
    
    override func viewDidLoad() {
        super.viewDidLoad()
        setupViews()
        setupCameraController()
        setupBindings()
    }
    
    private func setupViews() {
        cameraView = CameraView(frame: view.bounds, device: MTLCreateSystemDefaultDevice())
        cameraControls = CameraControls()
        
        view.addSubview(cameraView)
        view.addSubview(cameraControls)
        
        // Layout constraints
    }
    
    private func setupCameraController() {
        cameraController = MetalCameraController()
    }
    
    private func setupBindings() {
        cameraControls.onRecordTap = { [weak self] in
            self?.handleRecordTap()
        }
        
        cameraControls.onFilterTap = { [weak self] in
            self?.handleFilterTap()
        }
    }
    
    private func handleRecordTap() {
        if recordingTimer == nil {
            startRecording()
        } else {
            stopRecording()
        }
    }
    
    private func handleFilterTap() {
        switch currentFilter {
        case .normal: currentFilter = .grayscale
        case .grayscale: currentFilter = .cartoon
        case .cartoon: currentFilter = .normal
        }
        // Update filter in camera controller
    }
    
    // ... Recording and timer related methods
}
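
The recording and timer methods were elided as well. Requirements 1 and 7 from the original prompt (auto-stop after 10 seconds, visible countdown) boil down to something like the sketch below. The start/stop calls on the controller and the label-update call are assumptions, left as comments; only the Timer usage is standard Foundation, and the extension would need to sit in the same file as the class since the properties are private.

// Sketch only: 10-second auto-stop with a 1-second countdown tick.
extension MetalCameraViewController {
    private func startRecording() {
        remainingTime = 10
        // cameraController.startRecording()                         // assumed controller API

        recordingTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
            guard let self = self else { return }
            self.remainingTime -= 1
            // self.cameraControls.updateTimer(self.remainingTime)   // hypothetical label update
            if self.remainingTime <= 0 {
                self.stopRecording()                                 // requirement 1: stop after 10 seconds
            }
        }
    }

    private func stopRecording() {
        recordingTimer?.invalidate()
        recordingTimer = nil
        remainingTime = 10
        // cameraController.stopRecording()                          // assumed controller API
    }
}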

// MARK: - Utils/VideoExporter.swift
import Foundation

class VideoExporter {
    static func exportVideo(at url: URL, completion: @escaping (Result<URL, Error>) -> Void) {
        // Video export implementation
    }
}
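
The export body was also left as a stub. Requirement 4 (export to the Photos app) is usually a single PHPhotoLibrary change block, roughly as sketched below. The method name saveToPhotoLibrary is mine, and authorization handling (NSPhotoLibraryAddUsageDescription, PHPhotoLibrary authorization requests) is omitted.

import Photos

// Sketch only: saving the recorded file into the Photos library.
extension VideoExporter {
    static func saveToPhotoLibrary(fileURL: URL,
                                   completion: @escaping (Result<URL, Error>) -> Void) {
        PHPhotoLibrary.shared().performChanges({
            _ = PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: fileURL)
        }) { success, error in
            DispatchQueue.main.async {
                if success {
                    completion(.success(fileURL))
                } else {
                    completion(.failure(error ?? NSError(domain: "VideoExporter", code: -1)))
                }
            }
        }
    }
}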

// MARK: - Protocols/CameraRenderDelegate.swift
protocol CameraRenderDelegate: AnyObject {
    func renderFrame()
    func updateFilter(_ type: FilterType)
}

That's everything so far...

When I split the code into files and dropped it into the project myself... I noticed something odd in the MetalCameraViewController.swift file.

to be continued...
