Support adding custom filters to EditingStack.filters #247
Conversation
Hi @shima11 @muukii

```swift
//
//  ChromaticAberrationFilter.swift
//
//  Created by BAO HA on 07/06/2024.
//

import BrightroomEngine
import CoreImage
import Foundation

struct ChromaticAberrationFilter: Filtering {

  public static let range: ParameterRange<Double, ChromaticAberrationFilter> = .init(min: 0, max: 20)

  public var value: Double = 0

  let inputFalloff: Double = 0.2
  let inputSamples: Double = 10

  // Radial chromatic-aberration kernel: samples the R/G/B channels at increasing offsets from the center.
  let kernel = CIKernel(source:
    "kernel vec4 motionBlur(sampler image, vec2 size, float sampleCount, float start, float blur) {" +
    "  int sampleCountInt = int(floor(sampleCount));" +
    "  vec4 accumulator = vec4(0.0);" +
    "  vec2 dc = destCoord();" +
    "  float normalisedValue = length(((dc / size) - 0.5) * 2.0);" +
    "  float strength = clamp((normalisedValue - start) * (1.0 / (1.0 - start)), 0.0, 1.0);" +
    "  vec2 vector = normalize((dc - (size / 2.0)) / size);" +
    "  vec2 velocity = vector * strength * blur;" +
    "  vec2 redOffset = -vector * strength * (blur * 1.0);" +
    "  vec2 greenOffset = -vector * strength * (blur * 1.5);" +
    "  vec2 blueOffset = -vector * strength * (blur * 2.0);" +
    "  for (int i = 0; i < sampleCountInt; i++) {" +
    "    accumulator.r += sample(image, samplerTransform(image, dc + redOffset)).r;" +
    "    redOffset -= velocity / sampleCount;" +
    "    accumulator.g += sample(image, samplerTransform(image, dc + greenOffset)).g;" +
    "    greenOffset -= velocity / sampleCount;" +
    "    accumulator.b += sample(image, samplerTransform(image, dc + blueOffset)).b;" +
    "    blueOffset -= velocity / sampleCount;" +
    "  }" +
    "  return vec4(vec3(accumulator / float(sampleCountInt)), 1.0);" +
    "}")

  func apply(to image: CIImage, sourceImage: CIImage) -> CIImage {
    let args = [
      image,
      CIVector(x: image.extent.width, y: image.extent.height),
      inputSamples,
      inputFalloff,
      value,
    ] as [Any]
    return kernel!.apply(
      extent: image.extent,
      roiCallback: { _, rect in
        rect.insetBy(dx: -1, dy: -1)
      },
      arguments: args)
  }
}
```

When I use the code above, the preview works fine:

Preview Editor: (screenshot)

But when rendering the result, the image does not change:

```swift
let renderOption: BrightRoomImageRenderer.Options = .init()
stack.makeRenderer().render(options: renderOption)
```

Result: (screenshot)
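As a side note, the `roiCallback` above only expands the requested rect by 1 pt, even though the kernel samples up to roughly `2 × blur` pixels away from the destination coordinate; that can behave differently at full render resolution than in a downscaled preview. Below is a minimal sketch (plain Core Image, no Brightroom involved; `renderDirectly` is just an illustrative name) for checking the kernel outside of the editing stack:

```swift
import CoreImage
import UIKit

// Hypothetical standalone check: apply the filter directly and draw it through a CIContext,
// which exercises the ROI callback the same way a full-resolution render does.
func renderDirectly(_ source: UIImage) -> UIImage? {
  guard let input = CIImage(image: source) else { return nil }

  var filter = ChromaticAberrationFilter()
  filter.value = 10 // any value inside the declared 0...20 range

  let output = filter.apply(to: input, sourceImage: input)

  let context = CIContext()
  guard let cgImage = context.createCGImage(output, from: output.extent) else { return nil }
  return UIImage(cgImage: cgImage)
}
```

If the effect shows up here but not in the stack's render output, the problem is more likely in how the filter is wired into `EditingStack` than in the kernel itself.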
I see, I will check later 🔍

Thank you very much. Savior 🙇‍♂️!!
Hi @shima11, my custom filter:

```swift
//
//  ChromaticAberrationFilter.swift
//  Binsoo
//
//  Created by BAO HA on 07/06/2024.
//

import BrightroomEngine
import CoreImage
import Foundation

private let tau = CGFloat(Double.pi * 2)
private let inputAngle: Double = 0
private let redAngle = inputAngle + tau
private let greenAngle = inputAngle + tau * 0.333
private let blueAngle = inputAngle + tau * 0.666

private let rgbChannelCompositing = RGBChannelCompositing()

// Note: this kernel is left over from the previous attempt and is not used by this variant.
private let kernel = CIKernel(source:
  """
  kernel vec4 motionBlur(sampler image, vec2 size, float sampleCount, float start, float blur) {
    int sampleCountInt = int(floor(sampleCount));
    vec4 accumulator = vec4(0.0);
    vec2 dc = destCoord();
    float normalisedValue = length(((dc / size) - 0.5) * 2.0);
    float strength = clamp((normalisedValue - start) * (1.0 / (1.0 - start)), 0.0, 1.0);
    vec2 vector = normalize((dc - (size / 2.0)) / size);
    vec2 velocity = vector * strength * blur;
    vec2 redOffset = -vector * strength * (blur * 1.0);
    vec2 greenOffset = -vector * strength * (blur * 1.5);
    vec2 blueOffset = -vector * strength * (blur * 2.0);
    for (int i = 0; i < sampleCountInt; i++) {
      accumulator.r += sample(image, samplerTransform(image, dc + redOffset)).r;
      redOffset -= velocity / sampleCount;
      accumulator.g += sample(image, samplerTransform(image, dc + greenOffset)).g;
      greenOffset -= velocity / sampleCount;
      accumulator.b += sample(image, samplerTransform(image, dc + blueOffset)).b;
      blueOffset -= velocity / sampleCount;
    }
    return vec4(vec3(accumulator / float(sampleCountInt)), 1.0);
  }
  """
)!

// Composites the R, G, and B channels of three input images into a single image.
private class RGBChannelCompositing: CIFilter {
  @objc var inputRedImage: CIImage?
  @objc var inputGreenImage: CIImage?
  @objc var inputBlueImage: CIImage?

  let rgbChannelCompositingKernel = CIColorKernel(source:
    "kernel vec4 rgbChannelCompositing(__sample red, __sample green, __sample blue)" +
    "{" +
    "  return vec4(red.r, green.g, blue.b, 1.0);" +
    "}"
  )

  override var attributes: [String: Any] {
    return [
      kCIAttributeFilterDisplayName: "RGB Compositing" as AnyObject,

      "inputRedImage": [kCIAttributeIdentity: 0,
                        kCIAttributeClass: "CIImage",
                        kCIAttributeDisplayName: "Red Image",
                        kCIAttributeType: kCIAttributeTypeImage],

      "inputGreenImage": [kCIAttributeIdentity: 0,
                          kCIAttributeClass: "CIImage",
                          kCIAttributeDisplayName: "Green Image",
                          kCIAttributeType: kCIAttributeTypeImage],

      "inputBlueImage": [kCIAttributeIdentity: 0,
                         kCIAttributeClass: "CIImage",
                         kCIAttributeDisplayName: "Blue Image",
                         kCIAttributeType: kCIAttributeTypeImage]
    ]
  }

  override var outputImage: CIImage! {
    guard let inputRedImage = inputRedImage,
          let inputGreenImage = inputGreenImage,
          let inputBlueImage = inputBlueImage,
          let rgbChannelCompositingKernel = rgbChannelCompositingKernel
    else {
      return nil
    }

    let extent = inputRedImage.extent.union(inputGreenImage.extent.union(inputBlueImage.extent))
    let arguments = [inputRedImage, inputGreenImage, inputBlueImage]

    return rgbChannelCompositingKernel.apply(extent: extent, arguments: arguments)
  }
}

struct ChromaticAberrationFilter: Filtering {

  public static let range: ParameterRange<Double, ChromaticAberrationFilter> = .init(min: 0, max: 20)

  public var intensity: Double = 0

  func apply(to inputImage: CIImage, sourceImage: CIImage) -> CIImage {
    // Shift each channel along a different angle, proportionally to the intensity.
    let redTransform = CGAffineTransform(translationX: sin(redAngle) * intensity, y: cos(redAngle) * intensity)
    let greenTransform = CGAffineTransform(translationX: sin(greenAngle) * intensity, y: cos(greenAngle) * intensity)
    let blueTransform = CGAffineTransform(translationX: sin(blueAngle) * intensity, y: cos(blueAngle) * intensity)

    let red = inputImage.applyingFilter("CIAffineTransform",
                                        parameters: [kCIInputTransformKey: NSValue(cgAffineTransform: redTransform)])
      .cropped(to: inputImage.extent)

    let green = inputImage.applyingFilter("CIAffineTransform",
                                          parameters: [kCIInputTransformKey: NSValue(cgAffineTransform: greenTransform)])
      .cropped(to: inputImage.extent)

    let blue = inputImage.applyingFilter("CIAffineTransform",
                                         parameters: [kCIInputTransformKey: NSValue(cgAffineTransform: blueTransform)])
      .cropped(to: inputImage.extent)

    // Recombine the shifted channels.
    rgbChannelCompositing.inputRedImage = red
    rgbChannelCompositing.inputGreenImage = green
    rgbChannelCompositing.inputBlueImage = blue

    return rgbChannelCompositing.outputImage
  }
}
```

The result of: (screenshots)
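In case it's useful, the same R/G/B recomposition could also be done with built-in filters instead of a custom `CIColorKernel`, which removes kernel compilation from the picture entirely. A rough sketch (the `compositeChannels` helper is hypothetical, not part of Brightroom):

```swift
import CoreImage

// Hypothetical alternative to the custom CIColorKernel: recombine the shifted channel
// images using only built-in filters (CIColorMatrix to isolate a channel, then
// CIMaximumCompositing to merge them), so no kernel compilation is involved.
func compositeChannels(red: CIImage, green: CIImage, blue: CIImage) -> CIImage {
  // Zero out every channel except the one we want to keep; alpha stays at 1.
  func isolate(_ image: CIImage, r: CGFloat, g: CGFloat, b: CGFloat) -> CIImage {
    image.applyingFilter("CIColorMatrix", parameters: [
      "inputRVector": CIVector(x: r, y: 0, z: 0, w: 0),
      "inputGVector": CIVector(x: 0, y: g, z: 0, w: 0),
      "inputBVector": CIVector(x: 0, y: 0, z: b, w: 0),
      "inputAVector": CIVector(x: 0, y: 0, z: 0, w: 1),
    ])
  }

  let redOnly = isolate(red, r: 1, g: 0, b: 0)
  let greenOnly = isolate(green, r: 0, g: 1, b: 0)
  let blueOnly = isolate(blue, r: 0, g: 0, b: 1)

  // Since each image contributes a single channel, a per-component maximum merges them.
  let redGreen = redOnly.applyingFilter("CIMaximumCompositing",
                                        parameters: [kCIInputBackgroundImageKey: greenOnly])
  return redGreen.applyingFilter("CIMaximumCompositing",
                                 parameters: [kCIInputBackgroundImageKey: blueOnly])
}
```

Wired into the `apply(to:sourceImage:)` above, this would replace the `rgbChannelCompositing` calls with `return compositeChannels(red: red, green: green, blue: blue)`.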
Hi @shima11

@baronha
It's great that you found the cause. Thank you for your continued efforts, it's admirable.

Hi @shima11. I was wondering whether this error will cause you any difficulties? I just wanted to ask. Thank you for your interest.
Thanks for your help,

Thank you for using BrightRoom. The app is cool and awesome!

I count you as one of its founders. Thanks for maintaining this repo, @shima11 🙏
Hi @shima11.

@baronha