diff --git a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift index 234cdfd7..2631e374 100755 --- a/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift +++ b/examples/Mac/FilterShowcase/FilterShowcase/FilterOperations.swift @@ -497,6 +497,63 @@ let filterOperations: Array = [ }, filterOperationType:.singleInput ), + FilterOperation( + filter:{ParallelCoordinateLineTransform()}, + listName:"Parallel Coordinate Tester", + titleName:"Parallel Coordinate Tester", + sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, initialValue:0.20), + sliderUpdateCallback: {(filter, sliderValue) in + // filter.threshold = sliderValue + }, + filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + let castFilter = filter as! ParallelCoordinateLineTransform + // TODO: Get this more dynamically sized + let thresholdEdgeDetectionFilter = CannyEdgeDetection() + let parallelCoordsTransformFilter = ParallelCoordinateLineTransform() + let nonMaximumSuppression = TextureSamplingOperation(fragmentShader:ThresholdedNonMaximumSuppressionFragmentShader) + var threshold:Float = 0.2 { didSet { nonMaximumSuppression.uniformSettings["threshold"] = threshold } } + nonMaximumSuppression.uniformSettings["threshold"] = 0.2 +// let directionalNonMaximumSuppression = TextureSamplingOperation(vertexShader:OneInputVertexShader, fragmentShader:DirectionalNonMaximumSuppressionFragmentShader) + +// camera --> thresholdEdgeDetectionFilter --> castFilter --> outputView + camera --> thresholdEdgeDetectionFilter --> castFilter --> outputView + +// camera --> thresholdEdgeDetectionFilter --> castFilter --> nonMaximumSuppression --> outputView + return nil + }) + + ), + FilterOperation( + filter:{HoughTransformLineDetector()}, + listName:"Hough Line detector", + titleName:"Hough Line Detector", + sliderConfiguration:.enabled(minimumValue:0.01, maximumValue:0.70, 
initialValue:0.20), + sliderUpdateCallback: {(filter, sliderValue) in + filter.lineDetectionThreshold = sliderValue + }, + filterOperationType:.custom(filterSetupFunction:{(camera, filter, outputView) in + let castFilter = filter as! HoughTransformLineDetector + // TODO: Get this more dynamically sized + #if os(iOS) + let lineGenerator = LineGenerator(size:Size(width:480, height:640)) + #else + let lineGenerator = LineGenerator(size:Size(width:1280, height:720)) + #endif + + castFilter.linesDetectedCallback = { lines in + lineGenerator.renderLines(lines) + } + + camera --> castFilter + + let blendFilter = AlphaBlend() + camera --> blendFilter --> outputView + lineGenerator --> blendFilter + + return blendFilter + }) + ), + FilterOperation( filter:{HarrisCornerDetector()}, listName:"Harris corner detector", diff --git a/framework/GPUImage-Mac.xcodeproj/project.pbxproj b/framework/GPUImage-Mac.xcodeproj/project.pbxproj index 9bbc7fb6..4241c0b1 100755 --- a/framework/GPUImage-Mac.xcodeproj/project.pbxproj +++ b/framework/GPUImage-Mac.xcodeproj/project.pbxproj @@ -7,6 +7,10 @@ objects = { /* Begin PBXBuildFile section */ + 503409CF1D52FECA00BC789C /* ParallelCoordinateLineTransform.swift in Sources */ = {isa = PBXBuildFile; fileRef = 503409CE1D52FECA00BC789C /* ParallelCoordinateLineTransform.swift */; }; + 503409D31D56B10100BC789C /* HoughTransformLineDetector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 503409D21D56B10100BC789C /* HoughTransformLineDetector.swift */; }; + 503409D91D58431900BC789C /* ParallelCoordinateLineTransformFBORead_GL.fsh in Resources */ = {isa = PBXBuildFile; fileRef = 503409D81D58431900BC789C /* ParallelCoordinateLineTransformFBORead_GL.fsh */; }; + 503409DB1D58577A00BC789C /* ParallelCoordinateLineTransform.vsh in Resources */ = {isa = PBXBuildFile; fileRef = 503409DA1D58577A00BC789C /* ParallelCoordinateLineTransform.vsh */; }; BC09239E1C92658200A2ADFA /* ShaderProgram_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 
BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */; }; BC0923A11C92661D00A2ADFA /* Pipeline_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */; }; BC0923A21C92664900A2ADFA /* Framebuffer.swift in Sources */ = {isa = PBXBuildFile; fileRef = BCB279EB1C8D11630013E213 /* Framebuffer.swift */; }; @@ -193,6 +197,11 @@ /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ + 503409CE1D52FECA00BC789C /* ParallelCoordinateLineTransform.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = ParallelCoordinateLineTransform.swift; path = Source/Operations/ParallelCoordinateLineTransform.swift; sourceTree = ""; }; + 503409D01D52FF1D00BC789C /* ParallelCoordinateLineTransform_GL.fsh */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.glsl; name = ParallelCoordinateLineTransform_GL.fsh; path = Source/Operations/Shaders/ParallelCoordinateLineTransform_GL.fsh; sourceTree = ""; }; + 503409D21D56B10100BC789C /* HoughTransformLineDetector.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = HoughTransformLineDetector.swift; path = Source/Operations/HoughTransformLineDetector.swift; sourceTree = ""; }; + 503409D81D58431900BC789C /* ParallelCoordinateLineTransformFBORead_GL.fsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ParallelCoordinateLineTransformFBORead_GL.fsh; path = Source/Operations/Shaders/ParallelCoordinateLineTransformFBORead_GL.fsh; sourceTree = ""; }; + 503409DA1D58577A00BC789C /* ParallelCoordinateLineTransform.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ParallelCoordinateLineTransform.vsh; path = Source/Operations/Shaders/ParallelCoordinateLineTransform.vsh; sourceTree = ""; }; BC09239D1C92658200A2ADFA /* ShaderProgram_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; 
lastKnownFileType = sourcecode.swift; name = ShaderProgram_Tests.swift; path = Tests/ShaderProgram_Tests.swift; sourceTree = SOURCE_ROOT; }; BC09239F1C9265A600A2ADFA /* Pipeline_Tests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = Pipeline_Tests.swift; path = Tests/Pipeline_Tests.swift; sourceTree = SOURCE_ROOT; }; BC1E12F41C9F2FD7008F844F /* ThreeInput.vsh */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.glsl; name = ThreeInput.vsh; path = Source/Operations/Shaders/ThreeInput.vsh; sourceTree = ""; }; @@ -720,6 +729,11 @@ BCA4E2481CC3EF26007B51BA /* ColourFASTFeatureDetection.swift */, BCA4E24B1CC3F3C5007B51BA /* ColourFASTDecriptor.vsh */, BCA4E24A1CC3F3C5007B51BA /* ColourFASTDecriptor_GL.fsh */, + 503409CE1D52FECA00BC789C /* ParallelCoordinateLineTransform.swift */, + 503409DA1D58577A00BC789C /* ParallelCoordinateLineTransform.vsh */, + 503409D81D58431900BC789C /* ParallelCoordinateLineTransformFBORead_GL.fsh */, + 503409D01D52FF1D00BC789C /* ParallelCoordinateLineTransform_GL.fsh */, + 503409D21D56B10100BC789C /* HoughTransformLineDetector.swift */, ); name = "Image processing"; sourceTree = ""; @@ -1012,6 +1026,8 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 503409D91D58431900BC789C /* ParallelCoordinateLineTransformFBORead_GL.fsh in Resources */, + 503409DB1D58577A00BC789C /* ParallelCoordinateLineTransform.vsh in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; @@ -1100,6 +1116,7 @@ BC7FD11A1CB0793900037949 /* MultiplyBlend.swift in Sources */, BCA4E2591CC3F832007B51BA /* ColorLocalBinaryPattern.swift in Sources */, BCBEC0FA1CCD993900B70ED7 /* MovieInput.swift in Sources */, + 503409D31D56B10100BC789C /* HoughTransformLineDetector.swift in Sources */, BC4EE1561CB3457800AD8A65 /* PrewittEdgeDetection.swift in Sources */, BCB825B61CC9C1F100339790 /* MovieOutput.swift in Sources */, BCFF46D31CB9C24500A0C521 /* HarrisCornerDetector.swift in 
Sources */, @@ -1142,6 +1159,7 @@ BC7FD1241CB07A0100037949 /* SoftLightBlend.swift in Sources */, BC4EE1521CB3445500AD8A65 /* SobelEdgeDetection.swift in Sources */, BC7FD1221CB079DD00037949 /* ScreenBlend.swift in Sources */, + 503409CF1D52FECA00BC789C /* ParallelCoordinateLineTransform.swift in Sources */, BCB279EC1C8D11630013E213 /* Framebuffer.swift in Sources */, BCA4E2301CC31276007B51BA /* Solarize.swift in Sources */, BC7FD19F1CB20B1300037949 /* LineGenerator.swift in Sources */, diff --git a/framework/Source/Operations/HoughTransformLineDetector.swift b/framework/Source/Operations/HoughTransformLineDetector.swift new file mode 100644 index 00000000..3d7dc650 --- /dev/null +++ b/framework/Source/Operations/HoughTransformLineDetector.swift @@ -0,0 +1,101 @@ +#if os(Linux) +#if GLES + import COpenGLES.gles2 + #else + import COpenGL +#endif +#else +#if GLES + import OpenGLES + #else + import OpenGL.GL3 +#endif +#endif + +// +// HoughTransformLineDetector.swift +// GPUImage-Mac +// +// Created by Max Cantor on 8/6/16. +// Copyright © 2016 Sunset Lake Software LLC. All rights reserved. +// + +import Foundation + +public class HoughTransformLineDetector: OperationGroup { + + let thresholdEdgeDetectionFilter = CannyEdgeDetection() + let nonMaximumSuppression = TextureSamplingOperation(fragmentShader:ThresholdedNonMaximumSuppressionFragmentShader) + + public var linesDetectedCallback:(([Line]) -> ())? 
+ public var edgeThreshold:Float = 0.9 + public var lineDetectionThreshold:Float = 0.2 { didSet { nonMaximumSuppression.uniformSettings["threshold"] = lineDetectionThreshold } } + public var cannyBlurRadiusInPixels:Float = 2.0 { didSet { thresholdEdgeDetectionFilter.blurRadiusInPixels = cannyBlurRadiusInPixels } } + public var cannyUpperThreshold:Float = 0.4 { didSet { thresholdEdgeDetectionFilter.upperThreshold = cannyUpperThreshold } } + public var cannyLowerThreshold:Float = 0.1 { didSet { thresholdEdgeDetectionFilter.lowerThreshold = cannyLowerThreshold } } + + public override init() { + super.init() + let parallelCoordsTransformFilter = ParallelCoordinateLineTransform() + nonMaximumSuppression.uniformSettings["threshold"] = lineDetectionThreshold + + outputImageRelay.newImageCallback = {[weak self] framebuffer in + if let linesDetectedCallback = self?.linesDetectedCallback { + linesDetectedCallback(extractLinesFromImage(framebuffer: framebuffer)) + } + } + + self.configureGroup {input, output in + input --> self.thresholdEdgeDetectionFilter --> parallelCoordsTransformFilter --> self.nonMaximumSuppression --> output + } + } +} + +func extractLinesFromImage(framebuffer: Framebuffer) -> [Line] { + let frameSize = framebuffer.size + let pixCount = UInt32(frameSize.width * frameSize.height) + let chanCount: UInt32 = 4 + let imageByteSize = Int(pixCount * chanCount) // since we're comparing to currentByte, might as well cast here + let rawImagePixels = UnsafeMutablePointer.allocate(capacity: Int(imageByteSize)) + glReadPixels(0, 0, frameSize.width, frameSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), rawImagePixels) + // since we only set one position with each iteration of the loop, we'll have to set positions then combine into lines + // linesArray = calloc(1024 * 2, sizeof(GLfloat)); - lines is 2048 floats - which is 1024 positions or 512 lines + var lines = Array() + + let imageWidthInt = Int(framebuffer.size.width * 4) +// let startTime = 
CFAbsoluteTimeGetCurrent() + var currentByte:Int = 0 +// var cornerStorageIndex: UInt32 = 0 + var lineStrengthCounter: UInt64 = 0 + while (currentByte < imageByteSize) { + let colorByte = rawImagePixels[currentByte] + if (colorByte > 0) { + let xCoordinate = currentByte % imageWidthInt + let yCoordinate = currentByte / imageWidthInt + lineStrengthCounter += UInt64(colorByte) + let normalizedXCoordinate = -1.0 + 2.0 * (Float)(xCoordinate / 4) / Float(frameSize.width) + let normalizedYCoordinate = -1.0 + 2.0 * (Float)(yCoordinate) / Float(frameSize.height) +// print("(\(xCoordinate), \(yCoordinate)), [\(rawImagePixels[currentByte]), \(rawImagePixels[currentByte+1]), \(rawImagePixels[currentByte+2]), \(rawImagePixels[currentByte+3]) ] ") + let nextLine = + ( normalizedXCoordinate < 0.0 + ? ( normalizedXCoordinate > -0.05 + // T space + // m = -1 - d/u + // b = d * v/u + ? Line.infinite(slope:100000.0, intercept: normalizedYCoordinate) + : Line.infinite(slope: -1.0 - 1.0 / normalizedXCoordinate, intercept: 1.0 * normalizedYCoordinate / normalizedXCoordinate) + ) + : ( normalizedXCoordinate < 0.05 + // S space + // m = 1 - d/u + // b = d * v/u + ? Line.infinite(slope: 100000.0, intercept: normalizedYCoordinate) + : Line.infinite(slope: 1.0 - 1.0 / normalizedXCoordinate,intercept: 1.0 * normalizedYCoordinate / normalizedXCoordinate) + ) + ) + lines.append(nextLine) + } + currentByte += 4 + } + return lines +} diff --git a/framework/Source/Operations/ParallelCoordinateLineTransform.swift b/framework/Source/Operations/ParallelCoordinateLineTransform.swift new file mode 100644 index 00000000..5ed5480d --- /dev/null +++ b/framework/Source/Operations/ParallelCoordinateLineTransform.swift @@ -0,0 +1,163 @@ +#if os(Linux) +#if GLES + import COpenGLES.gles2 + #else + import COpenGL +#endif +#else +#if GLES + import OpenGLES + #else + import OpenGL.GL3 +#endif +#endif + +// +// ParallelCoordinateLineTransform.swift +// GPUImage-Mac +// +// Created by Max Cantor on 8/3/16. 
+// Copyright © 2016 Sunset Lake Software LLC. All rights reserved. +// + +import Foundation + +public class ParallelCoordinateLineTransform: BasicOperation { + var lineCoordinates:UnsafeMutablePointer? + let MAX_SCALING_FACTOR: UInt32 = 4 + public init() { + let fragShader = + ( sharedImageProcessingContext.deviceSupportsFramebufferReads() + ? ParallelCoordinateLineTransformFBOReadFragmentShader + : ParallelCoordinateLineTransformFragmentShader + ) + super.init(vertexShader: ParallelCoordinateLineTransformVertexShader, fragmentShader: fragShader) + } + override func renderFrame() { + renderToTextureVertices() + } + func renderToTextureVertices() { + guard let framebuffer = inputFramebuffers[0] else {fatalError("Could not get framebuffer orientation for parallel coords")} + let inputSize = sizeOfInitialStageBasedOnFramebuffer(framebuffer) + // Making lots of things Ints instead of UInt32 or Int32 so that we can "Freely" access array indices. + // I dont like it but c'est la vie + let inputByteSize = Int(inputSize.width * inputSize.height * 4) + let imageByteWidth = framebuffer.size.width * 4 + let maxLinePairsToRender = (Int(inputSize.width * inputSize.height) / Int(self.MAX_SCALING_FACTOR)) + let lineCoordinates = self.lineCoordinates ?? 
+ UnsafeMutablePointer.allocate(capacity: Int(maxLinePairsToRender * 8)) + + let rawImagePixels = UnsafeMutablePointer.allocate(capacity: Int(inputByteSize)) + glFinish() + glReadPixels(0, 0, inputSize.width, inputSize.height, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), rawImagePixels) + + +//var lineCoordinates = Array(count: Int(maxLinePairsToRender) * 2, repeatedValue: Line.Segment(p1:Position(0,0),p2:Position(0,0))) + // Copying from Harris Corner Detector +// let imageByteSize = Int(framebuffer.size.width * framebuffer.size.height * 4) +// let inputTextureSize = framebuffer.size + + let startTime = CFAbsoluteTimeGetCurrent() + let xAspectMultiplier:Float = 1.0 + let yAspectMultiplier:Float = 1.0 + + + var linePairsToRender:Int = 0 + var currentByte:Int = 0 + var lineStorageIndex:Int = 0 + + let maxLineStorageIndex = maxLinePairsToRender * 8 - 8 + + var minY:Float = 100 + var maxY:Float = -100 + var minX:Float = 100 + var maxX:Float = -100 + + while (currentByte < inputByteSize) { + let colorByte = rawImagePixels[currentByte] + + if (colorByte > 0) { + let xCoordinate = Int32(currentByte) % imageByteWidth + let yCoordinate = Int32(currentByte) / imageByteWidth + + let normalizedXCoordinate:Float = (-1.0 + 2.0 * (Float)(xCoordinate / 4) / Float(inputSize.width)) * xAspectMultiplier; + let normalizedYCoordinate:Float = (-1.0 + 2.0 * (Float)(yCoordinate) / Float(inputSize.height)) * yAspectMultiplier; + + // this might not be the most performant.. + minY = min(minY, normalizedYCoordinate); + maxY = max(maxY, normalizedYCoordinate); + minX = min(minX, normalizedXCoordinate); + maxX = max(maxX, normalizedXCoordinate); + // NSLog(@"Parallel line coordinates: (%f, %f) - (%f, %f) - (%f, %f)", -1.0, -normalizedYCoordinate, 0.0, normalizedXCoordinate, 1.0, normalizedYCoordinate); + // T space coordinates, (-d, -y) to (0, x) + // Note - I really dont know if its better to just use signed ints. Swift wont allow a UInt as an array index but + // signed ints seem silly. 
If this is a no-op, then fine. But if casting like this hurts performance can look into + better solutions. + + // T space coordinates, (-d, -y) to (0, x) + lineCoordinates[lineStorageIndex] = -1.0; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = -normalizedYCoordinate; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = 0.0; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = normalizedXCoordinate; lineStorageIndex += 1 + + // S space coordinates, (0, x) to (d, y) + lineCoordinates[lineStorageIndex] = 0.0; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = normalizedXCoordinate; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = 1.0; lineStorageIndex += 1 + lineCoordinates[lineStorageIndex] = normalizedYCoordinate; lineStorageIndex += 1 + + linePairsToRender += 1 + + linePairsToRender = min(linePairsToRender, maxLinePairsToRender) + lineStorageIndex = min(lineStorageIndex, maxLineStorageIndex) + } + currentByte += 8 + } + // NSLog(@"Line pairs to render: %d out of max: %d", linePairsToRender, maxLinePairsToRender); + + let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); + print("Line generation processing time : \(1000.0 * currentFrameTime) ms for \(linePairsToRender) lines"); + renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:framebuffer.orientation, size:inputSize, stencil:mask != nil) + releaseIncomingFramebuffers() + renderFramebuffer.activateFramebufferForRendering() +// clearFramebufferWithColor(Color.Black) + + // do we need this: +// [self setUniformsForProgramAtIndex:0]; +// renderFramebuffer.lock() // may be unnecessary - what is the GPUImage2 version of usingNextFrameForImageCapture + shader.use() + + // + // can we get rid of this from clearFrameBufferWithColor + glClearColor(0.0, 0.0, 0.0, 1.0); + glClear(GLenum(GL_COLOR_BUFFER_BIT)); + // + let supportsFrameBufferReads = 
sharedImageProcessingContext.deviceSupportsFramebufferReads() + if (!supportsFrameBufferReads) { + glBlendEquation(GLenum(GL_FUNC_ADD)) + glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE)) + glEnable(GLenum(GL_BLEND)) + } + else + { + } + + glLineWidth(1); + guard let filterPositionAttr = shader.attributeIndex("position") else { fatalError("A position attribute was missing from the shader program during rendering.") } + + glVertexAttribPointer(filterPositionAttr, 2, GLenum(GL_FLOAT), 0, 0, lineCoordinates); + glDrawArrays(GLenum(GL_LINES), 0, (Int32(linePairsToRender) * 4)); + + if (!supportsFrameBufferReads) + { + glDisable(GLenum(GL_BLEND)) + } + +// [firstInputFramebuffer unlock]; +// if (usingNextFrameForImageCapture) +// { +// dispatch_semaphore_signal(imageCaptureSemaphore); +// } + + } +} diff --git a/framework/Source/Operations/Shaders/ConvertedShaders_GL.swift b/framework/Source/Operations/Shaders/ConvertedShaders_GL.swift index 7eb18a5e..16940ace 100644 --- a/framework/Source/Operations/Shaders/ConvertedShaders_GL.swift +++ b/framework/Source/Operations/Shaders/ConvertedShaders_GL.swift @@ -101,6 +101,9 @@ public let NormalBlendFragmentShader = "/*\n This equation is a simplification public let OneInputVertexShader = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n varying vec2 textureCoordinate;\n \n void main()\n {\n gl_Position = position;\n textureCoordinate = inputTextureCoordinate.xy;\n }\n " public let OpacityFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform float opacity;\n \n void main()\n {\n vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n \n gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n }\n " public let OverlayBlendFragmentShader = "varying vec2 textureCoordinate;\n varying vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n vec4 base = 
texture2D(inputImageTexture, textureCoordinate);\n vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n \n float ra;\n if (2.0 * base.r < base.a) {\n ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n } else {\n ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n }\n \n float ga;\n if (2.0 * base.g < base.a) {\n ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n } else {\n ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n }\n \n float ba;\n if (2.0 * base.b < base.a) {\n ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n } else {\n ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n }\n \n gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n " +public let ParallelCoordinateLineTransformVertexShader = "attribute vec4 position;\n \n void main()\n {\n gl_Position = position;\n }\n " +public let ParallelCoordinateLineTransformFBOReadFragmentShader = "const lowp float scalingFactor = 0.004;\n // const lowp float scalingFactor = 0.1;\n \n void main()\n {\n mediump vec4 fragmentData = gl_LastFragData[0];\n \n fragmentData.r = fragmentData.r + scalingFactor;\n fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g;\n fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b;\n fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a;\n \n fragmentData = fract(fragmentData);\n \n gl_FragColor = vec4(fragmentData.rgb, 1.0);\n }\n " +public let ParallelCoordinateLineTransformFragmentShader = "const float scalingFactor = 1.0 / 256.0;\n \n void main()\n {\n // gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0);\n gl_FragColor = 
vec4(0.004, 0.004, 0.004, 1.0);\n // gl_FragColor = vec4(1.0, 1.0, 1.0, scalingFactor);\n }\n //\n //const lowp float scalingFactor = 0.004;\n //// const lowp float scalingFactor = 0.1;\n //\n //void main()\n //{\n // mediump vec4 fragmentData = gl_LastFragData[0];\n //\n // fragmentData.r = fragmentData.r + scalingFactor;\n // fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g;\n // fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b;\n // fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a;\n //\n // fragmentData = fract(fragmentData);\n //\n // gl_FragColor = vec4(fragmentData.rgb, 1.0);\n //}\n " public let PassthroughFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " public let PinchDistortionFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float aspectRatio;\n uniform vec2 center;\n uniform float radius;\n uniform float scale;\n \n void main()\n {\n vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n float dist = distance(center, textureCoordinateToUse);\n textureCoordinateToUse = textureCoordinate;\n \n if (dist < radius)\n {\n textureCoordinateToUse -= center;\n float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;\n textureCoordinateToUse = textureCoordinateToUse * percent;\n textureCoordinateToUse += center;\n \n gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n }\n else\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n }\n }\n " public let PixellateFragmentShader = "varying vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform float fractionalWidthOfPixel;\n uniform float aspectRatio;\n \n void main()\n {\n vec2 sampleDivisor = vec2(fractionalWidthOfPixel, 
fractionalWidthOfPixel / aspectRatio);\n \n vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n gl_FragColor = texture2D(inputImageTexture, samplePos );\n }\n " diff --git a/framework/Source/Operations/Shaders/ConvertedShaders_GLES.swift b/framework/Source/Operations/Shaders/ConvertedShaders_GLES.swift index e20b71da..2e7f3500 100644 --- a/framework/Source/Operations/Shaders/ConvertedShaders_GLES.swift +++ b/framework/Source/Operations/Shaders/ConvertedShaders_GLES.swift @@ -101,6 +101,7 @@ public let NormalBlendFragmentShader = "/*\n This equation is a simplification public let OneInputVertexShader = "attribute vec4 position;\n attribute vec4 inputTextureCoordinate;\n \n varying vec2 textureCoordinate;\n \n void main()\n {\n gl_Position = position;\n textureCoordinate = inputTextureCoordinate.xy;\n }\n " public let OpacityFragmentShader = "varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n uniform lowp float opacity;\n \n void main()\n {\n lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);\n \n gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);\n }\n " public let OverlayBlendFragmentShader = "varying highp vec2 textureCoordinate;\n varying highp vec2 textureCoordinate2;\n \n uniform sampler2D inputImageTexture;\n uniform sampler2D inputImageTexture2;\n \n void main()\n {\n mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);\n mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);\n \n mediump float ra;\n if (2.0 * base.r < base.a) {\n ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n } else {\n ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);\n }\n \n mediump float ga;\n if (2.0 * base.g < base.a) {\n ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n 
} else {\n ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);\n }\n \n mediump float ba;\n if (2.0 * base.b < base.a) {\n ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n } else {\n ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);\n }\n \n gl_FragColor = vec4(ra, ga, ba, 1.0);\n }\n " +public let ParallelCoordinateLineTransformVertexShader = "attribute vec4 position;\n \n void main()\n {\n gl_Position = position;\n }\n " public let PassthroughFragmentShader = "varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n void main()\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate);\n }\n " public let PinchDistortionFragmentShader = "varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float aspectRatio;\n uniform highp vec2 center;\n uniform highp float radius;\n uniform highp float scale;\n \n void main()\n {\n highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));\n highp float dist = distance(center, textureCoordinateToUse);\n textureCoordinateToUse = textureCoordinate;\n \n if (dist < radius)\n {\n textureCoordinateToUse -= center;\n highp float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;\n textureCoordinateToUse = textureCoordinateToUse * percent;\n textureCoordinateToUse += center;\n \n gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );\n }\n else\n {\n gl_FragColor = texture2D(inputImageTexture, textureCoordinate );\n }\n }\n " public let PixellateFragmentShader = "varying highp vec2 textureCoordinate;\n \n uniform sampler2D inputImageTexture;\n \n uniform highp float fractionalWidthOfPixel;\n uniform highp float aspectRatio;\n \n void main()\n {\n highp vec2 sampleDivisor 
= vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);\n \n highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;\n gl_FragColor = texture2D(inputImageTexture, samplePos );\n }\n " diff --git a/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform.vsh b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform.vsh new file mode 100644 index 00000000..dc0afc41 --- /dev/null +++ b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform.vsh @@ -0,0 +1,6 @@ +attribute vec4 position; + +void main() +{ + gl_Position = position; +} \ No newline at end of file diff --git a/framework/Source/Operations/Shaders/ParallelCoordinateLineTransformFBORead_GL.fsh b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransformFBORead_GL.fsh new file mode 100644 index 00000000..39dbe0a9 --- /dev/null +++ b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransformFBORead_GL.fsh @@ -0,0 +1,16 @@ +const lowp float scalingFactor = 0.004; +// const lowp float scalingFactor = 0.1; + +void main() +{ + mediump vec4 fragmentData = gl_LastFragData[0]; + + fragmentData.r = fragmentData.r + scalingFactor; + fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g; + fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b; + fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a; + + fragmentData = fract(fragmentData); + + gl_FragColor = vec4(fragmentData.rgb, 1.0); +} diff --git a/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform_GL.fsh b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform_GL.fsh new file mode 100644 index 00000000..f29c9797 --- /dev/null +++ b/framework/Source/Operations/Shaders/ParallelCoordinateLineTransform_GL.fsh @@ -0,0 +1,24 @@ +const float scalingFactor = 1.0 / 256.0; + +void main() +{ + // gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0); + gl_FragColor = 
vec4(0.004, 0.004, 0.004, 1.0); +} +// +//const lowp float scalingFactor = 0.004; +//// const lowp float scalingFactor = 0.1; +// +//void main() +//{ +// mediump vec4 fragmentData = gl_LastFragData[0]; +// +// fragmentData.r = fragmentData.r + scalingFactor; +// fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g; +// fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b; +// fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a; +// +// fragmentData = fract(fragmentData); +// +// gl_FragColor = vec4(fragmentData.rgb, 1.0); +//} \ No newline at end of file