diff --git a/APIDiffs/api-diffs-1.8.1.md b/APIDiffs/api-diffs-1.8.1.md
new file mode 100644
index 0000000..b6774d7
--- /dev/null
+++ b/APIDiffs/api-diffs-1.8.1.md
@@ -0,0 +1,9 @@
+# PLCameraStreamingKit 1.8.0 to 1.8.1 API Differences
+
+```
+PLCameraStreamingSession.h
+```
+- *Added* property `@property (nonatomic, assign, getter=isAutoReconnectEnable) BOOL autoReconnectEnable;`
+- *Added* method `+ (NSString *)versionInfo;`
+
+## General Headers
diff --git a/Example/PLCameraStreamingKit/PLCameraStreamingKit-Info.plist b/Example/PLCameraStreamingKit/PLCameraStreamingKit-Info.plist
index 1737676..78d41b7 100644
--- a/Example/PLCameraStreamingKit/PLCameraStreamingKit-Info.plist
+++ b/Example/PLCameraStreamingKit/PLCameraStreamingKit-Info.plist
@@ -17,7 +17,7 @@
CFBundlePackageType
APPL
CFBundleShortVersionString
- 1.7.2
+ 1.8.1
CFBundleSignature
????
CFBundleVersion
diff --git a/Example/PLCameraStreamingKit/PLViewController.m b/Example/PLCameraStreamingKit/PLViewController.m
index ee4af71..80e8533 100644
--- a/Example/PLCameraStreamingKit/PLViewController.m
+++ b/Example/PLCameraStreamingKit/PLViewController.m
@@ -119,6 +119,7 @@ - (void)viewDidLoad {
AVCaptureVideoOrientation orientation = (AVCaptureVideoOrientation)(([[UIDevice currentDevice] orientation] <= UIDeviceOrientationLandscapeRight && [[UIDevice currentDevice] orientation] != UIDeviceOrientationUnknown) ? [[UIDevice currentDevice] orientation]: UIDeviceOrientationPortrait);
// 推流 session
self.session = [[PLCameraStreamingSession alloc] initWithVideoCaptureConfiguration:videoCaptureConfiguration audioCaptureConfiguration:audioCaptureConfiguration videoStreamingConfiguration:videoStreamingConfiguration audioStreamingConfiguration:audioStreamingConfiguration stream:stream videoOrientation:orientation];
+ self.session.captureDevicePosition = AVCaptureDevicePositionBack;
self.session.delegate = self;
self.session.bufferDelegate = self;
UIImage *waterMark = [UIImage imageNamed:@"qiniu.png"];
diff --git a/Example/Podfile.lock b/Example/Podfile.lock
index 743daa2..2443217 100644
--- a/Example/Podfile.lock
+++ b/Example/Podfile.lock
@@ -1,49 +1,48 @@
PODS:
- - GPUImage (0.1.7)
- - HappyDNS (0.3.5)
- - KSCrash (1.6.0):
- - KSCrash/Installations (= 1.6.0)
- - KSCrash/Installations (1.6.0):
+ - HappyDNS (0.3.7)
+ - KSCrash (1.6.4):
+ - KSCrash/Installations (= 1.6.4)
+ - KSCrash/Installations (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting
- - KSCrash/no-arc (1.6.0)
- - KSCrash/Recording (1.6.0):
+ - KSCrash/no-arc (1.6.4)
+ - KSCrash/Recording (1.6.4):
- KSCrash/no-arc
- - KSCrash/Reporting (1.6.0):
+ - KSCrash/Reporting (1.6.4):
- KSCrash/Recording
- - KSCrash/Reporting/Filters (= 1.6.0)
- - KSCrash/Reporting/MessageUI (= 1.6.0)
- - KSCrash/Reporting/Sinks (= 1.6.0)
- - KSCrash/Reporting/Tools (= 1.6.0)
- - KSCrash/Reporting/Filters (1.6.0):
+ - KSCrash/Reporting/Filters (= 1.6.4)
+ - KSCrash/Reporting/MessageUI (= 1.6.4)
+ - KSCrash/Reporting/Sinks (= 1.6.4)
+ - KSCrash/Reporting/Tools (= 1.6.4)
+ - KSCrash/Reporting/Filters (1.6.4):
- KSCrash/Recording
- - KSCrash/Reporting/Filters/Alert (= 1.6.0)
- - KSCrash/Reporting/Filters/AppleFmt (= 1.6.0)
- - KSCrash/Reporting/Filters/Base (= 1.6.0)
- - KSCrash/Reporting/Filters/Basic (= 1.6.0)
- - KSCrash/Reporting/Filters/GZip (= 1.6.0)
- - KSCrash/Reporting/Filters/JSON (= 1.6.0)
- - KSCrash/Reporting/Filters/Sets (= 1.6.0)
- - KSCrash/Reporting/Filters/Stringify (= 1.6.0)
- - KSCrash/Reporting/Filters/Tools (= 1.6.0)
- - KSCrash/Reporting/Filters/Alert (1.6.0):
+ - KSCrash/Reporting/Filters/Alert (= 1.6.4)
+ - KSCrash/Reporting/Filters/AppleFmt (= 1.6.4)
+ - KSCrash/Reporting/Filters/Base (= 1.6.4)
+ - KSCrash/Reporting/Filters/Basic (= 1.6.4)
+ - KSCrash/Reporting/Filters/GZip (= 1.6.4)
+ - KSCrash/Reporting/Filters/JSON (= 1.6.4)
+ - KSCrash/Reporting/Filters/Sets (= 1.6.4)
+ - KSCrash/Reporting/Filters/Stringify (= 1.6.4)
+ - KSCrash/Reporting/Filters/Tools (= 1.6.4)
+ - KSCrash/Reporting/Filters/Alert (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/AppleFmt (1.6.0):
+ - KSCrash/Reporting/Filters/AppleFmt (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/Base (1.6.0):
+ - KSCrash/Reporting/Filters/Base (1.6.4):
- KSCrash/Recording
- - KSCrash/Reporting/Filters/Basic (1.6.0):
+ - KSCrash/Reporting/Filters/Basic (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/GZip (1.6.0):
+ - KSCrash/Reporting/Filters/GZip (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/JSON (1.6.0):
+ - KSCrash/Reporting/Filters/JSON (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/Sets (1.6.0):
+ - KSCrash/Reporting/Filters/Sets (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/AppleFmt
- KSCrash/Reporting/Filters/Base
@@ -51,34 +50,32 @@ PODS:
- KSCrash/Reporting/Filters/GZip
- KSCrash/Reporting/Filters/JSON
- KSCrash/Reporting/Filters/Stringify
- - KSCrash/Reporting/Filters/Stringify (1.6.0):
+ - KSCrash/Reporting/Filters/Stringify (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters/Base
- - KSCrash/Reporting/Filters/Tools (1.6.0):
+ - KSCrash/Reporting/Filters/Tools (1.6.4):
- KSCrash/Recording
- - KSCrash/Reporting/MessageUI (1.6.0):
+ - KSCrash/Reporting/MessageUI (1.6.4):
- KSCrash/Recording
- - KSCrash/Reporting/Sinks (1.6.0):
+ - KSCrash/Reporting/Sinks (1.6.4):
- KSCrash/Recording
- KSCrash/Reporting/Filters
- KSCrash/Reporting/Tools
- - KSCrash/Reporting/Tools (1.6.0):
+ - KSCrash/Reporting/Tools (1.6.4):
- KSCrash/Recording
- - pili-librtmp (1.0.3)
- - PLCameraStreamingKit (1.8.0):
- - GPUImage (= 0.1.7)
- - PLCameraStreamingKit/precompiled (= 1.8.0)
- - PLStreamingKit (~> 1.2.0)
- - PLCameraStreamingKit/precompiled (1.8.0):
- - GPUImage (= 0.1.7)
- - PLStreamingKit (~> 1.2.0)
- - PLStreamingKit (1.2.4):
+ - pili-librtmp (1.0.3.1)
+ - PLCameraStreamingKit (1.8.1):
+ - PLCameraStreamingKit/precompiled (= 1.8.1)
+ - PLStreamingKit (= 1.2.5)
+ - PLCameraStreamingKit/precompiled (1.8.1):
+ - PLStreamingKit (= 1.2.5)
+ - PLStreamingKit (1.2.5):
- HappyDNS
- - pili-librtmp
- - PLStreamingKit/precompiled (= 1.2.4)
- - PLStreamingKit/precompiled (1.2.4):
+ - pili-librtmp (= 1.0.3.1)
+ - PLStreamingKit/precompiled (= 1.2.5)
+ - PLStreamingKit/precompiled (1.2.5):
- HappyDNS
- - pili-librtmp
+ - pili-librtmp (= 1.0.3.1)
DEPENDENCIES:
- KSCrash
@@ -89,11 +86,10 @@ EXTERNAL SOURCES:
:path: ../
SPEC CHECKSUMS:
- GPUImage: 733a5f0fab92df9de1c37ba9df520a833ccb406d
- HappyDNS: 6f8cf46fa7c0b98a6591fe8ecb2e08d9907a6e8b
- KSCrash: 73fc2bcec2b6bcd1d948d200e11904c39d532a0e
- pili-librtmp: 50faa17413c635cca5ceccb1a8f9f6acc6587a45
- PLCameraStreamingKit: 03b4bce4218edeb18a0060d8849eb5d7b2af4a5d
- PLStreamingKit: c8574decbacb981e36e5b95314c991a3b5d02125
+ HappyDNS: ec476226e119bf4766b74975ec4873f6fa078cf2
+ KSCrash: d8e5ad6724d26a48e8ca483028c65e33d92e7834
+ pili-librtmp: 0adf2516f4b5958b3cb079a4c3d4f6a4a9f49ce9
+ PLCameraStreamingKit: 12b672cbef9ba15bbb28ec543fb77f7d4cea66dc
+ PLStreamingKit: 4b46739689d020b8e2eb755827ec86447c0291b7
COCOAPODS: 0.39.0
diff --git a/Example/Pods/GPUImage/License.txt b/Example/Pods/GPUImage/License.txt
deleted file mode 100755
index e8062d9..0000000
--- a/Example/Pods/GPUImage/License.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-Copyright (c) 2012, Brad Larson, Ben Cochran, Hugues Lismonde, Keitaroh Kobayashi, Alaric Cole, Matthew Clark, Jacob Gundersen, Chris Williams.
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-Neither the name of the GPUImage framework nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/Example/Pods/GPUImage/README.md b/Example/Pods/GPUImage/README.md
deleted file mode 100755
index a487f36..0000000
--- a/Example/Pods/GPUImage/README.md
+++ /dev/null
@@ -1,733 +0,0 @@
-# GPUImage #
-
-
-
-
-
-Brad Larson
-
-http://www.sunsetlakesoftware.com
-
-[@bradlarson](http://twitter.com/bradlarson)
-
-contact@sunsetlakesoftware.com
-
-## Overview ##
-
-The GPUImage framework is a BSD-licensed iOS library that lets you apply GPU-accelerated filters and other effects to images, live camera video, and movies. In comparison to Core Image (part of iOS 5.0), GPUImage allows you to write your own custom filters, supports deployment to iOS 4.0, and has a simpler interface. However, it currently lacks some of the more advanced features of Core Image, such as facial detection.
-
-For massively parallel operations like processing images or live video frames, GPUs have some significant performance advantages over CPUs. On an iPhone 4, a simple image filter can be over 100 times faster to perform on the GPU than an equivalent CPU-based filter.
-
-However, running custom filters on the GPU requires a lot of code to set up and maintain an OpenGL ES 2.0 rendering target for these filters. I created a sample project to do this:
-
-http://www.sunsetlakesoftware.com/2010/10/22/gpu-accelerated-video-processing-mac-and-ios
-
-and found that there was a lot of boilerplate code I had to write in its creation. Therefore, I put together this framework that encapsulates a lot of the common tasks you'll encounter when processing images and video and made it so that you don't need to care about the OpenGL ES 2.0 underpinnings.
-
-This framework compares favorably to Core Image when handling video, taking only 2.5 ms on an iPhone 4 to upload a frame from the camera, apply a gamma filter, and display, versus 106 ms for the same operation using Core Image. CPU-based processing takes 460 ms, making GPUImage 40X faster than Core Image for this operation on this hardware, and 184X faster than CPU-bound processing. On an iPhone 4S, GPUImage is only 4X faster than Core Image for this case, and 102X faster than CPU-bound processing. However, for more complex operations like Gaussian blurs at larger radii, Core Image currently outpaces GPUImage.
-
-## License ##
-
-BSD-style, with the full license available with the framework in License.txt.
-
-## Technical requirements ##
-
-- OpenGL ES 2.0: Applications using this will not run on the original iPhone, iPhone 3G, and 1st and 2nd generation iPod touches
-- iOS 4.1 as a deployment target (4.0 didn't have some extensions needed for movie reading). iOS 4.3 is needed as a deployment target if you wish to show live video previews when taking a still photo.
-- iOS 5.0 SDK to build
-- Devices must have a camera to use camera-related functionality (obviously)
-- The framework uses automatic reference counting (ARC), but should support projects using both ARC and manual reference counting if added as a subproject as explained below. For manual reference counting applications targeting iOS 4.x, you'll need add -fobjc-arc to the Other Linker Flags for your application project.
-
-## General architecture ##
-
-GPUImage uses OpenGL ES 2.0 shaders to perform image and video manipulation much faster than could be done in CPU-bound routines. However, it hides the complexity of interacting with the OpenGL ES API in a simplified Objective-C interface. This interface lets you define input sources for images and video, attach filters in a chain, and send the resulting processed image or video to the screen, to a UIImage, or to a movie on disk.
-
-Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include GPUImageVideoCamera (for live video from an iOS camera), GPUImageStillCamera (for taking photos with the camera), GPUImagePicture (for still images), and GPUImageMovie (for movies). Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
-
-Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
-
-For example, an application that takes in live video from the camera, converts that video to a sepia tone, then displays the video onscreen would set up a chain looking something like the following:
-
- GPUImageVideoCamera -> GPUImageSepiaFilter -> GPUImageView
-
-## Adding the static library to your iOS project ##
-
-Note: if you want to use this in a Swift project, you need to use the steps in the "Adding this as a framework" section instead of the following. Swift needs modules for third-party code.
-
-Once you have the latest source code for the framework, it's fairly straightforward to add it to your application. Start by dragging the GPUImage.xcodeproj file into your application's Xcode project to embed the framework in your project. Next, go to your application's target and add GPUImage as a Target Dependency. Finally, you'll want to drag the libGPUImage.a library from the GPUImage framework's Products folder to the Link Binary With Libraries build phase in your application's target.
-
-GPUImage needs a few other frameworks to be linked into your application, so you'll need to add the following as linked libraries in your application target:
-
-- CoreMedia
-- CoreVideo
-- OpenGLES
-- AVFoundation
-- QuartzCore
-
-You'll also need to find the framework headers, so within your project's build settings set the Header Search Paths to the relative path from your application to the framework/ subdirectory within the GPUImage source directory. Make this header search path recursive.
-
-To use the GPUImage classes within your application, simply include the core framework header using the following:
-
- #import "GPUImage.h"
-
-As a note: if you run into the error "Unknown class GPUImageView in Interface Builder" or the like when trying to build an interface with Interface Builder, you may need to add -ObjC to your Other Linker Flags in your project's build settings.
-
-Also, if you need to deploy this to iOS 4.x, it appears that the current version of Xcode (4.3) requires that you weak-link the Core Video framework in your final application or you see crashes with the message "Symbol not found: _CVOpenGLESTextureCacheCreate" when you create an archive for upload to the App Store or for ad hoc distribution. To do this, go to your project's Build Phases tab, expand the Link Binary With Libraries group, and find CoreVideo.framework in the list. Change the setting for it in the far right of the list from Required to Optional.
-
-Additionally, this is an ARC-enabled framework, so if you want to use this within a manual reference counted application targeting iOS 4.x, you'll need to add -fobjc-arc to your Other Linker Flags as well.
-
-### Building a static library at the command line ###
-
-If you don't want to include the project as a dependency in your application's Xcode project, you can build a universal static library for the iOS Simulator or device. To do this, run `build.sh` at the command line. The resulting library and header files will be located at `build/Release-iphone`. You may also change the version of the iOS SDK by changing the `IOSSDK_VER` variable in `build.sh` (all available versions can be found using `xcodebuild -showsdks`).
-
-## Adding this as a framework (module) to your Mac or iOS project ##
-
-Xcode 6 and iOS 8 support the use of full frameworks, as does the Mac, which simplifies the process of adding this to your application. To add this to your application, I recommend dragging the .xcodeproj project file into your application's project (as you would in the static library target).
-
-For your application, go to its target build settings and choose the Build Phases tab. Under the Target Dependencies grouping, add GPUImageFramework on iOS (not GPUImage, which builds the static library) or GPUImage on the Mac. Under the Link Binary With Libraries section, add GPUImage.framework.
-
-This should cause GPUImage to build as a framework. Under Xcode 6, this will also build as a module, which will allow you to use this in Swift projects. When set up as above, you should just need to use
-
- import GPUImage
-
-to pull it in.
-
-You then need to add a new Copy Files build phase, set the Destination to Frameworks, and add the GPUImage.framework build product to that. This will allow the framework to be bundled with your application (otherwise, you'll see cryptic "dyld: Library not loaded: @rpath/GPUImage.framework/GPUImage" errors on execution).
-
-### Documentation ###
-
-Documentation is generated from header comments using appledoc. To build the documentation, switch to the "Documentation" scheme in Xcode. You should ensure that "APPLEDOC_PATH" (a User-Defined build setting) points to an appledoc binary, available on Github or through Homebrew. It will also build and install a .docset file, which you can view with your favorite documentation tool.
-
-## Performing common tasks ##
-
-### Filtering live video ###
-
-To filter live video from an iOS device's camera, you can use code like the following:
-
- GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
- videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
-
- GPUImageFilter *customFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];
- GPUImageView *filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, viewWidth, viewHeight)];
-
- // Add the view somewhere so it's visible
-
- [videoCamera addTarget:customFilter];
- [customFilter addTarget:filteredVideoView];
-
- [videoCamera startCameraCapture];
-
-This sets up a video source coming from the iOS device's back-facing camera, using a preset that tries to capture at 640x480. This video is captured with the interface being in portrait mode, where the landscape-left-mounted camera needs to have its video frames rotated before display. A custom filter, using code from the file CustomShader.fsh, is then set as the target for the video frames from the camera. These filtered video frames are finally displayed onscreen with the help of a UIView subclass that can present the filtered OpenGL ES texture that results from this pipeline.
-
-The fill mode of the GPUImageView can be altered by setting its fillMode property, so that if the aspect ratio of the source video is different from that of the view, the video will either be stretched, centered with black bars, or zoomed to fill.
-
-For blending filters and others that take in more than one image, you can create multiple outputs and add a single filter as a target for both of these outputs. The order with which the outputs are added as targets will affect the order in which the input images are blended or otherwise processed.
-
-Also, if you wish to enable microphone audio capture for recording to a movie, you'll need to set the audioEncodingTarget of the camera to be your movie writer, like for the following:
-
- videoCamera.audioEncodingTarget = movieWriter;
-
-
-### Capturing and filtering a still photo ###
-
-To capture and filter still photos, you can use a process similar to the one for filtering video. Instead of a GPUImageVideoCamera, you use a GPUImageStillCamera:
-
- stillCamera = [[GPUImageStillCamera alloc] init];
- stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
-
- filter = [[GPUImageGammaFilter alloc] init];
- [stillCamera addTarget:filter];
- GPUImageView *filterView = (GPUImageView *)self.view;
- [filter addTarget:filterView];
-
- [stillCamera startCameraCapture];
-
-This will give you a live, filtered feed of the still camera's preview video. Note that this preview video is only provided on iOS 4.3 and higher, so you may need to set that as your deployment target if you wish to have this functionality.
-
-Once you want to capture a photo, you use a callback block like the following:
-
- [stillCamera capturePhotoProcessedUpToFilter:filter withCompletionHandler:^(UIImage *processedImage, NSError *error){
- NSData *dataForJPEGFile = UIImageJPEGRepresentation(processedImage, 0.8);
-
- NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
- NSString *documentsDirectory = [paths objectAtIndex:0];
-
- NSError *error2 = nil;
- if (![dataForJPEGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:@"FilteredPhoto.jpg"] options:NSAtomicWrite error:&error2])
- {
- return;
- }
- }];
-
-The above code captures a full-size photo processed by the same filter chain used in the preview view and saves that photo to disk as a JPEG in the application's documents directory.
-
-Note that the framework currently can't handle images larger than 2048 pixels wide or high on older devices (those before the iPhone 4S, iPad 2, or Retina iPad) due to texture size limitations. This means that the iPhone 4, whose camera outputs still photos larger than this, won't be able to capture photos like this. A tiling mechanism is being implemented to work around this. All other devices should be able to capture and filter photos using this method.
-
-### Processing a still image ###
-
-There are a couple of ways to process a still image and create a result. The first way you can do this is by creating a still image source object and manually creating a filter chain:
-
- UIImage *inputImage = [UIImage imageNamed:@"Lambeau.jpg"];
-
- GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
- GPUImageSepiaFilter *stillImageFilter = [[GPUImageSepiaFilter alloc] init];
-
- [stillImageSource addTarget:stillImageFilter];
- [stillImageFilter useNextFrameForImageCapture];
- [stillImageSource processImage];
-
- UIImage *currentFilteredVideoFrame = [stillImageFilter imageFromCurrentFramebuffer];
-
-Note that for a manual capture of an image from a filter, you need to set -useNextFrameForImageCapture in order to tell the filter that you'll be needing to capture from it later. By default, GPUImage reuses framebuffers within filters to conserve memory, so if you need to hold on to a filter's framebuffer for manual image capture, you need to let it know ahead of time.
-
-For single filters that you wish to apply to an image, you can simply do the following:
-
- GPUImageSepiaFilter *stillImageFilter2 = [[GPUImageSepiaFilter alloc] init];
- UIImage *quickFilteredImage = [stillImageFilter2 imageByFilteringImage:inputImage];
-
-
-### Writing a custom filter ###
-
-One significant advantage of this framework over Core Image on iOS (as of iOS 5.0) is the ability to write your own custom image and video processing filters. These filters are supplied as OpenGL ES 2.0 fragment shaders, written in the C-like OpenGL Shading Language.
-
-A custom filter is initialized with code like
-
- GPUImageFilter *customFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];
-
-where the extension used for the fragment shader is .fsh. Additionally, you can use the -initWithFragmentShaderFromString: initializer to provide the fragment shader as a string, if you would not like to ship your fragment shaders in your application bundle.
-
-Fragment shaders perform their calculations for each pixel to be rendered at that filter stage. They do this using the OpenGL Shading Language (GLSL), a C-like language with additions specific to 2-D and 3-D graphics. An example of a fragment shader is the following sepia-tone filter:
-
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 outputColor;
- outputColor.r = (textureColor.r * 0.393) + (textureColor.g * 0.769) + (textureColor.b * 0.189);
- outputColor.g = (textureColor.r * 0.349) + (textureColor.g * 0.686) + (textureColor.b * 0.168);
- outputColor.b = (textureColor.r * 0.272) + (textureColor.g * 0.534) + (textureColor.b * 0.131);
- outputColor.a = 1.0;
-
- gl_FragColor = outputColor;
- }
-
-For an image filter to be usable within the GPUImage framework, the first two lines that take in the textureCoordinate varying (for the current coordinate within the texture, normalized to 1.0) and the inputImageTexture uniform (for the actual input image frame texture) are required.
-
-The remainder of the shader grabs the color of the pixel at this location in the passed-in texture, manipulates it in such a way as to produce a sepia tone, and writes that pixel color out to be used in the next stage of the processing pipeline.
-
-One thing to note when adding fragment shaders to your Xcode project is that Xcode thinks they are source code files. To work around this, you'll need to manually move your shader from the Compile Sources build phase to the Copy Bundle Resources one in order to get the shader to be included in your application bundle.
-
-
-### Filtering and re-encoding a movie ###
-
-Movies can be loaded into the framework via the GPUImageMovie class, filtered, and then written out using a GPUImageMovieWriter. GPUImageMovieWriter is also fast enough to record video in realtime from an iPhone 4's camera at 640x480, so a direct filtered video source can be fed into it. Currently, GPUImageMovieWriter is fast enough to record live 720p video at up to 20 FPS on the iPhone 4, and both 720p and 1080p video at 30 FPS on the iPhone 4S (as well as on the new iPad).
-
-The following is an example of how you would load a sample movie, pass it through a pixellation filter, then record the result to disk as a 480 x 640 h.264 movie:
-
- movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
- pixellateFilter = [[GPUImagePixellateFilter alloc] init];
-
- [movieFile addTarget:pixellateFilter];
-
- NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
- unlink([pathToMovie UTF8String]);
- NSURL *movieURL = [NSURL fileURLWithPath:pathToMovie];
-
- movieWriter = [[GPUImageMovieWriter alloc] initWithMovieURL:movieURL size:CGSizeMake(480.0, 640.0)];
- [pixellateFilter addTarget:movieWriter];
-
- movieWriter.shouldPassthroughAudio = YES;
- movieFile.audioEncodingTarget = movieWriter;
- [movieFile enableSynchronizedEncodingUsingMovieWriter:movieWriter];
-
- [movieWriter startRecording];
- [movieFile startProcessing];
-
-Once recording is finished, you need to remove the movie recorder from the filter chain and close off the recording using code like the following:
-
- [pixellateFilter removeTarget:movieWriter];
- [movieWriter finishRecording];
-
-A movie won't be usable until it has been finished off, so if this is interrupted before this point, the recording will be lost.
-
-### Interacting with OpenGL ES ###
-
-GPUImage can both export and import textures from OpenGL ES through the use of its GPUImageTextureOutput and GPUImageTextureInput classes, respectively. This lets you record a movie from an OpenGL ES scene that is rendered to a framebuffer object with a bound texture, or filter video or images and then feed them into OpenGL ES as a texture to be displayed in the scene.
-
-The one caution with this approach is that the textures used in these processes must be shared between GPUImage's OpenGL ES context and any other context via a share group or something similar.
-
-## Built-in filters ##
-
-There are currently 125 built-in filters, divided into the following categories:
-
-### Color adjustments ###
-
-- **GPUImageBrightnessFilter**: Adjusts the brightness of the image
- - *brightness*: The adjusted brightness (-1.0 - 1.0, with 0.0 as the default)
-
-- **GPUImageExposureFilter**: Adjusts the exposure of the image
- - *exposure*: The adjusted exposure (-10.0 - 10.0, with 0.0 as the default)
-
-- **GPUImageContrastFilter**: Adjusts the contrast of the image
- - *contrast*: The adjusted contrast (0.0 - 4.0, with 1.0 as the default)
-
-- **GPUImageSaturationFilter**: Adjusts the saturation of an image
- - *saturation*: The degree of saturation or desaturation to apply to the image (0.0 - 2.0, with 1.0 as the default)
-
-- **GPUImageGammaFilter**: Adjusts the gamma of an image
- - *gamma*: The gamma adjustment to apply (0.0 - 3.0, with 1.0 as the default)
-
-- **GPUImageLevelsFilter**: Photoshop-like levels adjustment. The min, max, minOut and maxOut parameters are floats in the range [0, 1]. If you have parameters from Photoshop in the range [0, 255] you must first convert them to be [0, 1]. The gamma/mid parameter is a float >= 0. This matches the value from Photoshop. If you want to apply levels to RGB as well as individual channels you need to use this filter twice - first for the individual channels and then for all channels.
-
-- **GPUImageColorMatrixFilter**: Transforms the colors of an image by applying a matrix to them
- - *colorMatrix*: A 4x4 matrix used to transform each color in an image
- - *intensity*: The degree to which the new transformed color replaces the original color for each pixel
-
-- **GPUImageRGBFilter**: Adjusts the individual RGB channels of an image
- - *red*: Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
- - *green*:
- - *blue*:
-
-- **GPUImageHueFilter**: Adjusts the hue of an image
- - *hue*: The hue angle, in degrees. 90 degrees by default
-
-- **GPUImageToneCurveFilter**: Adjusts the colors of an image based on spline curves for each color channel.
- - *redControlPoints*:
- - *greenControlPoints*:
- - *blueControlPoints*:
- - *rgbCompositeControlPoints*: The tone curve takes in a series of control points that define the spline curve for each color component, or for all three in the composite. These are stored as NSValue-wrapped CGPoints in an NSArray, with normalized X and Y coordinates from 0 - 1. The defaults are (0,0), (0.5,0.5), (1,1).
-
-- **GPUImageHighlightShadowFilter**: Adjusts the shadows and highlights of an image
- - *shadows*: Increase to lighten shadows, from 0.0 to 1.0, with 0.0 as the default.
- - *highlights*: Decrease to darken highlights, from 0.0 to 1.0, with 1.0 as the default.
-
-- **GPUImageLookupFilter**: Uses an RGB color lookup image to remap the colors in an image. First, use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources. For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work). If you need a more complex filter you can create as many lookup tables as required. Once ready, use your new lookup.png file as a second input for GPUImageLookupFilter.
-
-- **GPUImageAmatorkaFilter**: A photo filter based on a Photoshop action by Amatorka: http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631 . If you want to use this effect you have to add lookup_amatorka.png from the GPUImage Resources folder to your application bundle.
-
-- **GPUImageMissEtikateFilter**: A photo filter based on a Photoshop action by Miss Etikate: http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961 . If you want to use this effect you have to add lookup_miss_etikate.png from the GPUImage Resources folder to your application bundle.
-
-- **GPUImageSoftEleganceFilter**: Another lookup-based color remapping filter. If you want to use this effect you have to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from the GPUImage Resources folder to your application bundle.
-
-- **GPUImageColorInvertFilter**: Inverts the colors of an image
-
-- **GPUImageGrayscaleFilter**: Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
-
-- **GPUImageMonochromeFilter**: Converts the image to a single-color version, based on the luminance of each pixel
- - *intensity*: The degree to which the specific color replaces the normal image color (0.0 - 1.0, with 1.0 as the default)
- - *color*: The color to use as the basis for the effect, with (0.6, 0.45, 0.3, 1.0) as the default.
-
-- **GPUImageFalseColorFilter**: Uses the luminance of the image to mix between two user-specified colors
- - *firstColor*: The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
- - *secondColor*:
-
-- **GPUImageHazeFilter**: Used to add or remove haze (similar to a UV filter)
- - *distance*: Strength of the color applied. Default 0. Values between -.3 and .3 are best.
- - *slope*: Amount of color change. Default 0. Values between -.3 and .3 are best.
-
-- **GPUImageSepiaFilter**: Simple sepia tone filter
- - *intensity*: The degree to which the sepia tone replaces the normal image color (0.0 - 1.0, with 1.0 as the default)
-
-- **GPUImageOpacityFilter**: Adjusts the alpha channel of the incoming image
- - *opacity*: The value to multiply the incoming alpha channel for each pixel by (0.0 - 1.0, with 1.0 as the default)
-
-- **GPUImageSolidColorGenerator**: This outputs a generated image with a solid color. You need to define the image size using -forceProcessingAtSize:
- - *color*: The color, in a four component format, that is used to fill the image.
-
-- **GPUImageLuminanceThresholdFilter**: Pixels with a luminance above the threshold will appear white, and those below will be black
- - *threshold*: The luminance threshold, from 0.0 to 1.0, with a default of 0.5
-
-- **GPUImageAdaptiveThresholdFilter**: Determines the local luminance around a pixel, then turns the pixel black if it is below that local luminance and white if above. This can be useful for picking out text under varying lighting conditions.
- - *blurRadiusInPixels*: A multiplier for the background averaging blur radius in pixels, with a default of 4.
-
-- **GPUImageAverageLuminanceThresholdFilter**: This applies a thresholding operation where the threshold is continually adjusted based on the average luminance of the scene.
- - *thresholdMultiplier*: This is a factor that the average luminance will be multiplied by in order to arrive at the final threshold to use. By default, this is 1.0.
-
-- **GPUImageHistogramFilter**: This analyzes the incoming image and creates an output histogram with the frequency at which each color value occurs. The output of this filter is a 3-pixel-high, 256-pixel-wide image with the center (vertical) pixels containing pixels that correspond to the frequency at which various color values occurred. Each color value occupies one of the 256 width positions, from 0 on the left to 255 on the right. This histogram can be generated for individual color channels (kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue), the luminance of the image (kGPUImageHistogramLuminance), or for all three color channels at once (kGPUImageHistogramRGB).
- - *downsamplingFactor*: Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1. This is needed to keep from saturating the histogram, which can only record 256 pixels for each color value before it becomes overloaded.
-
-- **GPUImageHistogramGenerator**: This is a special filter, in that it's primarily intended to work with the GPUImageHistogramFilter. It generates an output representation of the color histograms generated by GPUImageHistogramFilter, but it could be repurposed to display other kinds of values. It takes in an image and looks at the center (vertical) pixels. It then plots the numerical values of the RGB components in separate colored graphs in an output texture. You may need to force a size for this filter in order to make its output visible.
-
-- **GPUImageAverageColor**: This processes an input image and determines the average color of the scene, by averaging the RGBA components for each pixel in the image. A reduction process is used to progressively downsample the source image on the GPU, followed by a short averaging calculation on the CPU. The output from this filter is meaningless, but you need to set the colorAverageProcessingFinishedBlock property to a block that takes in four color components and a frame time and does something with them.
-
-- **GPUImageLuminosity**: Like the GPUImageAverageColor, this reduces an image to its average luminosity. You need to set the luminosityProcessingFinishedBlock to handle the output of this filter, which just returns a luminosity value and a frame time.
-
-- **GPUImageChromaKeyFilter**: For a given color in the image, sets the alpha channel to 0. This is similar to the GPUImageChromaKeyBlendFilter, only instead of blending in a second image for a matching color this doesn't take in a second image and just turns a given color transparent.
- - *thresholdSensitivity*: How close a color match needs to exist to the target color to be replaced (default of 0.4)
- - *smoothing*: How smoothly to blend for the color match (default of 0.1)
-
-### Image processing ###
-
-- **GPUImageTransformFilter**: This applies an arbitrary 2-D or 3-D transformation to an image
- - *affineTransform*: This takes in a CGAffineTransform to adjust an image in 2-D
- - *transform3D*: This takes in a CATransform3D to manipulate an image in 3-D
- - *ignoreAspectRatio*: By default, the aspect ratio of the transformed image is maintained, but this can be set to YES to make the transformation independent of aspect ratio
-
-- **GPUImageCropFilter**: This crops an image to a specific region, then passes only that region on to the next stage in the filter
- - *cropRegion*: A rectangular area to crop out of the image, normalized to coordinates from 0.0 - 1.0. The (0.0, 0.0) position is in the upper left of the image.
-
-- **GPUImageLanczosResamplingFilter**: This lets you up- or downsample an image using Lanczos resampling, which results in noticeably better quality than the standard linear or trilinear interpolation. Simply use -forceProcessingAtSize: to set the target output resolution for the filter, and the image will be resampled for that new size.
-
-- **GPUImageSharpenFilter**: Sharpens the image
- - *sharpness*: The sharpness adjustment to apply (-4.0 - 4.0, with 0.0 as the default)
-
-- **GPUImageUnsharpMaskFilter**: Applies an unsharp mask
- - *blurRadiusInPixels*: The blur radius of the underlying Gaussian blur. The default is 4.0.
- - *intensity*: The strength of the sharpening, from 0.0 on up, with a default of 1.0
-
-- **GPUImageGaussianBlurFilter**: A hardware-optimized, variable-radius Gaussian blur
- - *texelSpacingMultiplier*: A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result. Highly recommend using other parameters first, before touching this one.
- - *blurRadiusInPixels*: A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
- - *blurRadiusAsFractionOfImageWidth*:
- - *blurRadiusAsFractionOfImageHeight*: Setting these properties will allow the blur radius to scale with the size of the image
- - *blurPasses*: The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
-
-- **GPUImageBoxBlurFilter**: A hardware-optimized, variable-radius box blur
- - *texelSpacingMultiplier*: A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result. Highly recommend using other parameters first, before touching this one.
- - *blurRadiusInPixels*: A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
- - *blurRadiusAsFractionOfImageWidth*:
- - *blurRadiusAsFractionOfImageHeight*: Setting these properties will allow the blur radius to scale with the size of the image
- - *blurPasses*: The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
-
-- **GPUImageSingleComponentGaussianBlurFilter**: A modification of the GPUImageGaussianBlurFilter that operates only on the red component
- - *texelSpacingMultiplier*: A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result. Highly recommend using other parameters first, before touching this one.
- - *blurRadiusInPixels*: A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
- - *blurRadiusAsFractionOfImageWidth*:
- - *blurRadiusAsFractionOfImageHeight*: Setting these properties will allow the blur radius to scale with the size of the image
- - *blurPasses*: The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
-
-- **GPUImageGaussianSelectiveBlurFilter**: A Gaussian blur that preserves focus within a circular region
- - *blurRadiusInPixels*: A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
- - *excludeCircleRadius*: The radius of the circular area being excluded from the blur
- - *excludeCirclePoint*: The center of the circular area being excluded from the blur
- - *excludeBlurSize*: The size of the area between the blurred portion and the clear circle
- - *aspectRatio*: The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
-
-- **GPUImageGaussianBlurPositionFilter**: The inverse of the GPUImageGaussianSelectiveBlurFilter, applying the blur only within a certain circle
- - *blurSize*: A multiplier for the size of the blur, ranging from 0.0 on up, with a default of 1.0
- - *blurCenter*: Center for the blur, defaults to 0.5, 0.5
- - *blurRadius*: Radius for the blur, defaults to 1.0
-
-- **GPUImageiOSBlurFilter**: An attempt to replicate the background blur used on iOS 7 in places like the control center.
- - *blurRadiusInPixels*: A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
- - *saturation*: Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
- - *downsampling*: The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, with a default of 4.0.
-
-- **GPUImageMedianFilter**: Takes the median value of the three color components, over a 3x3 area
-
-- **GPUImageBilateralFilter**: A bilateral blur, which tries to blur similar color values while preserving sharp edges
- - *texelSpacingMultiplier*: A multiplier for the spacing between texel reads, ranging from 0.0 on up, with a default of 4.0
- - *distanceNormalizationFactor*: A normalization factor for the distance between central color and sample color, with a default of 8.0.
-
-- **GPUImageTiltShiftFilter**: A simulated tilt shift lens effect
- - *blurRadiusInPixels*: The radius of the underlying blur, in pixels. This is 7.0 by default.
- - *topFocusLevel*: The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
- - *bottomFocusLevel*: The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
- - *focusFallOffRate*: The rate at which the image gets blurry away from the in-focus region, default 0.2
-
-- **GPUImage3x3ConvolutionFilter**: Runs a 3x3 convolution kernel against the image
- - *convolutionKernel*: The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels. The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three. If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
-
-- **GPUImageSobelEdgeDetectionFilter**: Sobel edge detection, with edges highlighted in white
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *edgeStrength*: Adjusts the dynamic range of the filter. Higher values lead to stronger edges, but can saturate the intensity colorspace. Default is 1.0.
-
-- **GPUImagePrewittEdgeDetectionFilter**: Prewitt edge detection, with edges highlighted in white
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *edgeStrength*: Adjusts the dynamic range of the filter. Higher values lead to stronger edges, but can saturate the intensity colorspace. Default is 1.0.
-
-- **GPUImageThresholdEdgeDetectionFilter**: Performs Sobel edge detection, but applies a threshold instead of giving gradual strength values
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *edgeStrength*: Adjusts the dynamic range of the filter. Higher values lead to stronger edges, but can saturate the intensity colorspace. Default is 1.0.
- - *threshold*: Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
-
-- **GPUImageCannyEdgeDetectionFilter**: This uses the full Canny process to highlight one-pixel-wide edges
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *blurRadiusInPixels*: The underlying blur radius for the Gaussian blur. Default is 2.0.
- - *blurTexelSpacingMultiplier*: The underlying blur texel spacing multiplier. Default is 1.0.
- - *upperThreshold*: Any edge with a gradient magnitude above this threshold will pass and show up in the final result. Default is 0.4.
- - *lowerThreshold*: Any edge with a gradient magnitude below this threshold will fail and be removed from the final result. Default is 0.1.
-
-- **GPUImageHarrisCornerDetectionFilter**: Runs the Harris corner detection algorithm on an input image, and produces an image with those corner points as white pixels and everything else black. The cornersDetectedBlock can be set, and you will be provided with a list of corners (in normalized 0..1 X, Y coordinates) within that callback for whatever additional operations you want to perform.
- - *blurRadiusInPixels*: The radius of the underlying Gaussian blur. The default is 2.0.
- - *sensitivity*: An internal scaling factor applied to adjust the dynamic range of the cornerness maps generated in the filter. The default is 5.0.
- - *threshold*: The threshold at which a point is detected as a corner. This can vary significantly based on the size, lighting conditions, and iOS device camera type, so it might take a little experimentation to get right for your cases. Default is 0.20.
-
-- **GPUImageNobleCornerDetectionFilter**: Runs the Noble variant on the Harris corner detector. It behaves as described above for the Harris detector.
- - *blurRadiusInPixels*: The radius of the underlying Gaussian blur. The default is 2.0.
- - *sensitivity*: An internal scaling factor applied to adjust the dynamic range of the cornerness maps generated in the filter. The default is 5.0.
- - *threshold*: The threshold at which a point is detected as a corner. This can vary significantly based on the size, lighting conditions, and iOS device camera type, so it might take a little experimentation to get right for your cases. Default is 0.2.
-
-- **GPUImageShiTomasiCornerDetectionFilter**: Runs the Shi-Tomasi feature detector. It behaves as described above for the Harris detector.
- - *blurRadiusInPixels*: The radius of the underlying Gaussian blur. The default is 2.0.
- - *sensitivity*: An internal scaling factor applied to adjust the dynamic range of the cornerness maps generated in the filter. The default is 1.5.
- - *threshold*: The threshold at which a point is detected as a corner. This can vary significantly based on the size, lighting conditions, and iOS device camera type, so it might take a little experimentation to get right for your cases. Default is 0.2.
-
-- **GPUImageNonMaximumSuppressionFilter**: Currently used only as part of the Harris corner detection filter, this will sample a 1-pixel box around each pixel and determine if the center pixel's red channel is the maximum in that area. If it is, it stays. If not, it is set to 0 for all color components.
-
-- **GPUImageXYDerivativeFilter**: An internal component within the Harris corner detection filter, this calculates the squared difference between the pixels to the left and right of this one, the squared difference of the pixels above and below this one, and the product of those two differences.
-
-- **GPUImageCrosshairGenerator**: This draws a series of crosshairs on an image, most often used for identifying machine vision features. It does not take in a standard image like other filters, but a series of points in its -renderCrosshairsFromArray:count: method, which does the actual drawing. You will need to force this filter to render at the particular output size you need.
- - *crosshairWidth*: The width, in pixels, of the crosshairs to be drawn onscreen.
-
-- **GPUImageDilationFilter**: This performs an image dilation operation, where the maximum intensity of the red channel in a rectangular neighborhood is used for the intensity of this pixel. The radius of the rectangular area to sample over is specified on initialization, with a range of 1-4 pixels. This is intended for use with grayscale images, and it expands bright regions.
-
-- **GPUImageRGBDilationFilter**: This is the same as the GPUImageDilationFilter, except that this acts on all color channels, not just the red channel.
-
-- **GPUImageErosionFilter**: This performs an image erosion operation, where the minimum intensity of the red channel in a rectangular neighborhood is used for the intensity of this pixel. The radius of the rectangular area to sample over is specified on initialization, with a range of 1-4 pixels. This is intended for use with grayscale images, and it expands dark regions.
-
-- **GPUImageRGBErosionFilter**: This is the same as the GPUImageErosionFilter, except that this acts on all color channels, not just the red channel.
-
-- **GPUImageOpeningFilter**: This performs an erosion on the red channel of an image, followed by a dilation of the same radius. The radius is set on initialization, with a range of 1-4 pixels. This filters out smaller bright regions.
-
-- **GPUImageRGBOpeningFilter**: This is the same as the GPUImageOpeningFilter, except that this acts on all color channels, not just the red channel.
-
-- **GPUImageClosingFilter**: This performs a dilation on the red channel of an image, followed by an erosion of the same radius. The radius is set on initialization, with a range of 1-4 pixels. This filters out smaller dark regions.
-
-- **GPUImageRGBClosingFilter**: This is the same as the GPUImageClosingFilter, except that this acts on all color channels, not just the red channel.
-
-- **GPUImageLocalBinaryPatternFilter**: This performs a comparison of intensity of the red channel of the 8 surrounding pixels and that of the central one, encoding the comparison results in a bit string that becomes this pixel intensity. The least-significant bit is the top-right comparison, going counterclockwise to end at the right comparison as the most significant bit.
-
-- **GPUImageLowPassFilter**: This applies a low pass filter to incoming video frames. This basically accumulates a weighted rolling average of previous frames with the current ones as they come in. This can be used to denoise video, add motion blur, or be used to create a high pass filter.
- - *filterStrength*: This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
-
-- **GPUImageHighPassFilter**: This applies a high pass filter to incoming video frames. This is the inverse of the low pass filter, showing the difference between the current frame and the weighted rolling average of previous ones. This is most useful for motion detection.
- - *filterStrength*: This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
-
-- **GPUImageMotionDetector**: This is a motion detector based on a high-pass filter. You set the motionDetectionBlock and on every incoming frame it will give you the centroid of any detected movement in the scene (in normalized X,Y coordinates) as well as an intensity of motion for the scene.
- - *lowPassFilterStrength*: This controls the strength of the low pass filter used behind the scenes to establish the baseline that incoming frames are compared with. This ranges from 0.0 to 1.0, with a default of 0.5.
-
-- **GPUImageHoughTransformLineDetector**: Detects lines in the image using a Hough transform into parallel coordinate space. This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology and described in their publications: M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7 (http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf) and M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494 (http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf).
- - *edgeThreshold*: A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.
- - *lineDetectionThreshold*: A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.
- - *linesDetectedBlock*: This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame.
-
-- **GPUImageLineGenerator**: A helper class that generates lines which can overlay the scene. The color of these lines can be adjusted using -setLineColorRed:green:blue:
- - *lineWidth*: The width of the lines, in pixels, with a default of 1.0.
-
-- **GPUImageMotionBlurFilter**: Applies a directional motion blur to an image
- - *blurSize*: A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
- - *blurAngle*: The angular direction of the blur, in degrees. 0 degrees by default.
-
- - **GPUImageZoomBlurFilter**: Applies a zoom blur, radiating outward from a center point, to an image
- - *blurSize*: A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
- - *blurCenter*: The normalized center of the blur. (0.5, 0.5) by default
-
-### Blending modes ###
-
-- **GPUImageChromaKeyBlendFilter**: Selectively replaces a color in the first image with the second image
- - *thresholdSensitivity*: How close a color match needs to exist to the target color to be replaced (default of 0.4)
- - *smoothing*: How smoothly to blend for the color match (default of 0.1)
-
-- **GPUImageDissolveBlendFilter**: Applies a dissolve blend of two images
- - *mix*: The degree with which the second image overrides the first (0.0 - 1.0, with 0.5 as the default)
-
-- **GPUImageMultiplyBlendFilter**: Applies a multiply blend of two images
-
-- **GPUImageAddBlendFilter**: Applies an additive blend of two images
-
-- **GPUImageSubtractBlendFilter**: Applies a subtractive blend of two images
-
-- **GPUImageDivideBlendFilter**: Applies a division blend of two images
-
-- **GPUImageOverlayBlendFilter**: Applies an overlay blend of two images
-
-- **GPUImageDarkenBlendFilter**: Blends two images by taking the minimum value of each color component between the images
-
-- **GPUImageLightenBlendFilter**: Blends two images by taking the maximum value of each color component between the images
-
-- **GPUImageColorBurnBlendFilter**: Applies a color burn blend of two images
-
-- **GPUImageColorDodgeBlendFilter**: Applies a color dodge blend of two images
-
-- **GPUImageScreenBlendFilter**: Applies a screen blend of two images
-
-- **GPUImageExclusionBlendFilter**: Applies an exclusion blend of two images
-
-- **GPUImageDifferenceBlendFilter**: Applies a difference blend of two images
-
-- **GPUImageHardLightBlendFilter**: Applies a hard light blend of two images
-
-- **GPUImageSoftLightBlendFilter**: Applies a soft light blend of two images
-
-- **GPUImageAlphaBlendFilter**: Blends the second image over the first, based on the second's alpha channel
- - *mix*: The degree with which the second image overrides the first (0.0 - 1.0, with 1.0 as the default)
-
-- **GPUImageSourceOverBlendFilter**: Applies a source over blend of two images
-
-- **GPUImageColorBurnBlendFilter**: Applies a color burn blend of two images
-
-- **GPUImageColorDodgeBlendFilter**: Applies a color dodge blend of two images
-
-- **GPUImageNormalBlendFilter**: Applies a normal blend of two images
-
-- **GPUImageColorBlendFilter**: Applies a color blend of two images
-
-- **GPUImageHueBlendFilter**: Applies a hue blend of two images
-
-- **GPUImageSaturationBlendFilter**: Applies a saturation blend of two images
-
-- **GPUImageLuminosityBlendFilter**: Applies a luminosity blend of two images
-
-- **GPUImageLinearBurnBlendFilter**: Applies a linear burn blend of two images
-
-- **GPUImagePoissonBlendFilter**: Applies a Poisson blend of two images
- - *mix*: Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
- - *numIterations*: The number of times to propagate the gradients. Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
-
-- **GPUImageMaskFilter**: Masks one image using another
-
-### Visual effects ###
-
-- **GPUImagePixellateFilter**: Applies a pixellation effect on an image or video
- - *fractionalWidthOfAPixel*: How large the pixels are, as a fraction of the width and height of the image (0.0 - 1.0, default 0.05)
-
-- **GPUImagePolarPixellateFilter**: Applies a pixellation effect on an image or video, based on polar coordinates instead of Cartesian ones
- - *center*: The center about which to apply the pixellation, defaulting to (0.5, 0.5)
- - *pixelSize*: The fractional pixel size, split into width and height components. The default is (0.05, 0.05)
-
-- **GPUImagePolkaDotFilter**: Breaks an image up into colored dots within a regular grid
- - *fractionalWidthOfAPixel*: How large the dots are, as a fraction of the width and height of the image (0.0 - 1.0, default 0.05)
- - *dotScaling*: What fraction of each grid space is taken up by a dot, from 0.0 to 1.0 with a default of 0.9.
-
-- **GPUImageHalftoneFilter**: Applies a halftone effect to an image, like news print
- - *fractionalWidthOfAPixel*: How large the halftone dots are, as a fraction of the width and height of the image (0.0 - 1.0, default 0.05)
-
-- **GPUImageCrosshatchFilter**: This converts an image into a black-and-white crosshatch pattern
- - *crossHatchSpacing*: The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
- - *lineWidth*: A relative width for the crosshatch lines. The default is 0.003.
-
-- **GPUImageSketchFilter**: Converts video to look like a sketch. This is just the Sobel edge detection filter with the colors inverted
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *edgeStrength*: Adjusts the dynamic range of the filter. Higher values lead to stronger edges, but can saturate the intensity colorspace. Default is 1.0.
-
-- **GPUImageThresholdSketchFilter**: Same as the sketch filter, only the edges are thresholded instead of being grayscale
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *edgeStrength*: Adjusts the dynamic range of the filter. Higher values lead to stronger edges, but can saturate the intensity colorspace. Default is 1.0.
- - *threshold*: Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
-
-- **GPUImageToonFilter**: This uses Sobel edge detection to place a black border around objects, and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *threshold*: The sensitivity of the edge detection, with lower values being more sensitive. Ranges from 0.0 to 1.0, with 0.2 as the default
- - *quantizationLevels*: The number of color levels to represent in the final image. Default is 10.0
-
-- **GPUImageSmoothToonFilter**: This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
- - *texelWidth*:
- - *texelHeight*: These parameters affect the visibility of the detected edges
- - *blurRadiusInPixels*: The radius of the underlying Gaussian blur. The default is 2.0.
- - *threshold*: The sensitivity of the edge detection, with lower values being more sensitive. Ranges from 0.0 to 1.0, with 0.2 as the default
- - *quantizationLevels*: The number of color levels to represent in the final image. Default is 10.0
-
-- **GPUImageEmbossFilter**: Applies an embossing effect on the image
- - *intensity*: The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
-
-- **GPUImagePosterizeFilter**: This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
- - *colorLevels*: The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
-
-- **GPUImageSwirlFilter**: Creates a swirl distortion on the image
- - *radius*: The radius from the center to apply the distortion, with a default of 0.5
- - *center*: The center of the image (in normalized coordinates from 0 - 1.0) about which to twist, with a default of (0.5, 0.5)
- - *angle*: The amount of twist to apply to the image, with a default of 1.0
-
-- **GPUImageBulgeDistortionFilter**: Creates a bulge distortion on the image
- - *radius*: The radius from the center to apply the distortion, with a default of 0.25
- - *center*: The center of the image (in normalized coordinates from 0 - 1.0) about which to distort, with a default of (0.5, 0.5)
- - *scale*: The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
-
-- **GPUImagePinchDistortionFilter**: Creates a pinch distortion of the image
- - *radius*: The radius from the center to apply the distortion, with a default of 1.0
- - *center*: The center of the image (in normalized coordinates from 0 - 1.0) about which to distort, with a default of (0.5, 0.5)
- - *scale*: The amount of distortion to apply, from -2.0 to 2.0, with a default of 1.0
-
-- **GPUImageStretchDistortionFilter**: Creates a stretch distortion of the image
- - *center*: The center of the image (in normalized coordinates from 0 - 1.0) about which to distort, with a default of (0.5, 0.5)
-
-- **GPUImageSphereRefractionFilter**: Simulates the refraction through a glass sphere
- - *center*: The center about which to apply the distortion, with a default of (0.5, 0.5)
- - *radius*: The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
- - *refractiveIndex*: The index of refraction for the sphere, with a default of 0.71
-
-- **GPUImageGlassSphereFilter**: Same as the GPUImageSphereRefractionFilter, only the image is not inverted and there's a little bit of frosting at the edges of the glass
- - *center*: The center about which to apply the distortion, with a default of (0.5, 0.5)
- - *radius*: The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
- - *refractiveIndex*: The index of refraction for the sphere, with a default of 0.71
-
-- **GPUImageVignetteFilter**: Performs a vignetting effect, fading out the image at the edges
- - *x*:
- - *y*: The directional intensity of the vignetting, with a default of x = 0.75, y = 0.5
-
-- **GPUImageKuwaharaFilter**: Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
- - *radius*: In integer specifying the number of pixels out from the center pixel to test when applying the filter, with a default of 4. A higher value creates a more abstracted image, but at the cost of much greater processing time.
-
-- **GPUImageKuwaharaRadius3Filter**: A modified version of the Kuwahara filter, optimized to work over just a radius of three pixels
-
-- **GPUImagePerlinNoiseFilter**: Generates an image full of Perlin noise
- - *colorStart*:
- - *colorFinish*: The color range for the noise being generated
- - *scale*: The scaling of the noise being generated
-
-- **GPUImageCGAColorspaceFilter**: Simulates the colorspace of a CGA monitor
-
-- **GPUImageMosaicFilter**: This filter takes an input tileset, the tiles must ascend in luminance. It looks at the input image and replaces each display tile with an input tile according to the luminance of that tile. The idea was to replicate the ASCII video filters seen in other apps, but the tileset can be anything.
- - *inputTileSize*:
- - *numTiles*:
- - *displayTileSize*:
- - *colorOn*:
-
-- **GPUImageJFAVoronoiFilter**: Generates a Voronoi map, for use in a later stage.
- - *sizeInPixels*: Size of the individual elements
-
-- **GPUImageVoronoiConsumerFilter**: Takes in the Voronoi map, and uses that to filter an incoming image.
- - *sizeInPixels*: Size of the individual elements
-
-You can also easily write your own custom filters using the C-like OpenGL Shading Language, as described above.
-
-## Sample applications ##
-
-Several sample applications are bundled with the framework source. Most are compatible with both iPhone and iPad-class devices. They attempt to show off various aspects of the framework and should be used as the best examples of the API while the framework is under development. These include:
-
-### SimpleImageFilter ###
-
-A bundled JPEG image is loaded into the application at launch, a filter is applied to it, and the result rendered to the screen. Additionally, this sample shows two ways of taking in an image, filtering it, and saving it to disk.
-
-### SimpleVideoFilter ###
-
-A pixellate filter is applied to a live video stream, with a UISlider control that lets you adjust the pixel size on the live video.
-
-### SimpleVideoFileFilter ###
-
-A movie file is loaded from disk, an unsharp mask filter is applied to it, and the filtered result is re-encoded as another movie.
-
-### MultiViewFilterExample ###
-
-From a single camera feed, four views are populated with realtime filters applied to camera. One is just the straight camera video, one is a preprogrammed sepia tone, and two are custom filters based on shader programs.
-
-### FilterShowcase ###
-
-This demonstrates every filter supplied with GPUImage.
-
-### BenchmarkSuite ###
-
-This is used to test the performance of the overall framework by testing it against CPU-bound routines and Core Image. Benchmarks involving still images and video are run against all three, with results displayed in-application.
-
-### CubeExample ###
-
-This demonstrates the ability of GPUImage to interact with OpenGL ES rendering. Frames are captured from the camera, a sepia filter applied to them, and then they are fed into a texture to be applied to the face of a cube you can rotate with your finger. This cube in turn is rendered to a texture-backed framebuffer object, and that texture is fed back into GPUImage to have a pixellation filter applied to it before rendering to screen.
-
-In other words, the path of this application is camera -> sepia tone filter -> cube -> pixellation filter -> display.
-
-### ColorObjectTracking ###
-
-A version of my ColorTracking example from http://www.sunsetlakesoftware.com/2010/10/22/gpu-accelerated-video-processing-mac-and-ios ported across to use GPUImage, this application uses color in a scene to track objects from a live camera feed. The four views you can switch between include the raw camera feed, the camera feed with pixels matching the color threshold in white, the processed video where positions are encoded as colors within the pixels passing the threshold test, and finally the live video feed with a dot that tracks the selected color. Tapping the screen changes the color to track to match the color of the pixels under your finger. Tapping and dragging on the screen makes the color threshold more or less forgiving. This is most obvious on the second, color thresholding view.
-
-Currently, all processing for the color averaging in the last step is done on the CPU, so this is part is extremely slow.
diff --git a/Example/Pods/GPUImage/framework/Resources/lookup.png b/Example/Pods/GPUImage/framework/Resources/lookup.png
deleted file mode 100644
index ed814df..0000000
Binary files a/Example/Pods/GPUImage/framework/Resources/lookup.png and /dev/null differ
diff --git a/Example/Pods/GPUImage/framework/Resources/lookup_amatorka.png b/Example/Pods/GPUImage/framework/Resources/lookup_amatorka.png
deleted file mode 100644
index 4a2cc8a..0000000
Binary files a/Example/Pods/GPUImage/framework/Resources/lookup_amatorka.png and /dev/null differ
diff --git a/Example/Pods/GPUImage/framework/Resources/lookup_miss_etikate.png b/Example/Pods/GPUImage/framework/Resources/lookup_miss_etikate.png
deleted file mode 100644
index e1317d7..0000000
Binary files a/Example/Pods/GPUImage/framework/Resources/lookup_miss_etikate.png and /dev/null differ
diff --git a/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_1.png b/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_1.png
deleted file mode 100644
index 525437a..0000000
Binary files a/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_1.png and /dev/null differ
diff --git a/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_2.png b/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_2.png
deleted file mode 100644
index ccc6d4e..0000000
Binary files a/Example/Pods/GPUImage/framework/Resources/lookup_soft_elegance_2.png and /dev/null differ
diff --git a/Example/Pods/GPUImage/framework/Source/GLProgram.h b/Example/Pods/GPUImage/framework/Source/GLProgram.h
deleted file mode 100755
index cd455e7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GLProgram.h
+++ /dev/null
@@ -1,42 +0,0 @@
-// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
-// A description of this can be found at his page on the topic:
-// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
-// I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
-
-#import
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-#import
-#import
-#else
-#import
-#import
-#endif
-
-@interface GLProgram : NSObject
-{
- NSMutableArray *attributes;
- NSMutableArray *uniforms;
- GLuint program,
- vertShader,
- fragShader;
-}
-
-@property(readwrite, nonatomic) BOOL initialized;
-@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
-@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
-@property(readwrite, copy, nonatomic) NSString *programLog;
-
-- (id)initWithVertexShaderString:(NSString *)vShaderString
- fragmentShaderString:(NSString *)fShaderString;
-- (id)initWithVertexShaderString:(NSString *)vShaderString
- fragmentShaderFilename:(NSString *)fShaderFilename;
-- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
- fragmentShaderFilename:(NSString *)fShaderFilename;
-- (void)addAttribute:(NSString *)attributeName;
-- (GLuint)attributeIndex:(NSString *)attributeName;
-- (GLuint)uniformIndex:(NSString *)uniformName;
-- (BOOL)link;
-- (void)use;
-- (void)validate;
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GLProgram.m b/Example/Pods/GPUImage/framework/Source/GLProgram.m
deleted file mode 100755
index 105d75f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GLProgram.m
+++ /dev/null
@@ -1,236 +0,0 @@
-// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
-// A description of this can be found at his page on the topic:
-// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
-
-
-#import "GLProgram.h"
-// START:typedefs
-#pragma mark Function Pointer Definitions
-typedef void (*GLInfoFunction)(GLuint program, GLenum pname, GLint* params);
-typedef void (*GLLogFunction) (GLuint program, GLsizei bufsize, GLsizei* length, GLchar* infolog);
-// END:typedefs
-#pragma mark -
-#pragma mark Private Extension Method Declaration
-// START:extension
-@interface GLProgram()
-
-- (BOOL)compileShader:(GLuint *)shader
- type:(GLenum)type
- string:(NSString *)shaderString;
-@end
-// END:extension
-#pragma mark -
-
-@implementation GLProgram
-// START:init
-
-@synthesize initialized = _initialized;
-
-- (id)initWithVertexShaderString:(NSString *)vShaderString
- fragmentShaderString:(NSString *)fShaderString;
-{
- if ((self = [super init]))
- {
- _initialized = NO;
-
- attributes = [[NSMutableArray alloc] init];
- uniforms = [[NSMutableArray alloc] init];
- program = glCreateProgram();
-
- if (![self compileShader:&vertShader
- type:GL_VERTEX_SHADER
- string:vShaderString])
- {
- NSLog(@"Failed to compile vertex shader");
- }
-
- // Create and compile fragment shader
- if (![self compileShader:&fragShader
- type:GL_FRAGMENT_SHADER
- string:fShaderString])
- {
- NSLog(@"Failed to compile fragment shader");
- }
-
- glAttachShader(program, vertShader);
- glAttachShader(program, fragShader);
- }
-
- return self;
-}
-
-- (id)initWithVertexShaderString:(NSString *)vShaderString
- fragmentShaderFilename:(NSString *)fShaderFilename;
-{
- NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
- NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];
-
- if ((self = [self initWithVertexShaderString:vShaderString fragmentShaderString:fragmentShaderString]))
- {
- }
-
- return self;
-}
-
-- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
- fragmentShaderFilename:(NSString *)fShaderFilename;
-{
- NSString *vertShaderPathname = [[NSBundle mainBundle] pathForResource:vShaderFilename ofType:@"vsh"];
- NSString *vertexShaderString = [NSString stringWithContentsOfFile:vertShaderPathname encoding:NSUTF8StringEncoding error:nil];
-
- NSString *fragShaderPathname = [[NSBundle mainBundle] pathForResource:fShaderFilename ofType:@"fsh"];
- NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragShaderPathname encoding:NSUTF8StringEncoding error:nil];
-
- if ((self = [self initWithVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString]))
- {
- }
-
- return self;
-}
-// END:init
-// START:compile
-- (BOOL)compileShader:(GLuint *)shader
- type:(GLenum)type
- string:(NSString *)shaderString
-{
-// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- GLint status;
- const GLchar *source;
-
- source =
- (GLchar *)[shaderString UTF8String];
- if (!source)
- {
- NSLog(@"Failed to load vertex shader");
- return NO;
- }
-
- *shader = glCreateShader(type);
- glShaderSource(*shader, 1, &source, NULL);
- glCompileShader(*shader);
-
- glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
-
- if (status != GL_TRUE)
- {
- GLint logLength;
- glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
- if (logLength > 0)
- {
- GLchar *log = (GLchar *)malloc(logLength);
- glGetShaderInfoLog(*shader, logLength, &logLength, log);
- if (shader == &vertShader)
- {
- self.vertexShaderLog = [NSString stringWithFormat:@"%s", log];
- }
- else
- {
- self.fragmentShaderLog = [NSString stringWithFormat:@"%s", log];
- }
-
- free(log);
- }
- }
-
-// CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);
-// NSLog(@"Compiled in %f ms", linkTime * 1000.0);
-
- return status == GL_TRUE;
-}
-// END:compile
-#pragma mark -
-// START:addattribute
-- (void)addAttribute:(NSString *)attributeName
-{
- if (![attributes containsObject:attributeName])
- {
- [attributes addObject:attributeName];
- glBindAttribLocation(program,
- (GLuint)[attributes indexOfObject:attributeName],
- [attributeName UTF8String]);
- }
-}
-// END:addattribute
-// START:indexmethods
-- (GLuint)attributeIndex:(NSString *)attributeName
-{
- return (GLuint)[attributes indexOfObject:attributeName];
-}
-- (GLuint)uniformIndex:(NSString *)uniformName
-{
- return glGetUniformLocation(program, [uniformName UTF8String]);
-}
-// END:indexmethods
-#pragma mark -
-// START:link
-- (BOOL)link
-{
-// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- GLint status;
-
- glLinkProgram(program);
-
- glGetProgramiv(program, GL_LINK_STATUS, &status);
- if (status == GL_FALSE)
- return NO;
-
- if (vertShader)
- {
- glDeleteShader(vertShader);
- vertShader = 0;
- }
- if (fragShader)
- {
- glDeleteShader(fragShader);
- fragShader = 0;
- }
-
- self.initialized = YES;
-
-// CFAbsoluteTime linkTime = (CFAbsoluteTimeGetCurrent() - startTime);
-// NSLog(@"Linked in %f ms", linkTime * 1000.0);
-
- return YES;
-}
-// END:link
-// START:use
-- (void)use
-{
- glUseProgram(program);
-}
-// END:use
-#pragma mark -
-
-- (void)validate;
-{
- GLint logLength;
-
- glValidateProgram(program);
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength);
- if (logLength > 0)
- {
- GLchar *log = (GLchar *)malloc(logLength);
- glGetProgramInfoLog(program, logLength, &logLength, log);
- self.programLog = [NSString stringWithFormat:@"%s", log];
- free(log);
- }
-}
-
-#pragma mark -
-// START:dealloc
-- (void)dealloc
-{
- if (vertShader)
- glDeleteShader(vertShader);
-
- if (fragShader)
- glDeleteShader(fragShader);
-
- if (program)
- glDeleteProgram(program);
-
-}
-// END:dealloc
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImage.h b/Example/Pods/GPUImage/framework/Source/GPUImage.h
deleted file mode 100755
index f7e96b3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImage.h
+++ /dev/null
@@ -1,164 +0,0 @@
-#import "GLProgram.h"
-
-// Base classes
-#import "GPUImageContext.h"
-#import "GPUImageOutput.h"
-#import "GPUImageView.h"
-#import "GPUImageVideoCamera.h"
-#import "GPUImageStillCamera.h"
-#import "GPUImageMovie.h"
-#import "GPUImagePicture.h"
-#import "GPUImageRawDataInput.h"
-#import "GPUImageRawDataOutput.h"
-#import "GPUImageMovieWriter.h"
-#import "GPUImageFilterPipeline.h"
-#import "GPUImageTextureOutput.h"
-#import "GPUImageFilterGroup.h"
-#import "GPUImageTextureInput.h"
-#import "GPUImageUIElement.h"
-#import "GPUImageBuffer.h"
-#import "GPUImageFramebuffer.h"
-#import "GPUImageFramebufferCache.h"
-
-// Filters
-#import "GPUImageFilter.h"
-#import "GPUImageTwoInputFilter.h"
-#import "GPUImagePixellateFilter.h"
-#import "GPUImagePixellatePositionFilter.h"
-#import "GPUImageSepiaFilter.h"
-#import "GPUImageColorInvertFilter.h"
-#import "GPUImageSaturationFilter.h"
-#import "GPUImageContrastFilter.h"
-#import "GPUImageExposureFilter.h"
-#import "GPUImageBrightnessFilter.h"
-#import "GPUImageLevelsFilter.h"
-#import "GPUImageSharpenFilter.h"
-#import "GPUImageGammaFilter.h"
-#import "GPUImageSobelEdgeDetectionFilter.h"
-#import "GPUImageSketchFilter.h"
-#import "GPUImageToonFilter.h"
-#import "GPUImageSmoothToonFilter.h"
-#import "GPUImageMultiplyBlendFilter.h"
-#import "GPUImageDissolveBlendFilter.h"
-#import "GPUImageKuwaharaFilter.h"
-#import "GPUImageKuwaharaRadius3Filter.h"
-#import "GPUImageVignetteFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageGaussianBlurPositionFilter.h"
-#import "GPUImageGaussianSelectiveBlurFilter.h"
-#import "GPUImageOverlayBlendFilter.h"
-#import "GPUImageDarkenBlendFilter.h"
-#import "GPUImageLightenBlendFilter.h"
-#import "GPUImageSwirlFilter.h"
-#import "GPUImageSourceOverBlendFilter.h"
-#import "GPUImageColorBurnBlendFilter.h"
-#import "GPUImageColorDodgeBlendFilter.h"
-#import "GPUImageScreenBlendFilter.h"
-#import "GPUImageExclusionBlendFilter.h"
-#import "GPUImageDifferenceBlendFilter.h"
-#import "GPUImageSubtractBlendFilter.h"
-#import "GPUImageHardLightBlendFilter.h"
-#import "GPUImageSoftLightBlendFilter.h"
-#import "GPUImageColorBlendFilter.h"
-#import "GPUImageHueBlendFilter.h"
-#import "GPUImageSaturationBlendFilter.h"
-#import "GPUImageLuminosityBlendFilter.h"
-#import "GPUImageCropFilter.h"
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImageTransformFilter.h"
-#import "GPUImageChromaKeyBlendFilter.h"
-#import "GPUImageHazeFilter.h"
-#import "GPUImageLuminanceThresholdFilter.h"
-#import "GPUImagePosterizeFilter.h"
-#import "GPUImageBoxBlurFilter.h"
-#import "GPUImageAdaptiveThresholdFilter.h"
-#import "GPUImageUnsharpMaskFilter.h"
-#import "GPUImageBulgeDistortionFilter.h"
-#import "GPUImagePinchDistortionFilter.h"
-#import "GPUImageCrosshatchFilter.h"
-#import "GPUImageCGAColorspaceFilter.h"
-#import "GPUImagePolarPixellateFilter.h"
-#import "GPUImageStretchDistortionFilter.h"
-#import "GPUImagePerlinNoiseFilter.h"
-#import "GPUImageJFAVoronoiFilter.h"
-#import "GPUImageVoronoiConsumerFilter.h"
-#import "GPUImageMosaicFilter.h"
-#import "GPUImageTiltShiftFilter.h"
-#import "GPUImage3x3ConvolutionFilter.h"
-#import "GPUImageEmbossFilter.h"
-#import "GPUImageCannyEdgeDetectionFilter.h"
-#import "GPUImageThresholdEdgeDetectionFilter.h"
-#import "GPUImageMaskFilter.h"
-#import "GPUImageHistogramFilter.h"
-#import "GPUImageHistogramGenerator.h"
-#import "GPUImageHistogramEqualizationFilter.h"
-#import "GPUImagePrewittEdgeDetectionFilter.h"
-#import "GPUImageXYDerivativeFilter.h"
-#import "GPUImageHarrisCornerDetectionFilter.h"
-#import "GPUImageAlphaBlendFilter.h"
-#import "GPUImageNormalBlendFilter.h"
-#import "GPUImageNonMaximumSuppressionFilter.h"
-#import "GPUImageRGBFilter.h"
-#import "GPUImageMedianFilter.h"
-#import "GPUImageBilateralFilter.h"
-#import "GPUImageCrosshairGenerator.h"
-#import "GPUImageToneCurveFilter.h"
-#import "GPUImageNobleCornerDetectionFilter.h"
-#import "GPUImageShiTomasiFeatureDetectionFilter.h"
-#import "GPUImageErosionFilter.h"
-#import "GPUImageRGBErosionFilter.h"
-#import "GPUImageDilationFilter.h"
-#import "GPUImageRGBDilationFilter.h"
-#import "GPUImageOpeningFilter.h"
-#import "GPUImageRGBOpeningFilter.h"
-#import "GPUImageClosingFilter.h"
-#import "GPUImageRGBClosingFilter.h"
-#import "GPUImageColorPackingFilter.h"
-#import "GPUImageSphereRefractionFilter.h"
-#import "GPUImageMonochromeFilter.h"
-#import "GPUImageOpacityFilter.h"
-#import "GPUImageHighlightShadowFilter.h"
-#import "GPUImageFalseColorFilter.h"
-#import "GPUImageHSBFilter.h"
-#import "GPUImageHueFilter.h"
-#import "GPUImageGlassSphereFilter.h"
-#import "GPUImageLookupFilter.h"
-#import "GPUImageAmatorkaFilter.h"
-#import "GPUImageMissEtikateFilter.h"
-#import "GPUImageSoftEleganceFilter.h"
-#import "GPUImageAddBlendFilter.h"
-#import "GPUImageDivideBlendFilter.h"
-#import "GPUImagePolkaDotFilter.h"
-#import "GPUImageLocalBinaryPatternFilter.h"
-#import "GPUImageLanczosResamplingFilter.h"
-#import "GPUImageAverageColor.h"
-#import "GPUImageSolidColorGenerator.h"
-#import "GPUImageLuminosity.h"
-#import "GPUImageAverageLuminanceThresholdFilter.h"
-#import "GPUImageWhiteBalanceFilter.h"
-#import "GPUImageChromaKeyFilter.h"
-#import "GPUImageLowPassFilter.h"
-#import "GPUImageHighPassFilter.h"
-#import "GPUImageMotionDetector.h"
-#import "GPUImageHalftoneFilter.h"
-#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
-#import "GPUImageHoughTransformLineDetector.h"
-#import "GPUImageParallelCoordinateLineTransformFilter.h"
-#import "GPUImageThresholdSketchFilter.h"
-#import "GPUImageLineGenerator.h"
-#import "GPUImageLinearBurnBlendFilter.h"
-#import "GPUImageGaussianBlurPositionFilter.h"
-#import "GPUImagePixellatePositionFilter.h"
-#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
-#import "GPUImagePoissonBlendFilter.h"
-#import "GPUImageMotionBlurFilter.h"
-#import "GPUImageZoomBlurFilter.h"
-#import "GPUImageLaplacianFilter.h"
-#import "GPUImageiOSBlurFilter.h"
-#import "GPUImageLuminanceRangeFilter.h"
-#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
-#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
-#import "GPUImageSingleComponentGaussianBlurFilter.h"
-#import "GPUImageThreeInputFilter.h"
-#import "GPUImageWeakPixelInclusionFilter.h"
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.h
deleted file mode 100755
index 67e68de..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-/** Runs a 3x3 convolution kernel against the image
- */
-@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
-{
- GLint convolutionMatrixUniform;
-}
-
-/** Convolution kernel to run against the image
-
- The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels.
- The matrix is specified in row-major order, with the top left pixel being one.one and the bottom right three.three
- If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
- */
-@property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.m
deleted file mode 100755
index c623ac6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImage3x3ConvolutionFilter.m
+++ /dev/null
@@ -1,128 +0,0 @@
-#import "GPUImage3x3ConvolutionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- uniform mediump mat3 convolutionMatrix;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
-
- mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
- resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
- resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
-
- gl_FragColor = vec4(resultColor, centerColor.a);
- }
-);
-#else
-NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- uniform mat3 convolutionMatrix;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
-
- vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
- resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
- resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
-
- gl_FragColor = vec4(resultColor, centerColor.a);
- }
-);
-#endif
-
-@implementation GPUImage3x3ConvolutionFilter
-
-@synthesize convolutionKernel = _convolutionKernel;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString]))
- {
- return nil;
- }
-
- self.convolutionKernel = (GPUMatrix3x3){
- {0.f, 0.f, 0.f},
- {0.f, 1.f, 0.f},
- {0.f, 0.f, 0.f}
- };
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- convolutionMatrixUniform = [filterProgram uniformIndex:@"convolutionMatrix"];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setConvolutionKernel:(GPUMatrix3x3)newValue;
-{
- _convolutionKernel = newValue;
-
- [self setMatrix3f:_convolutionKernel forUniform:convolutionMatrixUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.h
deleted file mode 100644
index 5599e15..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImageFilter.h"
-
-extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
-
-@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
-{
- GLint texelWidthUniform, texelHeightUniform;
-
- CGFloat texelWidth, texelHeight;
- BOOL hasOverriddenImageSizeFactor;
-}
-
-// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
-@property(readwrite, nonatomic) CGFloat texelWidth;
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.m
deleted file mode 100644
index 05c4d50..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImage3x3TextureSamplingFilter.m
+++ /dev/null
@@ -1,121 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-// Override vertex shader to remove dependent texture reads
-NSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform float texelWidth;
- uniform float texelHeight;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 widthStep = vec2(texelWidth, 0.0);
- vec2 heightStep = vec2(0.0, texelHeight);
- vec2 widthHeightStep = vec2(texelWidth, texelHeight);
- vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);
-
- textureCoordinate = inputTextureCoordinate.xy;
- leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
- rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
-
- topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
- topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
- topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;
-
- bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
- bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
- bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
- }
-);
-
-
-@implementation GPUImage3x3TextureSamplingFilter
-
-@synthesize texelWidth = _texelWidth;
-@synthesize texelHeight = _texelHeight;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [self initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- if (!hasOverriddenImageSizeFactor)
- {
- _texelWidth = 1.0 / filterFrameSize.width;
- _texelHeight = 1.0 / filterFrameSize.height;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- glUniform1f(texelWidthUniform, _texelHeight);
- glUniform1f(texelHeightUniform, _texelWidth);
- }
- else
- {
- glUniform1f(texelWidthUniform, _texelWidth);
- glUniform1f(texelHeightUniform, _texelHeight);
- }
- });
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelWidth = newValue;
-
- [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelHeight = newValue;
-
- [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.h
deleted file mode 100755
index 3278556..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup
-
-/** A multiplier for the background averaging blur radius in pixels, with a default of 4
- */
-@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.m
deleted file mode 100755
index 71fa6ab..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAdaptiveThresholdFilter.m
+++ /dev/null
@@ -1,100 +0,0 @@
-#import "GPUImageAdaptiveThresholdFilter.h"
-#import "GPUImageFilter.h"
-#import "GPUImageTwoInputFilter.h"
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImageBoxBlurFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- highp float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;
- highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;
- highp float thresholdResult = step(blurredInput - 0.05, localLuminance);
-
- gl_FragColor = vec4(vec3(thresholdResult), 1.0);
- }
-);
-#else
-NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;
- float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;
- float thresholdResult = step(blurredInput - 0.05, localLuminance);
-
- gl_FragColor = vec4(vec3(thresholdResult), 1.0);
- }
-);
-#endif
-
-@interface GPUImageAdaptiveThresholdFilter()
-{
- GPUImageBoxBlurFilter *boxBlurFilter;
-}
-@end
-
-@implementation GPUImageAdaptiveThresholdFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: reduce to luminance
- GPUImageGrayscaleFilter *luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];
- [self addFilter:luminanceFilter];
-
- // Second pass: perform a box blur
- boxBlurFilter = [[GPUImageBoxBlurFilter alloc] init];
- [self addFilter:boxBlurFilter];
-
- // Third pass: compare the blurred background luminance to the local value
- GPUImageFilter *adaptiveThresholdFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString];
- [self addFilter:adaptiveThresholdFilter];
-
- [luminanceFilter addTarget:boxBlurFilter];
-
- [boxBlurFilter addTarget:adaptiveThresholdFilter];
- // To prevent double updating of this filter, disable updates from the sharp luminance image side
- [luminanceFilter addTarget:adaptiveThresholdFilter];
-
- self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
- self.terminalFilter = adaptiveThresholdFilter;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- boxBlurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return boxBlurFilter.blurRadiusInPixels;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.h
deleted file mode 100644
index b14c60c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.m
deleted file mode 100644
index c89054a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAddBlendFilter.m
+++ /dev/null
@@ -1,100 +0,0 @@
-#import "GPUImageAddBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- mediump float r;
- if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {
- r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- r = overlay.r + base.r;
- }
-
- mediump float g;
- if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {
- g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- g = overlay.g + base.g;
- }
-
- mediump float b;
- if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {
- b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- b = overlay.b + base.b;
- }
-
- mediump float a = overlay.a + base.a - overlay.a * base.a;
-
- gl_FragColor = vec4(r, g, b, a);
- }
-);
-#else
-NSString *const kGPUImageAddBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- float r;
- if (overlay.r * base.a + base.r * overlay.a >= overlay.a * base.a) {
- r = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- r = overlay.r + base.r;
- }
-
- float g;
- if (overlay.g * base.a + base.g * overlay.a >= overlay.a * base.a) {
- g = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- g = overlay.g + base.g;
- }
-
- float b;
- if (overlay.b * base.a + base.b * overlay.a >= overlay.a * base.a) {
- b = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- b = overlay.b + base.b;
- }
-
- float a = overlay.a + base.a - overlay.a * base.a;
-
- gl_FragColor = vec4(r, g, b, a);
- }
-);
-#endif
-
-
-
-@implementation GPUImageAddBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageAddBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.h
deleted file mode 100755
index c4d7575..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
-{
- GLint mixUniform;
-}
-
-// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
-@property(readwrite, nonatomic) CGFloat mix;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.m
deleted file mode 100755
index 077df79..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAlphaBlendFilter.m
+++ /dev/null
@@ -1,72 +0,0 @@
-#import "GPUImageAlphaBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform lowp float mixturePercent;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float mixturePercent;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageAlphaBlendFilter
-
-@synthesize mix = _mix;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageAlphaBlendFragmentShaderString]))
- {
- return nil;
- }
-
- mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
- self.mix = 0.5;
-
- return self;
-}
-
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setMix:(CGFloat)newValue;
-{
- _mix = newValue;
-
- [self setFloat:_mix forUniform:mixUniform program:filterProgram];
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.h
deleted file mode 100755
index 1dbe096..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImagePicture;
-
-/** A photo filter based on Photoshop action by Amatorka
- http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631
- */
-
-// Note: If you want to use this effect you have to add lookup_amatorka.png
-// from Resources folder to your application bundle.
-
-@interface GPUImageAmatorkaFilter : GPUImageFilterGroup
-{
- GPUImagePicture *lookupImageSource;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.m
deleted file mode 100755
index 1ab3ec4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAmatorkaFilter.m
+++ /dev/null
@@ -1,38 +0,0 @@
-#import "GPUImageAmatorkaFilter.h"
-#import "GPUImagePicture.h"
-#import "GPUImageLookupFilter.h"
-
-@implementation GPUImageAmatorkaFilter
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- UIImage *image = [UIImage imageNamed:@"lookup_amatorka.png"];
-#else
- NSImage *image = [NSImage imageNamed:@"lookup_amatorka.png"];
-#endif
-
- NSAssert(image, @"To use GPUImageAmatorkaFilter you need to add lookup_amatorka.png from GPUImage/framework/Resources to your application bundle.");
-
- lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
- GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
- [self addFilter:lookupFilter];
-
- [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
- [lookupImageSource processImage];
-
- self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
- self.terminalFilter = lookupFilter;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.h b/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.h
deleted file mode 100644
index e3d957d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.h
+++ /dev/null
@@ -1,20 +0,0 @@
-#import "GPUImageFilter.h"
-
-extern NSString *const kGPUImageColorAveragingVertexShaderString;
-
-@interface GPUImageAverageColor : GPUImageFilter
-{
- GLint texelWidthUniform, texelHeightUniform;
-
- NSUInteger numberOfStages;
-
- GLubyte *rawImagePixels;
- CGSize finalStageSize;
-}
-
-// This block is called on the completion of color averaging for a frame
-@property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);
-
-- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.m b/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.m
deleted file mode 100644
index a768ecb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAverageColor.m
+++ /dev/null
@@ -1,210 +0,0 @@
-#import "GPUImageAverageColor.h"
-
-NSString *const kGPUImageColorAveragingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform float texelWidth;
- uniform float texelHeight;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);
- upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);
- lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);
- lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);
- }
- );
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 outputTextureCoordinate;
-
- varying highp vec2 upperLeftInputTextureCoordinate;
- varying highp vec2 upperRightInputTextureCoordinate;
- varying highp vec2 lowerLeftInputTextureCoordinate;
- varying highp vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- highp vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);
- highp vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);
- highp vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);
- highp vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);
-
- gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);
- }
-);
-#else
-NSString *const kGPUImageColorAveragingFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 outputTextureCoordinate;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- vec4 upperLeftColor = texture2D(inputImageTexture, upperLeftInputTextureCoordinate);
- vec4 upperRightColor = texture2D(inputImageTexture, upperRightInputTextureCoordinate);
- vec4 lowerLeftColor = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate);
- vec4 lowerRightColor = texture2D(inputImageTexture, lowerRightInputTextureCoordinate);
-
- gl_FragColor = 0.25 * (upperLeftColor + upperRightColor + lowerLeftColor + lowerRightColor);
- }
-);
-#endif
-
-@implementation GPUImageAverageColor
-
-@synthesize colorAverageProcessingFinishedBlock = _colorAverageProcessingFinishedBlock;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageColorAveragingFragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
- finalStageSize = CGSizeMake(1.0, 1.0);
-
- __unsafe_unretained GPUImageAverageColor *weakSelf = self;
- [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
- [weakSelf extractAverageColorAtFrameTime:frameTime];
- }];
-
- return self;
-}
-
-- (void)dealloc;
-{
- if (rawImagePixels != NULL)
- {
- free(rawImagePixels);
- }
-}
-
-#pragma mark -
-#pragma mark Managing the display FBOs
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- outputFramebuffer = nil;
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- GLuint currentTexture = [firstInputFramebuffer texture];
-
- NSUInteger numberOfReductionsInX = floor(log(inputTextureSize.width) / log(4.0));
- NSUInteger numberOfReductionsInY = floor(log(inputTextureSize.height) / log(4.0));
- NSUInteger reductionsToHitSideLimit = MIN(numberOfReductionsInX, numberOfReductionsInY);
- for (NSUInteger currentReduction = 0; currentReduction < reductionsToHitSideLimit; currentReduction++)
- {
- CGSize currentStageSize = CGSizeMake(floor(inputTextureSize.width / pow(4.0, currentReduction + 1.0)), floor(inputTextureSize.height / pow(4.0, currentReduction + 1.0)));
- if ( (currentStageSize.height < 2.0) || (currentStageSize.width < 2.0) )
- {
- // A really small last stage seems to cause significant errors in the average, so I abort and leave the rest to the CPU at this point
- break;
- // currentStageSize.height = 2.0; // TODO: Rotate the image to account for this case, which causes FBO construction to fail
- }
-
- [outputFramebuffer unlock];
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentStageSize textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, currentTexture);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);
- glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- currentTexture = [outputFramebuffer texture];
- finalStageSize = currentStageSize;
- }
-
- [firstInputFramebuffer unlock];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- inputRotation = kGPUImageNoRotation;
-}
-
-- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- // we need a normal color texture for averaging the color values
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture internal format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);
-
- if (rawImagePixels == NULL)
- {
- rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);
- }
-
- [GPUImageContext useImageProcessingContext];
- [outputFramebuffer activateFramebuffer];
- glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-
- NSUInteger redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
- NSUInteger byteIndex = 0;
- for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
- {
- redTotal += rawImagePixels[byteIndex++];
- greenTotal += rawImagePixels[byteIndex++];
- blueTotal += rawImagePixels[byteIndex++];
- alphaTotal += rawImagePixels[byteIndex++];
- }
-
- CGFloat normalizedRedTotal = (CGFloat)redTotal / (CGFloat)totalNumberOfPixels / 255.0;
- CGFloat normalizedGreenTotal = (CGFloat)greenTotal / (CGFloat)totalNumberOfPixels / 255.0;
- CGFloat normalizedBlueTotal = (CGFloat)blueTotal / (CGFloat)totalNumberOfPixels / 255.0;
- CGFloat normalizedAlphaTotal = (CGFloat)alphaTotal / (CGFloat)totalNumberOfPixels / 255.0;
-
- if (_colorAverageProcessingFinishedBlock != NULL)
- {
- _colorAverageProcessingFinishedBlock(normalizedRedTotal, normalizedGreenTotal, normalizedBlueTotal, normalizedAlphaTotal, frameTime);
- }
- });
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.h
deleted file mode 100644
index 7f1ae46..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup
-
-// This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
-@property(readwrite, nonatomic) CGFloat thresholdMultiplier;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.m
deleted file mode 100644
index eb2796f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageAverageLuminanceThresholdFilter.m
+++ /dev/null
@@ -1,47 +0,0 @@
-#import "GPUImageAverageLuminanceThresholdFilter.h"
-#import "GPUImageLuminosity.h"
-#import "GPUImageLuminanceThresholdFilter.h"
-
-@interface GPUImageAverageLuminanceThresholdFilter()
-{
- GPUImageLuminosity *luminosityFilter;
- GPUImageLuminanceThresholdFilter *luminanceThresholdFilter;
-}
-@end
-
-@implementation GPUImageAverageLuminanceThresholdFilter
-
-@synthesize thresholdMultiplier = _thresholdMultiplier;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- self.thresholdMultiplier = 1.0;
-
- luminosityFilter = [[GPUImageLuminosity alloc] init];
- [self addFilter:luminosityFilter];
-
- luminanceThresholdFilter = [[GPUImageLuminanceThresholdFilter alloc] init];
- [self addFilter:luminanceThresholdFilter];
-
- __unsafe_unretained GPUImageAverageLuminanceThresholdFilter *weakSelf = self;
- __unsafe_unretained GPUImageLuminanceThresholdFilter *weakThreshold = luminanceThresholdFilter;
-
- [luminosityFilter setLuminosityProcessingFinishedBlock:^(CGFloat luminosity, CMTime frameTime) {
- weakThreshold.threshold = luminosity * weakSelf.thresholdMultiplier;
- }];
-
- self.initialFilters = [NSArray arrayWithObjects:luminosityFilter, luminanceThresholdFilter, nil];
- self.terminalFilter = luminanceThresholdFilter;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.h
deleted file mode 100644
index 6b736cc..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import "GPUImageGaussianBlurFilter.h"
-
-@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
-{
- CGFloat firstDistanceNormalizationFactorUniform;
- CGFloat secondDistanceNormalizationFactorUniform;
-}
-// A normalization factor for the distance between central color and sample color.
-@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.m
deleted file mode 100644
index c2a8c86..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBilateralFilter.m
+++ /dev/null
@@ -1,231 +0,0 @@
-#import "GPUImageBilateralFilter.h"
-
-NSString *const kGPUImageBilateralBlurVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- const int GAUSSIAN_SAMPLES = 9;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 textureCoordinate;
- varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = inputTextureCoordinate.xy;
-
- // Calculate the positions for the blur
- int multiplier = 0;
- vec2 blurStep;
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);
-
- for (int i = 0; i < GAUSSIAN_SAMPLES; i++)
- {
- multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));
- // Blur in x (horizontal)
- blurStep = float(multiplier) * singleStepOffset;
- blurCoordinates[i] = inputTextureCoordinate.xy + blurStep;
- }
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- const lowp int GAUSSIAN_SAMPLES = 9;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- uniform mediump float distanceNormalizationFactor;
-
- void main()
- {
- lowp vec4 centralColor;
- lowp float gaussianWeightTotal;
- lowp vec4 sum;
- lowp vec4 sampleColor;
- lowp float distanceFromCentralColor;
- lowp float gaussianWeight;
-
- centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
- gaussianWeightTotal = 0.18;
- sum = centralColor * 0.18;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- gl_FragColor = sum / gaussianWeightTotal;
- }
-);
-#else
-NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- const int GAUSSIAN_SAMPLES = 9;
-
- varying vec2 textureCoordinate;
- varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- uniform float distanceNormalizationFactor;
-
- void main()
- {
- vec4 centralColor;
- float gaussianWeightTotal;
- vec4 sum;
- vec4 sampleColor;
- float distanceFromCentralColor;
- float gaussianWeight;
-
- centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
- gaussianWeightTotal = 0.18;
- sum = centralColor * 0.18;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
- distanceFromCentralColor = min(distance(centralColor, sampleColor) * distanceNormalizationFactor, 1.0);
- gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
- gaussianWeightTotal += gaussianWeight;
- sum += sampleColor * gaussianWeight;
-
- gl_FragColor = sum / gaussianWeightTotal;
- }
-);
-#endif
-
-@implementation GPUImageBilateralFilter
-
-@synthesize distanceNormalizationFactor = _distanceNormalizationFactor;
-
-- (id)init;
-{
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString
- firstStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString
- secondStageVertexShaderFromString:kGPUImageBilateralBlurVertexShaderString
- secondStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString])) {
- return nil;
- }
-
- firstDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@"distanceNormalizationFactor"];
- secondDistanceNormalizationFactorUniform = [filterProgram uniformIndex:@"distanceNormalizationFactor"];
-
- self.texelSpacingMultiplier = 4.0;
- self.distanceNormalizationFactor = 8.0;
-
-
- return self;
-}
-
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setDistanceNormalizationFactor:(CGFloat)newValue
-{
- _distanceNormalizationFactor = newValue;
-
- [self setFloat:newValue
- forUniform:firstDistanceNormalizationFactorUniform
- program:filterProgram];
-
- [self setFloat:newValue
- forUniform:secondDistanceNormalizationFactorUniform
- program:secondFilterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.h
deleted file mode 100755
index 3fd880b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageGaussianBlurFilter.h"
-
-/** A hardware-accelerated box blur of an image
- */
-@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.m
deleted file mode 100755
index 5a49385..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBoxBlurFilter.m
+++ /dev/null
@@ -1,178 +0,0 @@
-#import "GPUImageBoxBlurFilter.h"
-
-
-@implementation GPUImageBoxBlurFilter
-
-+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImageVertexShaderString;
- }
-
- // From these weights we calculate the offsets to read interpolated values from
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
- // Header
- [shaderString appendFormat:@"\
- attribute vec4 position;\n\
- attribute vec4 inputTextureCoordinate;\n\
- \n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- gl_Position = position;\n\
- \n\
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];
-
- // Inner offset loop
- [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
- for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
- {
- GLfloat optimizedOffset = (GLfloat)(currentOptimizedOffset * 2) + 1.5;
-
- [shaderString appendFormat:@"\
- blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
- blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedOffset, (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedOffset];
- }
-
- // Footer
- [shaderString appendString:@"}\n"];
-
- return shaderString;
-}
-
-+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImagePassthroughFragmentShaderString;
- }
-
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
- NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
-
- // Header
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform highp float texelWidthOffset;\n\
- uniform highp float texelHeightOffset;\n\
- \n\
- varying highp vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
-#else
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- vec4 sum = vec4(0.0);\n", 1 + (numberOfOptimizedOffsets * 2) ];
-#endif
-
- GLfloat boxWeight = 1.0 / (GLfloat)((blurRadius * 2) + 1);
-
- // Inner texture loop
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", boxWeight];
-
- for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
- {
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), boxWeight * 2.0];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), boxWeight * 2.0];
- }
-
- // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
- if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#else
- [shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#endif
-
- for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
- {
- GLfloat optimizedOffset = (GLfloat)(currentOverlowTextureRead * 2) + 1.5;
-
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, boxWeight * 2.0];
- }
- }
-
- // Footer
- [shaderString appendString:@"\
- gl_FragColor = sum;\n\
- }\n"];
-
- return shaderString;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- [super setupFilterForSize:filterFrameSize];
-
- if (shouldResizeBlurRadiusWithImageSize == YES)
- {
-
- }
-}
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- // NSString *currentGaussianBlurVertexShader = [GPUImageGaussianBlurFilter vertexShaderForStandardGaussianOfRadius:4 sigma:2.0];
- // NSString *currentGaussianBlurFragmentShader = [GPUImageGaussianBlurFilter fragmentShaderForStandardGaussianOfRadius:4 sigma:2.0];
-
- NSString *currentBoxBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:0.0];
- NSString *currentBoxBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:0.0];
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:currentBoxBlurVertexShader firstStageFragmentShaderFromString:currentBoxBlurFragmentShader secondStageVertexShaderFromString:currentBoxBlurVertexShader secondStageFragmentShaderFromString:currentBoxBlurFragmentShader]))
- {
- return nil;
- }
-
- _blurRadiusInPixels = 4.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- CGFloat newBlurRadius = round(round(newValue / 2.0) * 2.0); // For now, only do even radii
-
- if (newBlurRadius != _blurRadiusInPixels)
- {
- _blurRadiusInPixels = newBlurRadius;
-
- NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];
- NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:_blurRadiusInPixels sigma:0.0];
-
- // NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader);
- // NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader);
- //
- [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];
- }
- shouldResizeBlurRadiusWithImageSize = NO;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.h
deleted file mode 100755
index 046473b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageBrightnessFilter : GPUImageFilter
-{
- GLint brightnessUniform;
-}
-
-// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
-@property(readwrite, nonatomic) CGFloat brightness;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.m
deleted file mode 100755
index 7e526d8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBrightnessFilter.m
+++ /dev/null
@@ -1,66 +0,0 @@
-#import "GPUImageBrightnessFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float brightness;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageBrightnessFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float brightness;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4((textureColor.rgb + vec3(brightness)), textureColor.w);
- }
- );
-#endif
-
-@implementation GPUImageBrightnessFilter
-
-@synthesize brightness = _brightness;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageBrightnessFragmentShaderString]))
- {
- return nil;
- }
-
- brightnessUniform = [filterProgram uniformIndex:@"brightness"];
- self.brightness = 0.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBrightness:(CGFloat)newValue;
-{
- _brightness = newValue;
-
- [self setFloat:_brightness forUniform:brightnessUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.h b/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.h
deleted file mode 100644
index caf09c8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageBuffer : GPUImageFilter
-{
- NSMutableArray *bufferedFramebuffers;
-}
-
-@property(readwrite, nonatomic) NSUInteger bufferSize;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.m b/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.m
deleted file mode 100644
index c90d020..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBuffer.m
+++ /dev/null
@@ -1,112 +0,0 @@
-#import "GPUImageBuffer.h"
-
-@interface GPUImageBuffer()
-
-@end
-
-@implementation GPUImageBuffer
-
-@synthesize bufferSize = _bufferSize;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
- {
- return nil;
- }
-
- bufferedFramebuffers = [[NSMutableArray alloc] init];
-// [bufferedTextures addObject:[NSNumber numberWithInt:outputTexture]];
- _bufferSize = 1;
-
- return self;
-}
-
-- (void)dealloc
-{
- for (GPUImageFramebuffer *currentFramebuffer in bufferedFramebuffers)
- {
- [currentFramebuffer unlock];
- }
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- if ([bufferedFramebuffers count] >= _bufferSize)
- {
- outputFramebuffer = [bufferedFramebuffers objectAtIndex:0];
- [bufferedFramebuffers removeObjectAtIndex:0];
- }
- else
- {
- // Nothing yet in the buffer, so don't process further until the buffer is full
- outputFramebuffer = firstInputFramebuffer;
- [firstInputFramebuffer lock];
- }
-
- [bufferedFramebuffers addObject:firstInputFramebuffer];
-
- // Need to pass along rotation information, as we're just holding on to buffered framebuffers and not rotating them ourselves
- for (id currentTarget in targets)
- {
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [currentTarget setInputRotation:inputRotation atIndex:textureIndex];
- }
- }
-
- // Let the downstream video elements see the previous frame from the buffer before rendering a new one into place
- [self informTargetsAboutNewFrameAtTime:frameTime];
-
-// [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // No need to render to another texture anymore, since we'll be hanging on to the textures in our buffer
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBufferSize:(NSUInteger)newValue;
-{
- if ( (newValue == _bufferSize) || (newValue < 1) )
- {
- return;
- }
-
- if (newValue > _bufferSize)
- {
- NSUInteger texturesToAdd = newValue - _bufferSize;
- for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToAdd; currentTextureIndex++)
- {
- // TODO: Deal with the growth of the size of the buffer by rotating framebuffers, no textures
- }
- }
- else
- {
- NSUInteger texturesToRemove = _bufferSize - newValue;
- for (NSUInteger currentTextureIndex = 0; currentTextureIndex < texturesToRemove; currentTextureIndex++)
- {
- GPUImageFramebuffer *lastFramebuffer = [bufferedFramebuffers lastObject];
- [bufferedFramebuffers removeObjectAtIndex:([bufferedFramebuffers count] - 1)];
-
- [lastFramebuffer unlock];
- lastFramebuffer = nil;
- }
- }
-
- _bufferSize = newValue;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.h
deleted file mode 100755
index d416e53..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GPUImageFilter.h"
-
-/// Creates a bulge distortion on the image
-@interface GPUImageBulgeDistortionFilter : GPUImageFilter
-{
- GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
-}
-
-/// The center about which to apply the distortion, with a default of (0.5, 0.5)
-@property(readwrite, nonatomic) CGPoint center;
-/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
-@property(readwrite, nonatomic) CGFloat radius;
-/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
-@property(readwrite, nonatomic) CGFloat scale;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.m
deleted file mode 100755
index 1f778ad..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageBulgeDistortionFilter.m
+++ /dev/null
@@ -1,174 +0,0 @@
-#import "GPUImageBulgeDistortionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float aspectRatio;
- uniform highp vec2 center;
- uniform highp float radius;
- uniform highp float scale;
-
- void main()
- {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float dist = distance(center, textureCoordinateToUse);
- textureCoordinateToUse = textureCoordinate;
-
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- highp float percent = 1.0 - ((radius - dist) / radius) * scale;
- percent = percent * percent;
-
- textureCoordinateToUse = textureCoordinateToUse * percent;
- textureCoordinateToUse += center;
- }
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
- }
-);
-#else
-NSString *const kGPUImageBulgeDistortionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float aspectRatio;
- uniform vec2 center;
- uniform float radius;
- uniform float scale;
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float dist = distance(center, textureCoordinateToUse);
- textureCoordinateToUse = textureCoordinate;
-
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- float percent = 1.0 - ((radius - dist) / radius) * scale;
- percent = percent * percent;
-
- textureCoordinateToUse = textureCoordinateToUse * percent;
- textureCoordinateToUse += center;
- }
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
- }
-);
-#endif
-
-
-@interface GPUImageBulgeDistortionFilter ()
-
-- (void)adjustAspectRatio;
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
-
-@implementation GPUImageBulgeDistortionFilter
-
-@synthesize aspectRatio = _aspectRatio;
-@synthesize center = _center;
-@synthesize radius = _radius;
-@synthesize scale = _scale;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageBulgeDistortionFragmentShaderString]))
- {
- return nil;
- }
-
- aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
- radiusUniform = [filterProgram uniformIndex:@"radius"];
- scaleUniform = [filterProgram uniformIndex:@"scale"];
- centerUniform = [filterProgram uniformIndex:@"center"];
-
- self.radius = 0.25;
- self.scale = 0.5;
- self.center = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
- [self adjustAspectRatio];
-}
-
-- (void)setRadius:(CGFloat)newValue;
-{
- _radius = newValue;
-
- [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
-}
-
-- (void)setScale:(CGFloat)newValue;
-{
- _scale = newValue;
-
- [self setFloat:_scale forUniform:scaleUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
-
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.h
deleted file mode 100755
index 4f97804..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageCGAColorspaceFilter : GPUImageFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.m
deleted file mode 100755
index eee939a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCGAColorspaceFilter.m
+++ /dev/null
@@ -1,113 +0,0 @@
-//
-// GPUImageCGAColorspaceFilter.m
-//
-
-#import "GPUImageCGAColorspaceFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- highp vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);
- //highp vec4 colorDivisor = vec4(colorDepth);
-
- highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);
- highp vec4 color = texture2D(inputImageTexture, samplePos );
-
- //gl_FragColor = texture2D(inputImageTexture, samplePos );
- mediump vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);
- mediump vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);
- mediump vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);
- mediump vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);
-
- mediump vec4 endColor;
- highp float blackDistance = distance(color, colorBlack);
- highp float whiteDistance = distance(color, colorWhite);
- highp float magentaDistance = distance(color, colorMagenta);
- highp float cyanDistance = distance(color, colorCyan);
-
- mediump vec4 finalColor;
-
- highp float colorDistance = min(magentaDistance, cyanDistance);
- colorDistance = min(colorDistance, whiteDistance);
- colorDistance = min(colorDistance, blackDistance);
-
- if (colorDistance == blackDistance) {
- finalColor = colorBlack;
- } else if (colorDistance == whiteDistance) {
- finalColor = colorWhite;
- } else if (colorDistance == cyanDistance) {
- finalColor = colorCyan;
- } else {
- finalColor = colorMagenta;
- }
-
- gl_FragColor = finalColor;
- }
-);
-#else
-NSString *const kGPUImageCGAColorspaceFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec2 sampleDivisor = vec2(1.0 / 200.0, 1.0 / 320.0);
- //highp vec4 colorDivisor = vec4(colorDepth);
-
- vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor);
- vec4 color = texture2D(inputImageTexture, samplePos );
-
- //gl_FragColor = texture2D(inputImageTexture, samplePos );
- vec4 colorCyan = vec4(85.0 / 255.0, 1.0, 1.0, 1.0);
- vec4 colorMagenta = vec4(1.0, 85.0 / 255.0, 1.0, 1.0);
- vec4 colorWhite = vec4(1.0, 1.0, 1.0, 1.0);
- vec4 colorBlack = vec4(0.0, 0.0, 0.0, 1.0);
-
- vec4 endColor;
- float blackDistance = distance(color, colorBlack);
- float whiteDistance = distance(color, colorWhite);
- float magentaDistance = distance(color, colorMagenta);
- float cyanDistance = distance(color, colorCyan);
-
- vec4 finalColor;
-
- float colorDistance = min(magentaDistance, cyanDistance);
- colorDistance = min(colorDistance, whiteDistance);
- colorDistance = min(colorDistance, blackDistance);
-
- if (colorDistance == blackDistance) {
- finalColor = colorBlack;
- } else if (colorDistance == whiteDistance) {
- finalColor = colorWhite;
- } else if (colorDistance == cyanDistance) {
- finalColor = colorCyan;
- } else {
- finalColor = colorMagenta;
- }
-
- gl_FragColor = finalColor;
- }
-);
-#endif
-
-@implementation GPUImageCGAColorspaceFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageCGAColorspaceFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.h
deleted file mode 100755
index 53c5850..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.h
+++ /dev/null
@@ -1,62 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGrayscaleFilter;
-@class GPUImageSingleComponentGaussianBlurFilter;
-@class GPUImageDirectionalSobelEdgeDetectionFilter;
-@class GPUImageDirectionalNonMaximumSuppressionFilter;
-@class GPUImageWeakPixelInclusionFilter;
-
-/** This applies the edge detection process described by John Canny in
-
- Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679–698, 1986.
-
- and implemented in OpenGL ES by
-
- A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011.
-
- It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall
- gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter
- acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower
- threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels.
- */
-@interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup
-{
- GPUImageGrayscaleFilter *luminanceFilter;
- GPUImageSingleComponentGaussianBlurFilter *blurFilter;
- GPUImageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter;
- GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
- GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter;
-}
-
-/** The image width and height factors tweak the appearance of the edges.
-
- These parameters affect the visibility of the detected edges
-
- By default, they match the inverse of the filter size in pixels
- */
-@property(readwrite, nonatomic) CGFloat texelWidth;
-/** The image width and height factors tweak the appearance of the edges.
-
- These parameters affect the visibility of the detected edges
-
- By default, they match the inverse of the filter size in pixels
- */
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-/** The underlying blur radius for the Gaussian blur. Default is 2.0.
- */
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-/** The underlying blur texel spacing multiplier. Default is 1.0.
- */
-@property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier;
-
-/** Any edge with a gradient magnitude above this threshold will pass and show up in the final result.
- */
-@property(readwrite, nonatomic) CGFloat upperThreshold;
-
-/** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result.
- */
-@property(readwrite, nonatomic) CGFloat lowerThreshold;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.m
deleted file mode 100755
index 7327d53..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCannyEdgeDetectionFilter.m
+++ /dev/null
@@ -1,125 +0,0 @@
-#import "GPUImageCannyEdgeDetectionFilter.h"
-
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
-#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
-#import "GPUImageWeakPixelInclusionFilter.h"
-#import "GPUImageSingleComponentGaussianBlurFilter.h"
-
-@implementation GPUImageCannyEdgeDetectionFilter
-
-@synthesize upperThreshold;
-@synthesize lowerThreshold;
-@synthesize blurRadiusInPixels;
-@synthesize blurTexelSpacingMultiplier;
-@synthesize texelWidth;
-@synthesize texelHeight;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: convert image to luminance
- luminanceFilter = [[GPUImageGrayscaleFilter alloc] init];
- [self addFilter:luminanceFilter];
-
- // Second pass: apply a variable Gaussian blur
- blurFilter = [[GPUImageSingleComponentGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Third pass: run the Sobel edge detection, with calculated gradient directions, on this blurred image
- edgeDetectionFilter = [[GPUImageDirectionalSobelEdgeDetectionFilter alloc] init];
- [self addFilter:edgeDetectionFilter];
-
- // Fourth pass: apply non-maximum suppression
- nonMaximumSuppressionFilter = [[GPUImageDirectionalNonMaximumSuppressionFilter alloc] init];
- [self addFilter:nonMaximumSuppressionFilter];
-
- // Fifth pass: include weak pixels to complete edges
- weakPixelInclusionFilter = [[GPUImageWeakPixelInclusionFilter alloc] init];
- [self addFilter:weakPixelInclusionFilter];
-
- [luminanceFilter addTarget:blurFilter];
- [blurFilter addTarget:edgeDetectionFilter];
- [edgeDetectionFilter addTarget:nonMaximumSuppressionFilter];
- [nonMaximumSuppressionFilter addTarget:weakPixelInclusionFilter];
-
- self.initialFilters = [NSArray arrayWithObject:luminanceFilter];
-// self.terminalFilter = nonMaximumSuppressionFilter;
- self.terminalFilter = weakPixelInclusionFilter;
-
- self.blurRadiusInPixels = 2.0;
- self.blurTexelSpacingMultiplier = 1.0;
- self.upperThreshold = 0.4;
- self.lowerThreshold = 0.1;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setBlurTexelSpacingMultiplier:(CGFloat)newValue;
-{
- blurFilter.texelSpacingMultiplier = newValue;
-}
-
-- (CGFloat)blurTexelSpacingMultiplier;
-{
- return blurFilter.texelSpacingMultiplier;
-}
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- edgeDetectionFilter.texelWidth = newValue;
-}
-
-- (CGFloat)texelWidth;
-{
- return edgeDetectionFilter.texelWidth;
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- edgeDetectionFilter.texelHeight = newValue;
-}
-
-- (CGFloat)texelHeight;
-{
- return edgeDetectionFilter.texelHeight;
-}
-
-- (void)setUpperThreshold:(CGFloat)newValue;
-{
- nonMaximumSuppressionFilter.upperThreshold = newValue;
-}
-
-- (CGFloat)upperThreshold;
-{
- return nonMaximumSuppressionFilter.upperThreshold;
-}
-
-- (void)setLowerThreshold:(CGFloat)newValue;
-{
- nonMaximumSuppressionFilter.lowerThreshold = newValue;
-}
-
-- (CGFloat)lowerThreshold;
-{
- return nonMaximumSuppressionFilter.lowerThreshold;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.h
deleted file mode 100755
index 00a5aed..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.h
+++ /dev/null
@@ -1,32 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-/** Selectively replaces a color in the first image with the second image
- */
-@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
-{
- GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
-}
-
-/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
-
- The default value is 0.3
- */
-@property(readwrite, nonatomic) CGFloat thresholdSensitivity;
-
-/** The degree of smoothing controls how gradually similar colors are replaced in the image
-
- The default value is 0.1
- */
-@property(readwrite, nonatomic) CGFloat smoothing;
-
-/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
-
- The default is green: (0.0, 1.0, 0.0).
-
- @param redComponent Red component of color to be replaced
- @param greenComponent Green component of color to be replaced
- @param blueComponent Blue component of color to be replaced
- */
-- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.m
deleted file mode 100755
index 4e6b466..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyBlendFilter.m
+++ /dev/null
@@ -1,117 +0,0 @@
-#import "GPUImageChromaKeyBlendFilter.h"
-
-// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform float thresholdSensitivity;
- uniform float smoothing;
- uniform vec3 colorToReplace;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
- float maskCr = 0.7132 * (colorToReplace.r - maskY);
- float maskCb = 0.5647 * (colorToReplace.b - maskY);
-
- float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
- float Cr = 0.7132 * (textureColor.r - Y);
- float Cb = 0.5647 * (textureColor.b - Y);
-
-// float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
- float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
- gl_FragColor = mix(textureColor, textureColor2, blendValue);
- }
-);
-#else
-NSString *const kGPUImageChromaKeyBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform float thresholdSensitivity;
- uniform float smoothing;
- uniform vec3 colorToReplace;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
- float maskCr = 0.7132 * (colorToReplace.r - maskY);
- float maskCb = 0.5647 * (colorToReplace.b - maskY);
-
- float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
- float Cr = 0.7132 * (textureColor.r - Y);
- float Cb = 0.5647 * (textureColor.b - Y);
-
- // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
- float blendValue = 1.0 - smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
- gl_FragColor = mix(textureColor, textureColor2, blendValue);
- }
-);
-#endif
-
-@implementation GPUImageChromaKeyBlendFilter
-
-@synthesize thresholdSensitivity = _thresholdSensitivity;
-@synthesize smoothing = _smoothing;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyBlendFragmentShaderString]))
- {
- return nil;
- }
-
- thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"];
- smoothingUniform = [filterProgram uniformIndex:@"smoothing"];
- colorToReplaceUniform = [filterProgram uniformIndex:@"colorToReplace"];
-
- self.thresholdSensitivity = 0.4;
- self.smoothing = 0.1;
- [self setColorToReplaceRed:0.0 green:1.0 blue:0.0];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram];
-}
-
-- (void)setThresholdSensitivity:(CGFloat)newValue;
-{
- _thresholdSensitivity = newValue;
-
- [self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];
-}
-
-- (void)setSmoothing:(CGFloat)newValue;
-{
- _smoothing = newValue;
-
- [self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.h
deleted file mode 100644
index 5ee7e49..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.h
+++ /dev/null
@@ -1,30 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageChromaKeyFilter : GPUImageFilter
-{
- GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
-}
-
-/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
-
- The default value is 0.3
- */
-@property(readwrite, nonatomic) CGFloat thresholdSensitivity;
-
-/** The degree of smoothing controls how gradually similar colors are replaced in the image
-
- The default value is 0.1
- */
-@property(readwrite, nonatomic) CGFloat smoothing;
-
-/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
-
- The default is green: (0.0, 1.0, 0.0).
-
- @param redComponent Red component of color to be replaced
- @param greenComponent Green component of color to be replaced
- @param blueComponent Blue component of color to be replaced
- */
-- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.m
deleted file mode 100644
index c70ef61..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageChromaKeyFilter.m
+++ /dev/null
@@ -1,113 +0,0 @@
-#import "GPUImageChromaKeyFilter.h"
-
-// Shader code based on Apple's CIChromaKeyFilter example: https://developer.apple.com/library/mac/#samplecode/CIChromaKeyFilter/Introduction/Intro.html
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying highp vec2 textureCoordinate;
-
- uniform float thresholdSensitivity;
- uniform float smoothing;
- uniform vec3 colorToReplace;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
- float maskCr = 0.7132 * (colorToReplace.r - maskY);
- float maskCb = 0.5647 * (colorToReplace.b - maskY);
-
- float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
- float Cr = 0.7132 * (textureColor.r - Y);
- float Cb = 0.5647 * (textureColor.b - Y);
-
- // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
- float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
- gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);
- }
-);
-#else
-NSString *const kGPUImageChromaKeyFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform float thresholdSensitivity;
- uniform float smoothing;
- uniform vec3 colorToReplace;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
- float maskCr = 0.7132 * (colorToReplace.r - maskY);
- float maskCb = 0.5647 * (colorToReplace.b - maskY);
-
- float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
- float Cr = 0.7132 * (textureColor.r - Y);
- float Cb = 0.5647 * (textureColor.b - Y);
-
- // float blendValue = 1.0 - smoothstep(thresholdSensitivity - smoothing, thresholdSensitivity , abs(Cr - maskCr) + abs(Cb - maskCb));
- float blendValue = smoothstep(thresholdSensitivity, thresholdSensitivity + smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));
- gl_FragColor = vec4(textureColor.rgb, textureColor.a * blendValue);
- }
- );
-#endif
-
-@implementation GPUImageChromaKeyFilter
-
-@synthesize thresholdSensitivity = _thresholdSensitivity;
-@synthesize smoothing = _smoothing;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageChromaKeyFragmentShaderString]))
- {
- return nil;
- }
-
- thresholdSensitivityUniform = [filterProgram uniformIndex:@"thresholdSensitivity"];
- smoothingUniform = [filterProgram uniformIndex:@"smoothing"];
- colorToReplaceUniform = [filterProgram uniformIndex:@"colorToReplace"];
-
- self.thresholdSensitivity = 0.4;
- self.smoothing = 0.1;
- [self setColorToReplaceRed:0.0 green:1.0 blue:0.0];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 colorToReplace = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:colorToReplace forUniform:colorToReplaceUniform program:filterProgram];
-}
-
-- (void)setThresholdSensitivity:(CGFloat)newValue;
-{
- _thresholdSensitivity = newValue;
-
- [self setFloat:(GLfloat)_thresholdSensitivity forUniform:thresholdSensitivityUniform program:filterProgram];
-}
-
-- (void)setSmoothing:(CGFloat)newValue;
-{
- _smoothing = newValue;
-
- [self setFloat:(GLfloat)_smoothing forUniform:smoothingUniform program:filterProgram];
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.h
deleted file mode 100644
index 61e34c4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageErosionFilter;
-@class GPUImageDilationFilter;
-
-// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
-// This helps to filter out smaller dark elements.
-
-@interface GPUImageClosingFilter : GPUImageFilterGroup
-{
- GPUImageErosionFilter *erosionFilter;
- GPUImageDilationFilter *dilationFilter;
-}
-
-@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
-
-- (id)initWithRadius:(NSUInteger)radius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.m
deleted file mode 100644
index 01e9b29..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageClosingFilter.m
+++ /dev/null
@@ -1,57 +0,0 @@
-#import "GPUImageClosingFilter.h"
-#import "GPUImageErosionFilter.h"
-#import "GPUImageDilationFilter.h"
-
-@implementation GPUImageClosingFilter
-
-@synthesize verticalTexelSpacing = _verticalTexelSpacing;
-@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: dilation
- dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
- [self addFilter:dilationFilter];
-
- // Second pass: erosion
- erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
- [self addFilter:erosionFilter];
-
- [dilationFilter addTarget:erosionFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];
- self.terminalFilter = erosionFilter;
-
- return self;
-}
-
-- (void)setVerticalTexelSpacing:(CGFloat)newValue;
-{
- _verticalTexelSpacing = newValue;
- erosionFilter.verticalTexelSpacing = newValue;
- dilationFilter.verticalTexelSpacing = newValue;
-}
-
-- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
-{
- _horizontalTexelSpacing = newValue;
- erosionFilter.horizontalTexelSpacing = newValue;
- dilationFilter.horizontalTexelSpacing = newValue;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.h
deleted file mode 100644
index 302a16c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageColorBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.m
deleted file mode 100644
index ced1cb8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorBlendFilter.m
+++ /dev/null
@@ -1,113 +0,0 @@
-#import "GPUImageColorBlendFilter.h"
-
-/**
- * Color blend mode based upon pseudo code from the PDF specification.
- */
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- highp float lum(lowp vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- lowp vec3 clipcolor(lowp vec3 c) {
- highp float l = lum(c);
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- lowp vec3 setlum(lowp vec3 c, highp float l) {
- highp float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- void main()
- {
- highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#else
-NSString *const kGPUImageColorBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- float lum(vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- vec3 clipcolor(vec3 c) {
- float l = lum(c);
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- vec3 setlum(vec3 c, float l) {
- float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- void main()
- {
- vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(overlayColor.rgb, lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#endif
-
-
-@implementation GPUImageColorBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.h
deleted file mode 100755
index 50ebb3f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-/** Applies a color burn blend of two images
- */
-@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.m
deleted file mode 100755
index 5d6ff60..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorBurnBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageColorBurnBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- mediump vec4 whiteColor = vec4(1.0);
- gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
- }
-);
-#else
-NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- vec4 whiteColor = vec4(1.0);
- gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
- }
-);
-#endif
-
-@implementation GPUImageColorBurnBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorBurnBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.h
deleted file mode 100755
index 0f541c4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-/** Applies a color dodge blend of two images
- */
-@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.m
deleted file mode 100755
index 6a38827..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorDodgeBlendFilter.m
+++ /dev/null
@@ -1,75 +0,0 @@
-#import "GPUImageColorDodgeBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
-(
-
- precision mediump float;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
- vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
-
- vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
- vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
-
- vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
-
- vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
-
- gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
- }
-);
-#else
-NSString *const kGPUImageColorDodgeBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- vec3 baseOverlayAlphaProduct = vec3(overlay.a * base.a);
- vec3 rightHandProduct = overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a);
-
- vec3 firstBlendColor = baseOverlayAlphaProduct + rightHandProduct;
- vec3 overlayRGB = clamp((overlay.rgb / clamp(overlay.a, 0.01, 1.0)) * step(0.0, overlay.a), 0.0, 0.99);
-
- vec3 secondBlendColor = (base.rgb * overlay.a) / (1.0 - overlayRGB) + rightHandProduct;
-
- vec3 colorChoice = step((overlay.rgb * base.a + base.rgb * overlay.a), baseOverlayAlphaProduct);
-
- gl_FragColor = vec4(mix(firstBlendColor, secondBlendColor, colorChoice), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageColorDodgeBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorDodgeBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.h
deleted file mode 100755
index aaeec43..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageColorInvertFilter : GPUImageFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.m
deleted file mode 100755
index 0a8798b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorInvertFilter.m
+++ /dev/null
@@ -1,46 +0,0 @@
-#import "GPUImageColorInvertFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4((1.0 - textureColor.rgb), textureColor.w);
- }
- );
-#endif
-
-@implementation GPUImageColorInvertFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageInvertFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.h
deleted file mode 100755
index 7588727..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Transforms the colors of an image by applying a matrix to them
- */
-@interface GPUImageColorMatrixFilter : GPUImageFilter
-{
- GLint colorMatrixUniform;
- GLint intensityUniform;
-}
-
-/** A 4x4 matrix used to transform each color in an image
- */
-@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;
-
-/** The degree to which the new transformed color replaces the original color for each pixel
- */
-@property(readwrite, nonatomic) CGFloat intensity;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.m
deleted file mode 100755
index 0e21c7e..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorMatrixFilter.m
+++ /dev/null
@@ -1,87 +0,0 @@
-#import "GPUImageColorMatrixFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform lowp mat4 colorMatrix;
- uniform lowp float intensity;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 outputColor = textureColor * colorMatrix;
-
- gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
- }
-);
-#else
-NSString *const kGPUImageColorMatrixFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform mat4 colorMatrix;
- uniform float intensity;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 outputColor = textureColor * colorMatrix;
-
- gl_FragColor = (intensity * outputColor) + ((1.0 - intensity) * textureColor);
- }
-);
-#endif
-
-@implementation GPUImageColorMatrixFilter
-
-@synthesize intensity = _intensity;
-@synthesize colorMatrix = _colorMatrix;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageColorMatrixFragmentShaderString]))
- {
- return nil;
- }
-
- colorMatrixUniform = [filterProgram uniformIndex:@"colorMatrix"];
- intensityUniform = [filterProgram uniformIndex:@"intensity"];
-
- self.intensity = 1.f;
- self.colorMatrix = (GPUMatrix4x4){
- {1.f, 0.f, 0.f, 0.f},
- {0.f, 1.f, 0.f, 0.f},
- {0.f, 0.f, 1.f, 0.f},
- {0.f, 0.f, 0.f, 1.f}
- };
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setIntensity:(CGFloat)newIntensity;
-{
- _intensity = newIntensity;
-
- [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
-}
-
-- (void)setColorMatrix:(GPUMatrix4x4)newColorMatrix;
-{
- _colorMatrix = newColorMatrix;
-
- [self setMatrix4f:_colorMatrix forUniform:colorMatrixUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.h
deleted file mode 100644
index c2edca5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageColorPackingFilter : GPUImageFilter
-{
- GLint texelWidthUniform, texelHeightUniform;
-
- CGFloat texelWidth, texelHeight;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.m
deleted file mode 100644
index 1a087ca..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageColorPackingFilter.m
+++ /dev/null
@@ -1,139 +0,0 @@
-#import "GPUImageColorPackingFilter.h"
-
-NSString *const kGPUImageColorPackingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform float texelWidth;
- uniform float texelHeight;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- upperLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, -texelHeight);
- upperRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, -texelHeight);
- lowerLeftInputTextureCoordinate = inputTextureCoordinate.xy + vec2(-texelWidth, texelHeight);
- lowerRightInputTextureCoordinate = inputTextureCoordinate.xy + vec2(texelWidth, texelHeight);
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- uniform sampler2D inputImageTexture;
-
- uniform mediump mat3 convolutionMatrix;
-
- varying highp vec2 outputTextureCoordinate;
-
- varying highp vec2 upperLeftInputTextureCoordinate;
- varying highp vec2 upperRightInputTextureCoordinate;
- varying highp vec2 lowerLeftInputTextureCoordinate;
- varying highp vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
- float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
- float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
- float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
-
- gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);
- }
-);
-#else
-NSString *const kGPUImageColorPackingFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- uniform mat3 convolutionMatrix;
-
- varying vec2 outputTextureCoordinate;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- float upperLeftIntensity = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
- float upperRightIntensity = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
- float lowerLeftIntensity = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
- float lowerRightIntensity = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
-
- gl_FragColor = vec4(upperLeftIntensity, upperRightIntensity, lowerLeftIntensity, lowerRightIntensity);
- }
-);
-#endif
-
-@implementation GPUImageColorPackingFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageColorPackingVertexShaderString fragmentShaderFromString:kGPUImageColorPackingFragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
-
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- texelWidth = 0.5 / inputTextureSize.width;
- texelHeight = 0.5 / inputTextureSize.height;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
- glUniform1f(texelWidthUniform, texelWidth);
- glUniform1f(texelHeightUniform, texelHeight);
- });
-}
-
-#pragma mark -
-#pragma mark Managing the display FBOs
-
-- (CGSize)sizeOfFBO;
-{
- CGSize outputSize = [self maximumOutputSize];
- if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )
- {
- CGSize quarterSize;
- quarterSize.width = inputTextureSize.width / 2.0;
- quarterSize.height = inputTextureSize.height / 2.0;
- return quarterSize;
- }
- else
- {
- return outputSize;
- }
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (CGSize)outputFrameSize;
-{
- CGSize quarterSize;
- quarterSize.width = inputTextureSize.width / 2.0;
- quarterSize.height = inputTextureSize.height / 2.0;
- return quarterSize;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.h
deleted file mode 100755
index e09e6dc..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Adjusts the contrast of the image
- */
-@interface GPUImageContrastFilter : GPUImageFilter
-{
- GLint contrastUniform;
-}
-
-/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
- */
-@property(readwrite, nonatomic) CGFloat contrast;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.m
deleted file mode 100755
index 5ed1e22..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageContrastFilter.m
+++ /dev/null
@@ -1,66 +0,0 @@
-#import "GPUImageContrastFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float contrast;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageContrastFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float contrast;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(((textureColor.rgb - vec3(0.5)) * contrast + vec3(0.5)), textureColor.w);
- }
- );
-#endif
-
-@implementation GPUImageContrastFilter
-
-@synthesize contrast = _contrast;
-
-#pragma mark -
-#pragma mark Initialization
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageContrastFragmentShaderString]))
- {
- return nil;
- }
-
- contrastUniform = [filterProgram uniformIndex:@"contrast"];
- self.contrast = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setContrast:(CGFloat)newValue;
-{
- _contrast = newValue;
-
- [self setFloat:_contrast forUniform:contrastUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.h
deleted file mode 100755
index 641fb7b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageCropFilter : GPUImageFilter
-{
- GLfloat cropTextureCoordinates[8];
-}
-
-// The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with 0.0, 0.0 being the upper left corner of the image
-@property(readwrite, nonatomic) CGRect cropRegion;
-
-// Initialization and teardown
-- (id)initWithCropRegion:(CGRect)newCropRegion;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.m
deleted file mode 100755
index 22e33c1..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCropFilter.m
+++ /dev/null
@@ -1,274 +0,0 @@
-#import "GPUImageCropFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
-);
-#else
-NSString *const kGPUImageCropFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
-);
-#endif
-
-@interface GPUImageCropFilter ()
-
-- (void)calculateCropTextureCoordinates;
-
-@end
-
-@interface GPUImageCropFilter()
-{
- CGSize originallySuppliedInputSize;
-}
-
-@end
-
-@implementation GPUImageCropFilter
-
-@synthesize cropRegion = _cropRegion;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithCropRegion:(CGRect)newCropRegion;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageCropFragmentShaderString]))
- {
- return nil;
- }
-
- self.cropRegion = newCropRegion;
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithCropRegion:CGRectMake(0.0, 0.0, 1.0, 1.0)]))
- {
- return nil;
- }
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (self.preventRendering)
- {
- return;
- }
-
-// if (overrideInputSize)
-// {
-// if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))
-// {
-// return;
-// }
-// else
-// {
-// CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));
-// inputTextureSize = insetRect.size;
-// return;
-// }
-// }
-
- CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];
- originallySuppliedInputSize = rotatedSize;
-
- CGSize scaledSize;
- scaledSize.width = rotatedSize.width * _cropRegion.size.width;
- scaledSize.height = rotatedSize.height * _cropRegion.size.height;
-
-
- if (CGSizeEqualToSize(scaledSize, CGSizeZero))
- {
- inputTextureSize = scaledSize;
- }
- else if (!CGSizeEqualToSize(inputTextureSize, scaledSize))
- {
- inputTextureSize = scaledSize;
- }
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (void)calculateCropTextureCoordinates;
-{
- CGFloat minX = _cropRegion.origin.x;
- CGFloat minY = _cropRegion.origin.y;
- CGFloat maxX = CGRectGetMaxX(_cropRegion);
- CGFloat maxY = CGRectGetMaxY(_cropRegion);
-
- switch(inputRotation)
- {
- case kGPUImageNoRotation: // Works
- {
- cropTextureCoordinates[0] = minX; // 0,0
- cropTextureCoordinates[1] = minY;
-
- cropTextureCoordinates[2] = maxX; // 1,0
- cropTextureCoordinates[3] = minY;
-
- cropTextureCoordinates[4] = minX; // 0,1
- cropTextureCoordinates[5] = maxY;
-
- cropTextureCoordinates[6] = maxX; // 1,1
- cropTextureCoordinates[7] = maxY;
- }; break;
- case kGPUImageRotateLeft: // Fixed
- {
- cropTextureCoordinates[0] = maxY; // 1,0
- cropTextureCoordinates[1] = 1.0 - maxX;
-
- cropTextureCoordinates[2] = maxY; // 1,1
- cropTextureCoordinates[3] = 1.0 - minX;
-
- cropTextureCoordinates[4] = minY; // 0,0
- cropTextureCoordinates[5] = 1.0 - maxX;
-
- cropTextureCoordinates[6] = minY; // 0,1
- cropTextureCoordinates[7] = 1.0 - minX;
- }; break;
- case kGPUImageRotateRight: // Fixed
- {
- cropTextureCoordinates[0] = minY; // 0,1
- cropTextureCoordinates[1] = 1.0 - minX;
-
- cropTextureCoordinates[2] = minY; // 0,0
- cropTextureCoordinates[3] = 1.0 - maxX;
-
- cropTextureCoordinates[4] = maxY; // 1,1
- cropTextureCoordinates[5] = 1.0 - minX;
-
- cropTextureCoordinates[6] = maxY; // 1,0
- cropTextureCoordinates[7] = 1.0 - maxX;
- }; break;
- case kGPUImageFlipVertical: // Works for me
- {
- cropTextureCoordinates[0] = minX; // 0,1
- cropTextureCoordinates[1] = maxY;
-
- cropTextureCoordinates[2] = maxX; // 1,1
- cropTextureCoordinates[3] = maxY;
-
- cropTextureCoordinates[4] = minX; // 0,0
- cropTextureCoordinates[5] = minY;
-
- cropTextureCoordinates[6] = maxX; // 1,0
- cropTextureCoordinates[7] = minY;
- }; break;
- case kGPUImageFlipHorizonal: // Works for me
- {
- cropTextureCoordinates[0] = maxX; // 1,0
- cropTextureCoordinates[1] = minY;
-
- cropTextureCoordinates[2] = minX; // 0,0
- cropTextureCoordinates[3] = minY;
-
- cropTextureCoordinates[4] = maxX; // 1,1
- cropTextureCoordinates[5] = maxY;
-
- cropTextureCoordinates[6] = minX; // 0,1
- cropTextureCoordinates[7] = maxY;
- }; break;
- case kGPUImageRotate180: // Fixed
- {
- cropTextureCoordinates[0] = maxX; // 1,1
- cropTextureCoordinates[1] = maxY;
-
- cropTextureCoordinates[2] = minX; // 0,1
- cropTextureCoordinates[3] = maxY;
-
- cropTextureCoordinates[4] = maxX; // 1,0
- cropTextureCoordinates[5] = minY;
-
- cropTextureCoordinates[6] = minX; // 0,0
- cropTextureCoordinates[7] = minY;
- }; break;
- case kGPUImageRotateRightFlipVertical: // Fixed
- {
- cropTextureCoordinates[0] = minY; // 0,0
- cropTextureCoordinates[1] = 1.0 - maxX;
-
- cropTextureCoordinates[2] = minY; // 0,1
- cropTextureCoordinates[3] = 1.0 - minX;
-
- cropTextureCoordinates[4] = maxY; // 1,0
- cropTextureCoordinates[5] = 1.0 - maxX;
-
- cropTextureCoordinates[6] = maxY; // 1,1
- cropTextureCoordinates[7] = 1.0 - minX;
- }; break;
- case kGPUImageRotateRightFlipHorizontal: // Fixed
- {
- cropTextureCoordinates[0] = maxY; // 1,1
- cropTextureCoordinates[1] = 1.0 - minX;
-
- cropTextureCoordinates[2] = maxY; // 1,0
- cropTextureCoordinates[3] = 1.0 - maxX;
-
- cropTextureCoordinates[4] = minY; // 0,1
- cropTextureCoordinates[5] = 1.0 - minX;
-
- cropTextureCoordinates[6] = minY; // 0,0
- cropTextureCoordinates[7] = 1.0 - maxX;
- }; break;
- }
-}
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- static const GLfloat cropSquareVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- [self renderToTextureWithVertices:cropSquareVertices textureCoordinates:cropTextureCoordinates];
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setCropRegion:(CGRect)newValue;
-{
- NSParameterAssert(newValue.origin.x >= 0 && newValue.origin.x <= 1 &&
- newValue.origin.y >= 0 && newValue.origin.y <= 1 &&
- newValue.size.width >= 0 && newValue.size.width <= 1 &&
- newValue.size.height >= 0 && newValue.size.height <= 1);
-
- _cropRegion = newValue;
- [self calculateCropTextureCoordinates];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self calculateCropTextureCoordinates];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.h b/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.h
deleted file mode 100644
index 569774f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageCrosshairGenerator : GPUImageFilter
-{
- GLint crosshairWidthUniform, crosshairColorUniform;
-}
-
-// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
-@property(readwrite, nonatomic) CGFloat crosshairWidth;
-
-// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
-- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-// Rendering
-- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.m b/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.m
deleted file mode 100644
index 9e2a29a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshairGenerator.m
+++ /dev/null
@@ -1,139 +0,0 @@
-#import "GPUImageCrosshairGenerator.h"
-
-NSString *const kGPUImageCrosshairVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- uniform float crosshairWidth;
-
- varying vec2 centerLocation;
- varying float pointSpacing;
-
- void main()
- {
- gl_Position = vec4(((position.xy * 2.0) - 1.0), 0.0, 1.0);
- gl_PointSize = crosshairWidth + 1.0;
- pointSpacing = 1.0 / crosshairWidth;
- centerLocation = vec2(pointSpacing * ceil(crosshairWidth / 2.0), pointSpacing * ceil(crosshairWidth / 2.0));
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING
-(
- uniform lowp vec3 crosshairColor;
-
- varying highp vec2 centerLocation;
- varying highp float pointSpacing;
-
- void main()
- {
- lowp vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);
- lowp float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);
-
- gl_FragColor = vec4(crosshairColor * axisTest, axisTest);
-// gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);
- }
-);
-#else
-NSString *const kGPUImageCrosshairFragmentShaderString = SHADER_STRING
-(
- GPUImageEscapedHashIdentifier(version 120)\n
-
- uniform vec3 crosshairColor;
-
- varying vec2 centerLocation;
- varying float pointSpacing;
-
- void main()
- {
- vec2 distanceFromCenter = abs(centerLocation - gl_PointCoord.xy);
- float axisTest = step(pointSpacing, gl_PointCoord.y) * step(distanceFromCenter.x, 0.09) + step(pointSpacing, gl_PointCoord.x) * step(distanceFromCenter.y, 0.09);
-
- gl_FragColor = vec4(crosshairColor * axisTest, axisTest);
- // gl_FragColor = vec4(distanceFromCenterInX, distanceFromCenterInY, 0.0, 1.0);
- }
-);
-#endif
-
-@implementation GPUImageCrosshairGenerator
-
-@synthesize crosshairWidth = _crosshairWidth;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageCrosshairVertexShaderString fragmentShaderFromString:kGPUImageCrosshairFragmentShaderString]))
- {
- return nil;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- crosshairWidthUniform = [filterProgram uniformIndex:@"crosshairWidth"];
- crosshairColorUniform = [filterProgram uniformIndex:@"crosshairColor"];
-
- self.crosshairWidth = 5.0;
- [self setCrosshairColorRed:0.0 green:1.0 blue:0.0];
- });
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;
-{
- if (self.preventRendering)
- {
- return;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-#else
- glEnable(GL_POINT_SPRITE);
- glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
-#endif
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(0.0, 0.0, 0.0, 0.0);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, crosshairCoordinates);
-
- glDrawArrays(GL_POINTS, 0, (GLsizei)numberOfCrosshairs);
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
- });
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // Prevent rendering of the frame by normal means
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setCrosshairWidth:(CGFloat)newValue;
-{
- _crosshairWidth = newValue;
-
- [self setFloat:_crosshairWidth forUniform:crosshairWidthUniform program:filterProgram];
-}
-
-- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 crosshairColor = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:crosshairColor forUniform:crosshairColorUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.h
deleted file mode 100755
index dab1896..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageCrosshatchFilter : GPUImageFilter
-{
- GLint crossHatchSpacingUniform, lineWidthUniform;
-}
-// The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
-@property(readwrite, nonatomic) CGFloat crossHatchSpacing;
-
-// A relative width for the crosshatch lines. The default is 0.003.
-@property(readwrite, nonatomic) CGFloat lineWidth;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.m
deleted file mode 100755
index 51dbd59..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageCrosshatchFilter.m
+++ /dev/null
@@ -1,163 +0,0 @@
-#import "GPUImageCrosshatchFilter.h"
-
-// Shader code based on http://machinesdontcare.wordpress.com/
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float crossHatchSpacing;
- uniform highp float lineWidth;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- highp float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);
-
- lowp vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);
- if (luminance < 1.00)
- {
- if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.75)
- {
- if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.50)
- {
- if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.3)
- {
- if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
-
- gl_FragColor = colorToDisplay;
- }
-);
-#else
-NSString *const kGPUImageCrosshatchFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float crossHatchSpacing;
- uniform float lineWidth;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- float luminance = dot(texture2D(inputImageTexture, textureCoordinate).rgb, W);
-
- vec4 colorToDisplay = vec4(1.0, 1.0, 1.0, 1.0);
- if (luminance < 1.00)
- {
- if (mod(textureCoordinate.x + textureCoordinate.y, crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.75)
- {
- if (mod(textureCoordinate.x - textureCoordinate.y, crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.50)
- {
- if (mod(textureCoordinate.x + textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
- if (luminance < 0.3)
- {
- if (mod(textureCoordinate.x - textureCoordinate.y - (crossHatchSpacing / 2.0), crossHatchSpacing) <= lineWidth)
- {
- colorToDisplay = vec4(0.0, 0.0, 0.0, 1.0);
- }
- }
-
- gl_FragColor = colorToDisplay;
- }
-);
-#endif
-
-@implementation GPUImageCrosshatchFilter
-
-@synthesize crossHatchSpacing = _crossHatchSpacing;
-@synthesize lineWidth = _lineWidth;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageCrosshatchFragmentShaderString]))
- {
- return nil;
- }
-
- crossHatchSpacingUniform = [filterProgram uniformIndex:@"crossHatchSpacing"];
- lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"];
-
- self.crossHatchSpacing = 0.03;
- self.lineWidth = 0.003;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setCrossHatchSpacing:(CGFloat)newValue;
-{
- CGFloat singlePixelSpacing;
- if (inputTextureSize.width != 0.0)
- {
- singlePixelSpacing = 1.0 / inputTextureSize.width;
- }
- else
- {
- singlePixelSpacing = 1.0 / 2048.0;
- }
-
- if (newValue < singlePixelSpacing)
- {
- _crossHatchSpacing = singlePixelSpacing;
- }
- else
- {
- _crossHatchSpacing = newValue;
- }
-
- [self setFloat:_crossHatchSpacing forUniform:crossHatchSpacingUniform program:filterProgram];
-}
-
-- (void)setLineWidth:(CGFloat)newValue;
-{
- _lineWidth = newValue;
-
- [self setFloat:_lineWidth forUniform:lineWidthUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.h
deleted file mode 100755
index 5dfe340..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.m
deleted file mode 100644
index 85ec9e8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDarkenBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageDarkenBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
- }
-);
-#else
-NSString *const kGPUImageDarkenBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(min(overlayer.rgb * base.a, base.rgb * overlayer.a) + overlayer.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlayer.a), 1.0);
- }
- );
-#endif
-
-@implementation GPUImageDarkenBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDarkenBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.h
deleted file mode 100755
index 7c7dfc2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.m
deleted file mode 100755
index 01bf09b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDifferenceBlendFilter.m
+++ /dev/null
@@ -1,50 +0,0 @@
-#import "GPUImageDifferenceBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageDifferenceBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- gl_FragColor = vec4(abs(textureColor2.rgb - textureColor.rgb), textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageDifferenceBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDifferenceBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.h
deleted file mode 100644
index 59423a3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
-// This extends out bright features, and is most commonly used with black-and-white thresholded images.
-
-extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
-extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
-extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
-extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;
-
-@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter
-
-// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
-- (id)initWithRadius:(NSUInteger)dilationRadius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.m
deleted file mode 100644
index df06518..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDilationFilter.m
+++ /dev/null
@@ -1,431 +0,0 @@
-#import "GPUImageDilationFilter.h"
-
-@implementation GPUImageDilationFilter
-
-NSString *const kGPUImageDilationRadiusOneVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec2 inputTextureCoordinate;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
-
- centerTextureCoordinate = inputTextureCoordinate;
- oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
- oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
- }
-);
-
-NSString *const kGPUImageDilationRadiusTwoVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec2 inputTextureCoordinate;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
-
- centerTextureCoordinate = inputTextureCoordinate;
- oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
- oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
- twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
- twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusThreeVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec2 inputTextureCoordinate;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
-
- centerTextureCoordinate = inputTextureCoordinate;
- oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
- oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
- twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
- twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
- threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);
- threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusFourVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec2 inputTextureCoordinate;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 offset = vec2(texelWidthOffset, texelHeightOffset);
-
- centerTextureCoordinate = inputTextureCoordinate;
- oneStepNegativeTextureCoordinate = inputTextureCoordinate - offset;
- oneStepPositiveTextureCoordinate = inputTextureCoordinate + offset;
- twoStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 2.0);
- twoStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 2.0);
- threeStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 3.0);
- threeStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 3.0);
- fourStepsNegativeTextureCoordinate = inputTextureCoordinate - (offset * 4.0);
- fourStepsPositiveTextureCoordinate = inputTextureCoordinate + (offset * 4.0);
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
-
- lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
-
- lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
-
- lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
- float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
- float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
-
- lowp float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
- maxValue = max(maxValue, fourStepsPositiveIntensity);
- maxValue = max(maxValue, fourStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-#else
-NSString *const kGPUImageDilationRadiusOneFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
-
- float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusTwoFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
-
- float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusThreeFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
-
- float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-
-NSString *const kGPUImageDilationRadiusFourFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
- float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
- float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
-
- float maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
- maxValue = max(maxValue, fourStepsPositiveIntensity);
- maxValue = max(maxValue, fourStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(maxValue), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithRadius:(NSUInteger)dilationRadius;
-{
- NSString *fragmentShaderForThisRadius = nil;
- NSString *vertexShaderForThisRadius = nil;
-
- switch (dilationRadius)
- {
- case 0:
- case 1:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageDilationRadiusOneFragmentShaderString;
- }; break;
- case 2:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageDilationRadiusTwoFragmentShaderString;
- }; break;
- case 3:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageDilationRadiusThreeFragmentShaderString;
- }; break;
- case 4:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString;
- }; break;
- default:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageDilationRadiusFourFragmentShaderString;
- }; break;
- }
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h
deleted file mode 100644
index fdffb9f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
-{
- GLint texelWidthUniform, texelHeightUniform;
- GLint upperThresholdUniform, lowerThresholdUniform;
-
- BOOL hasOverriddenImageSizeFactor;
-}
-
-// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
-@property(readwrite, nonatomic) CGFloat texelWidth;
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
-@property(readwrite, nonatomic) CGFloat upperThreshold;
-@property(readwrite, nonatomic) CGFloat lowerThreshold;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m
deleted file mode 100644
index b442f3a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalNonMaximumSuppressionFilter.m
+++ /dev/null
@@ -1,141 +0,0 @@
-#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
-
-@implementation GPUImageDirectionalNonMaximumSuppressionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- precision mediump float;
-
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform highp float texelWidth;
- uniform highp float texelHeight;
- uniform mediump float upperThreshold;
- uniform mediump float lowerThreshold;
-
- void main()
- {
- vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);
-
- float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;
- float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;
-
- float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);
- multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);
-
- float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);
- multiplier = multiplier * thresholdCompliance;
-
- gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);
- }
-);
-#else
-NSString *const kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float texelWidth;
- uniform float texelHeight;
- uniform float upperThreshold;
- uniform float lowerThreshold;
-
- void main()
- {
- vec3 currentGradientAndDirection = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec2 gradientDirection = ((currentGradientAndDirection.gb * 2.0) - 1.0) * vec2(texelWidth, texelHeight);
-
- float firstSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate + gradientDirection).r;
- float secondSampledGradientMagnitude = texture2D(inputImageTexture, textureCoordinate - gradientDirection).r;
-
- float multiplier = step(firstSampledGradientMagnitude, currentGradientAndDirection.r);
- multiplier = multiplier * step(secondSampledGradientMagnitude, currentGradientAndDirection.r);
-
- float thresholdCompliance = smoothstep(lowerThreshold, upperThreshold, currentGradientAndDirection.r);
- multiplier = multiplier * thresholdCompliance;
-
- gl_FragColor = vec4(multiplier, multiplier, multiplier, 1.0);
- }
-);
-#endif
-
-@synthesize texelWidth = _texelWidth;
-@synthesize texelHeight = _texelHeight;
-@synthesize upperThreshold = _upperThreshold;
-@synthesize lowerThreshold = _lowerThreshold;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalNonmaximumSuppressionFragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
- upperThresholdUniform = [filterProgram uniformIndex:@"upperThreshold"];
- lowerThresholdUniform = [filterProgram uniformIndex:@"lowerThreshold"];
-
- self.upperThreshold = 0.5;
- self.lowerThreshold = 0.1;
-
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- if (!hasOverriddenImageSizeFactor)
- {
- _texelWidth = 1.0 / filterFrameSize.width;
- _texelHeight = 1.0 / filterFrameSize.height;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
- glUniform1f(texelWidthUniform, _texelWidth);
- glUniform1f(texelHeightUniform, _texelHeight);
- });
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelWidth = newValue;
-
- [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelHeight = newValue;
-
- [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];
-}
-
-- (void)setLowerThreshold:(CGFloat)newValue;
-{
- _lowerThreshold = newValue;
-
- [self setFloat:_lowerThreshold forUniform:lowerThresholdUniform program:filterProgram];
-}
-
-- (void)setUpperThreshold:(CGFloat)newValue;
-{
- _upperThreshold = newValue;
-
- [self setFloat:_upperThreshold forUniform:upperThresholdUniform program:filterProgram];
-}
-
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.h
deleted file mode 100644
index cfccc89..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.m
deleted file mode 100644
index a3575e3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDirectionalSobelEdgeDetectionFilter.m
+++ /dev/null
@@ -1,103 +0,0 @@
-#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
-
-@implementation GPUImageDirectionalSobelEdgeDetectionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- precision mediump float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
-
- vec2 gradientDirection;
- gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
- gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
-
- float gradientMagnitude = length(gradientDirection);
- vec2 normalizedDirection = normalize(gradientDirection);
- normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away
- normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0
-
- gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);
- }
-);
-#else
-NSString *const kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
-
- vec2 gradientDirection;
- gradientDirection.x = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
- gradientDirection.y = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
-
- float gradientMagnitude = length(gradientDirection);
- vec2 normalizedDirection = normalize(gradientDirection);
- normalizedDirection = sign(normalizedDirection) * floor(abs(normalizedDirection) + 0.617316); // Offset by 1-sin(pi/8) to set to 0 if near axis, 1 if away
- normalizedDirection = (normalizedDirection + 1.0) * 0.5; // Place -1.0 - 1.0 within 0 - 1.0
-
- gl_FragColor = vec4(gradientMagnitude, normalizedDirection.x, normalizedDirection.y, 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDirectionalSobelEdgeDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.h
deleted file mode 100755
index b4e5720..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
-{
- GLint mixUniform;
-}
-
-// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
-@property(readwrite, nonatomic) CGFloat mix;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.m
deleted file mode 100755
index b4a5609..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDissolveBlendFilter.m
+++ /dev/null
@@ -1,72 +0,0 @@
-#import "GPUImageDissolveBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
- uniform lowp float mixturePercent;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
- }
-);
-#else
-NSString *const kGPUImageDissolveBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
- uniform float mixturePercent;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = mix(textureColor, textureColor2, mixturePercent);
- }
-);
-#endif
-
-@implementation GPUImageDissolveBlendFilter
-
-@synthesize mix = _mix;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDissolveBlendFragmentShaderString]))
- {
- return nil;
- }
-
- mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
- self.mix = 0.5;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setMix:(CGFloat)newValue;
-{
- _mix = newValue;
-
- [self setFloat:_mix forUniform:mixUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.h
deleted file mode 100644
index ad798e2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.m
deleted file mode 100644
index 63ee071..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageDivideBlendFilter.m
+++ /dev/null
@@ -1,96 +0,0 @@
-#import "GPUImageDivideBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- mediump float ra;
- if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))
- ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- else
- ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
-
-
- mediump float ga;
- if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))
- ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- else
- ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
-
-
- mediump float ba;
- if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))
- ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- else
- ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
-
- mediump float a = overlay.a + base.a - overlay.a * base.a;
-
- gl_FragColor = vec4(ra, ga, ba, a);
- }
-);
-#else
-NSString *const kGPUImageDivideBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- float ra;
- if (overlay.a == 0.0 || ((base.r / overlay.r) > (base.a / overlay.a)))
- ra = overlay.a * base.a + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- else
- ra = (base.r * overlay.a * overlay.a) / overlay.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
-
-
- float ga;
- if (overlay.a == 0.0 || ((base.g / overlay.g) > (base.a / overlay.a)))
- ga = overlay.a * base.a + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- else
- ga = (base.g * overlay.a * overlay.a) / overlay.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
-
-
- float ba;
- if (overlay.a == 0.0 || ((base.b / overlay.b) > (base.a / overlay.a)))
- ba = overlay.a * base.a + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- else
- ba = (base.b * overlay.a * overlay.a) / overlay.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
-
- float a = overlay.a + base.a - overlay.a * base.a;
-
- gl_FragColor = vec4(ra, ga, ba, a);
- }
- );
-#endif
-
-@implementation GPUImageDivideBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageDivideBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.h
deleted file mode 100755
index dbd21e8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#import "GPUImage3x3ConvolutionFilter.h"
-
-@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter
-
-// The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
-@property(readwrite, nonatomic) CGFloat intensity;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.m
deleted file mode 100755
index 6ba48cd..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageEmbossFilter.m
+++ /dev/null
@@ -1,49 +0,0 @@
-#import "GPUImageEmbossFilter.h"
-
-@implementation GPUImageEmbossFilter
-
-@synthesize intensity = _intensity;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- self.intensity = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setIntensity:(CGFloat)newValue;
-{
-// [(GPUImage3x3ConvolutionFilter *)filter setConvolutionMatrix:(GPUMatrix3x3){
-// {-2.0f, -1.0f, 0.0f},
-// {-1.0f, 1.0f, 1.0f},
-// { 0.0f, 1.0f, 2.0f}
-// }];
-
- _intensity = newValue;
-
- GPUMatrix3x3 newConvolutionMatrix;
- newConvolutionMatrix.one.one = _intensity * (-2.0);
- newConvolutionMatrix.one.two = -_intensity;
- newConvolutionMatrix.one.three = 0.0f;
-
- newConvolutionMatrix.two.one = -_intensity;
- newConvolutionMatrix.two.two = 1.0;
- newConvolutionMatrix.two.three = _intensity;
-
- newConvolutionMatrix.three.one = 0.0f;
- newConvolutionMatrix.three.two = _intensity;
- newConvolutionMatrix.three.three = _intensity * 2.0;
-
- self.convolutionKernel = newConvolutionMatrix;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.h
deleted file mode 100644
index b311a26..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
-// This extends out dark features, and is most commonly used with black-and-white thresholded images.
-
-@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter
-
-// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
-- (id)initWithRadius:(NSUInteger)erosionRadius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.m
deleted file mode 100644
index 05f4f28..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageErosionFilter.m
+++ /dev/null
@@ -1,312 +0,0 @@
-#import "GPUImageErosionFilter.h"
-#import "GPUImageDilationFilter.h"
-
-@implementation GPUImageErosionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
-
- lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
-
- lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
-
- lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
- float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
- float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
-
- lowp float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
- minValue = min(minValue, fourStepsPositiveIntensity);
- minValue = min(minValue, fourStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-#else
-NSString *const kGPUImageErosionRadiusOneFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
-
- float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusTwoFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
-
- float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusThreeFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
-
- float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-
-NSString *const kGPUImageErosionRadiusFourFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate).r;
- float oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate).r;
- float oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate).r;
- float twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate).r;
- float twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate).r;
- float threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate).r;
- float threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate).r;
- float fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate).r;
- float fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate).r;
-
- float minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
- minValue = min(minValue, fourStepsPositiveIntensity);
- minValue = min(minValue, fourStepsNegativeIntensity);
-
- gl_FragColor = vec4(vec3(minValue), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithRadius:(NSUInteger)dilationRadius;
-{
- NSString *fragmentShaderForThisRadius = nil;
- NSString *vertexShaderForThisRadius = nil;
-
- switch (dilationRadius)
- {
- case 0:
- case 1:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageErosionRadiusOneFragmentShaderString;
- }; break;
- case 2:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageErosionRadiusTwoFragmentShaderString;
- }; break;
- case 3:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageErosionRadiusThreeFragmentShaderString;
- }; break;
- case 4:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString;
- }; break;
- default:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageErosionRadiusFourFragmentShaderString;
- }; break;
- }
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.h
deleted file mode 100755
index f7c83f5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.m
deleted file mode 100755
index c364159..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageExclusionBlendFilter.m
+++ /dev/null
@@ -1,56 +0,0 @@
-#import "GPUImageExclusionBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- // Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)
-
- gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
- }
-);
-#else
-NSString *const kGPUImageExclusionBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- // Dca = (Sca.Da + Dca.Sa - 2.Sca.Dca) + Sca.(1 - Da) + Dca.(1 - Sa)
-
- gl_FragColor = vec4((overlay.rgb * base.a + base.rgb * overlay.a - 2.0 * overlay.rgb * base.rgb) + overlay.rgb * (1.0 - base.a) + base.rgb * (1.0 - overlay.a), base.a);
- }
- );
-#endif
-
-@implementation GPUImageExclusionBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageExclusionBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.h
deleted file mode 100755
index 886a052..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageExposureFilter : GPUImageFilter
-{
- GLint exposureUniform;
-}
-
-// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
-@property(readwrite, nonatomic) CGFloat exposure;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.m
deleted file mode 100755
index d5ee2c9..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageExposureFilter.m
+++ /dev/null
@@ -1,66 +0,0 @@
-#import "GPUImageExposureFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform highp float exposure;
-
- void main()
- {
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageExposureFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float exposure;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.rgb * pow(2.0, exposure), textureColor.w);
- }
-);
-#endif
-
-@implementation GPUImageExposureFilter
-
-@synthesize exposure = _exposure;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageExposureFragmentShaderString]))
- {
- return nil;
- }
-
- exposureUniform = [filterProgram uniformIndex:@"exposure"];
- self.exposure = 0.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setExposure:(CGFloat)newValue;
-{
- _exposure = newValue;
-
- [self setFloat:_exposure forUniform:exposureUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.h
deleted file mode 100644
index 86e7cf4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.h
+++ /dev/null
@@ -1,33 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGrayscaleFilter;
-@class GPUImage3x3TextureSamplingFilter;
-@class GPUImageNonMaximumSuppressionFilter;
-
-/*
- An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:
-
- E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.
- E. Rosten and T. Drummond. Machine learning for high-speed corner detection. European Conference on Computer Vision, 2006.
-
- For more about the FAST feature detector, see the resources here:
- http://www.edwardrosten.com/work/fast.html
- */
-
-typedef enum { kGPUImageFAST12Contiguous, kGPUImageFAST12ContiguousNonMaximumSuppressed} GPUImageFASTDetectorType;
-
-@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
-{
- GPUImageGrayscaleFilter *luminanceReductionFilter;
- GPUImage3x3TextureSamplingFilter *featureDetectionFilter;
- GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
-// Generate a lookup texture based on the bit patterns
-
-// Step 1: convert to monochrome if necessary
-// Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
-// Step 3: do non-maximum suppression of close corner points
-}
-
-- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.m
deleted file mode 100644
index b04a24a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFASTCornerDetectionFilter.m
+++ /dev/null
@@ -1,89 +0,0 @@
-#import "GPUImageFASTCornerDetectionFilter.h"
-
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImage3x3TextureSamplingFilter.h"
-#import "GPUImageNonMaximumSuppressionFilter.h"
-
-// 14 total texture coordinates from vertex shader for non-dependent reads
-// 3 texture coordinates for dependent reads, then
-
-NSString *const kGPUImageFASTDetectorFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D lookupTable;
-
- void main()
- {
- lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
- lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
-
- lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
- byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
- byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
- byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
- byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
- byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
- byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
- byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);
-
- // TODO: Replace the above with a dot product and two vec4s
- // TODO: Apply step to a matrix, rather than individually
-
- gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
- }
- );
-
-
-@implementation GPUImageFASTCornerDetectionFilter
-
-- (id)init;
-{
- if (!(self = [self initWithFASTDetectorVariant:kGPUImageFAST12ContiguousNonMaximumSuppressed]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-// [derivativeFilter addTarget:blurFilter];
-// [blurFilter addTarget:harrisCornerDetectionFilter];
-// [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];
- // [simpleThresholdFilter addTarget:colorPackingFilter];
-
-// self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];
- // self.terminalFilter = colorPackingFilter;
-// self.terminalFilter = nonMaximumSuppressionFilter;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.h
deleted file mode 100644
index cb0b82f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageFalseColorFilter : GPUImageFilter
-{
- GLint firstColorUniform, secondColorUniform;
-}
-
-// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) amd (1.0, 0.0, 0.0).
-@property(readwrite, nonatomic) GPUVector4 firstColor;
-@property(readwrite, nonatomic) GPUVector4 secondColor;
-
-- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.m
deleted file mode 100644
index f514dba..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFalseColorFilter.m
+++ /dev/null
@@ -1,101 +0,0 @@
-#import "GPUImageFalseColorFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float intensity;
- uniform vec3 firstColor;
- uniform vec3 secondColor;
-
- const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
-
- gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);
- }
-);
-#else
-NSString *const kGPUFalseColorFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float intensity;
- uniform vec3 firstColor;
- uniform vec3 secondColor;
-
- const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
-
- gl_FragColor = vec4( mix(firstColor.rgb, secondColor.rgb, luminance), textureColor.a);
- }
-);
-#endif
-
-
-@implementation GPUImageFalseColorFilter
-
-@synthesize secondColor = _secondColor;
-@synthesize firstColor = _firstColor;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUFalseColorFragmentShaderString]))
- {
- return nil;
- }
-
- firstColorUniform = [filterProgram uniformIndex:@"firstColor"];
- secondColorUniform = [filterProgram uniformIndex:@"secondColor"];
-
- self.firstColor = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};
- self.secondColor = (GPUVector4){1.0f, 0.0f, 0.0f, 1.0f};
-
- return self;
-}
-
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setFirstColor:(GPUVector4)newValue;
-{
- _firstColor = newValue;
-
- [self setFirstColorRed:_firstColor.one green:_firstColor.two blue:_firstColor.three];
-}
-
-- (void)setSecondColor:(GPUVector4)newValue;
-{
- _secondColor = newValue;
-
- [self setSecondColorRed:_secondColor.one green:_secondColor.two blue:_secondColor.three];
-}
-
-- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 firstColor = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:firstColor forUniform:firstColorUniform program:filterProgram];
-}
-
-- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 secondColor = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:secondColor forUniform:secondColorUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageFilter.h
deleted file mode 100755
index 0171aa8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilter.h
+++ /dev/null
@@ -1,134 +0,0 @@
-#import "GPUImageOutput.h"
-
-#define STRINGIZE(x) #x
-#define STRINGIZE2(x) STRINGIZE(x)
-#define SHADER_STRING(text) @ STRINGIZE2(text)
-
-#define GPUImageHashIdentifier #
-#define GPUImageWrappedLabel(x) x
-#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a
-
-extern NSString *const kGPUImageVertexShaderString;
-extern NSString *const kGPUImagePassthroughFragmentShaderString;
-
-struct GPUVector4 {
- GLfloat one;
- GLfloat two;
- GLfloat three;
- GLfloat four;
-};
-typedef struct GPUVector4 GPUVector4;
-
-struct GPUVector3 {
- GLfloat one;
- GLfloat two;
- GLfloat three;
-};
-typedef struct GPUVector3 GPUVector3;
-
-struct GPUMatrix4x4 {
- GPUVector4 one;
- GPUVector4 two;
- GPUVector4 three;
- GPUVector4 four;
-};
-typedef struct GPUMatrix4x4 GPUMatrix4x4;
-
-struct GPUMatrix3x3 {
- GPUVector3 one;
- GPUVector3 two;
- GPUVector3 three;
-};
-typedef struct GPUMatrix3x3 GPUMatrix3x3;
-
-/** GPUImage's base filter class
-
- Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
- */
-@interface GPUImageFilter : GPUImageOutput
-{
- GPUImageFramebuffer *firstInputFramebuffer;
-
- GLProgram *filterProgram;
- GLint filterPositionAttribute, filterTextureCoordinateAttribute;
- GLint filterInputTextureUniform;
- GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha;
-
- BOOL isEndProcessing;
-
- CGSize currentFilterSize;
- GPUImageRotationMode inputRotation;
-
- BOOL currentlyReceivingMonochromeInput;
-
- NSMutableDictionary *uniformStateRestorationBlocks;
- dispatch_semaphore_t imageCaptureSemaphore;
-}
-
-@property(readonly) CVPixelBufferRef renderTarget;
-@property(readwrite, nonatomic) BOOL preventRendering;
-@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;
-
-/// @name Initialization and teardown
-
-/**
- Initialize with vertex and fragment shaders
-
- You make take advantage of the SHADER_STRING macro to write your shaders in-line.
- @param vertexShaderString Source code of the vertex shader to use
- @param fragmentShaderString Source code of the fragment shader to use
- */
-- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
-
-/**
- Initialize with a fragment shader
-
- You may take advantage of the SHADER_STRING macro to write your shader in-line.
- @param fragmentShaderString Source code of fragment shader to use
- */
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-/**
- Initialize with a fragment shader
- @param fragmentShaderFilename Filename of fragment shader to load
- */
-- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
-- (void)initializeAttributes;
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
-- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
-
-/// @name Managing the display FBOs
-/** Size of the frame buffer object
- */
-- (CGSize)sizeOfFBO;
-
-/// @name Rendering
-+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
-- (CGSize)outputFrameSize;
-
-/// @name Input parameters
-- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
-- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
-- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
-- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
-- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
-- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
-- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
-- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;
-
-- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-
-- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
-- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageFilter.m
deleted file mode 100755
index 406d707..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilter.m
+++ /dev/null
@@ -1,753 +0,0 @@
-#import "GPUImageFilter.h"
-#import "GPUImagePicture.h"
-#import
-
-// Hardcode the vertex shader for standard filters, but this can be overridden
-NSString *const kGPUImageVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- varying vec2 textureCoordinate;
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = inputTextureCoordinate.xy;
- }
- );
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-
-NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
-);
-
-#else
-
-NSString *const kGPUImagePassthroughFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
-);
-#endif
-
-
-@implementation GPUImageFilter
-
-@synthesize preventRendering = _preventRendering;
-@synthesize currentlyReceivingMonochromeInput;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
- _preventRendering = NO;
- currentlyReceivingMonochromeInput = NO;
- inputRotation = kGPUImageNoRotation;
- backgroundColorRed = 0.0;
- backgroundColorGreen = 0.0;
- backgroundColorBlue = 0.0;
- backgroundColorAlpha = 0.0;
- imageCaptureSemaphore = dispatch_semaphore_create(0);
- dispatch_semaphore_signal(imageCaptureSemaphore);
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];
-
- if (!filterProgram.initialized)
- {
- [self initializeAttributes];
-
- if (![filterProgram link])
- {
- NSString *progLog = [filterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [filterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [filterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- filterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- filterPositionAttribute = [filterProgram attributeIndex:@"position"];
- filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
- filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glEnableVertexAttribArray(filterPositionAttribute);
- glEnableVertexAttribArray(filterTextureCoordinateAttribute);
- });
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [self initWithVertexShaderFromString:kGPUImageVertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
-{
- NSString *fragmentShaderPathname = [[NSBundle mainBundle] pathForResource:fragmentShaderFilename ofType:@"fsh"];
- NSString *fragmentShaderString = [NSString stringWithContentsOfFile:fragmentShaderPathname encoding:NSUTF8StringEncoding error:nil];
-
- if (!(self = [self initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)initializeAttributes;
-{
- [filterProgram addAttribute:@"position"];
- [filterProgram addAttribute:@"inputTextureCoordinate"];
-
- // Override this, calling back to this super method, in order to add new attributes to your vertex shader
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- // This is where you can override to provide some custom setup, if your filter has a size-dependent element
-}
-
-- (void)dealloc
-{
-#if !OS_OBJECT_USE_OBJC
- if (imageCaptureSemaphore != NULL)
- {
- dispatch_release(imageCaptureSemaphore);
- }
-#endif
-
-}
-
-#pragma mark -
-#pragma mark Still image processing
-
-- (void)useNextFrameForImageCapture;
-{
- usingNextFrameForImageCapture = YES;
-
- // Set the semaphore high, if it isn't already
- if (dispatch_semaphore_wait(imageCaptureSemaphore, DISPATCH_TIME_NOW) != 0)
- {
- return;
- }
-}
-
-- (CGImageRef)newCGImageFromCurrentlyProcessedOutput
-{
- // Give it three seconds to process, then abort if they forgot to set up the image capture properly
- double timeoutForImageCapture = 3.0;
- dispatch_time_t convertedTimeout = dispatch_time(DISPATCH_TIME_NOW, timeoutForImageCapture * NSEC_PER_SEC);
-
- if (dispatch_semaphore_wait(imageCaptureSemaphore, convertedTimeout) != 0)
- {
- return NULL;
- }
-
- GPUImageFramebuffer* framebuffer = [self framebufferForOutput];
-
- usingNextFrameForImageCapture = NO;
- dispatch_semaphore_signal(imageCaptureSemaphore);
-
- CGImageRef image = [framebuffer newCGImageFromFramebufferContents];
- return image;
-}
-
-#pragma mark -
-#pragma mark Managing the display FBOs
-
-- (CGSize)sizeOfFBO;
-{
- CGSize outputSize = [self maximumOutputSize];
- if ( (CGSizeEqualToSize(outputSize, CGSizeZero)) || (inputTextureSize.width < outputSize.width) )
- {
- return inputTextureSize;
- }
- else
- {
- return outputSize;
- }
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
-{
- static const GLfloat noRotationTextureCoordinates[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- static const GLfloat rotateLeftTextureCoordinates[] = {
- 1.0f, 0.0f,
- 1.0f, 1.0f,
- 0.0f, 0.0f,
- 0.0f, 1.0f,
- };
-
- static const GLfloat rotateRightTextureCoordinates[] = {
- 0.0f, 1.0f,
- 0.0f, 0.0f,
- 1.0f, 1.0f,
- 1.0f, 0.0f,
- };
-
- static const GLfloat verticalFlipTextureCoordinates[] = {
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- };
-
- static const GLfloat horizontalFlipTextureCoordinates[] = {
- 1.0f, 0.0f,
- 0.0f, 0.0f,
- 1.0f, 1.0f,
- 0.0f, 1.0f,
- };
-
- static const GLfloat rotateRightVerticalFlipTextureCoordinates[] = {
- 0.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 0.0f,
- 1.0f, 1.0f,
- };
-
- static const GLfloat rotateRightHorizontalFlipTextureCoordinates[] = {
- 1.0f, 1.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 0.0f, 0.0f,
- };
-
- static const GLfloat rotate180TextureCoordinates[] = {
- 1.0f, 1.0f,
- 0.0f, 1.0f,
- 1.0f, 0.0f,
- 0.0f, 0.0f,
- };
-
- switch(rotationMode)
- {
- case kGPUImageNoRotation: return noRotationTextureCoordinates;
- case kGPUImageRotateLeft: return rotateLeftTextureCoordinates;
- case kGPUImageRotateRight: return rotateRightTextureCoordinates;
- case kGPUImageFlipVertical: return verticalFlipTextureCoordinates;
- case kGPUImageFlipHorizonal: return horizontalFlipTextureCoordinates;
- case kGPUImageRotateRightFlipVertical: return rotateRightVerticalFlipTextureCoordinates;
- case kGPUImageRotateRightFlipHorizontal: return rotateRightHorizontalFlipTextureCoordinates;
- case kGPUImageRotate180: return rotate180TextureCoordinates;
- }
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- [firstInputFramebuffer unlock];
-
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
-{
- if (self.frameProcessingCompletionBlock != NULL)
- {
- self.frameProcessingCompletionBlock(self, frameTime);
- }
-
- // Get all targets the framebuffer so they can grab a lock on it
- for (id currentTarget in targets)
- {
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
- [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
- }
- }
-
- // Release our hold so it can return to the cache immediately upon processing
- [[self framebufferForOutput] unlock];
-
- if (usingNextFrameForImageCapture)
- {
-// usingNextFrameForImageCapture = NO;
- }
- else
- {
- [self removeOutputFramebuffer];
- }
-
- // Trigger processing last, so that our unlock comes first in serial execution, avoiding the need for a callback
- for (id currentTarget in targets)
- {
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
- [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
- }
- }
-}
-
-- (CGSize)outputFrameSize;
-{
- return inputTextureSize;
-}
-
-#pragma mark -
-#pragma mark Input parameters
-
-- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
-{
- backgroundColorRed = redComponent;
- backgroundColorGreen = greenComponent;
- backgroundColorBlue = blueComponent;
- backgroundColorAlpha = alphaComponent;
-}
-
-- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setInteger:newInteger forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setFloat:newFloat forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setSize:newSize forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setPoint:newPoint forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setVec3:newVec3 forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
- [self setVec4:newVec4 forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName
-{
- GLint uniformIndex = [filterProgram uniformIndex:uniformName];
-
- [self setFloatArray:array length:count forUniform:uniformIndex program:filterProgram];
-}
-
-- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniformMatrix3fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
- }];
- });
-}
-
-- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniformMatrix4fv(uniform, 1, GL_FALSE, (GLfloat *)&matrix);
- }];
- });
-}
-
-- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniform1f(uniform, floatValue);
- }];
- });
-}
-
-- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- GLfloat positionArray[2];
- positionArray[0] = pointValue.x;
- positionArray[1] = pointValue.y;
-
- glUniform2fv(uniform, 1, positionArray);
- }];
- });
-}
-
-- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
-
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- GLfloat sizeArray[2];
- sizeArray[0] = sizeValue.width;
- sizeArray[1] = sizeValue.height;
-
- glUniform2fv(uniform, 1, sizeArray);
- }];
- });
-}
-
-- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
-
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniform3fv(uniform, 1, (GLfloat *)&vectorValue);
- }];
- });
-}
-
-- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
-
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniform4fv(uniform, 1, (GLfloat *)&vectorValue);
- }];
- });
-}
-
-- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- // Make a copy of the data, so it doesn't get overwritten before async call executes
- NSData* arrayData = [NSData dataWithBytes:arrayValue length:arrayLength * sizeof(arrayValue[0])];
-
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
-
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniform1fv(uniform, arrayLength, [arrayData bytes]);
- }];
- });
-}
-
-- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:shaderProgram];
-
- [self setAndExecuteUniformStateCallbackAtIndex:uniform forProgram:shaderProgram toBlock:^{
- glUniform1i(uniform, intValue);
- }];
- });
-}
-
-- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
-{
- [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
- uniformStateBlock();
-}
-
-- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
-{
- [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
- dispatch_block_t currentBlock = obj;
- currentBlock();
- }];
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- static const GLfloat imageVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-}
-
-- (NSInteger)nextAvailableTextureIndex;
-{
- return 0;
-}
-
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- firstInputFramebuffer = newInputFramebuffer;
- [firstInputFramebuffer lock];
-}
-
-- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
-{
- CGSize rotatedSize = sizeToRotate;
-
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- rotatedSize.width = sizeToRotate.height;
- rotatedSize.height = sizeToRotate.width;
- }
-
- return rotatedSize;
-}
-
-- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
-{
- CGPoint rotatedPoint;
- switch(rotation)
- {
- case kGPUImageNoRotation: return pointToRotate; break;
- case kGPUImageFlipHorizonal:
- {
- rotatedPoint.x = 1.0 - pointToRotate.x;
- rotatedPoint.y = pointToRotate.y;
- }; break;
- case kGPUImageFlipVertical:
- {
- rotatedPoint.x = pointToRotate.x;
- rotatedPoint.y = 1.0 - pointToRotate.y;
- }; break;
- case kGPUImageRotateLeft:
- {
- rotatedPoint.x = 1.0 - pointToRotate.y;
- rotatedPoint.y = pointToRotate.x;
- }; break;
- case kGPUImageRotateRight:
- {
- rotatedPoint.x = pointToRotate.y;
- rotatedPoint.y = 1.0 - pointToRotate.x;
- }; break;
- case kGPUImageRotateRightFlipVertical:
- {
- rotatedPoint.x = pointToRotate.y;
- rotatedPoint.y = pointToRotate.x;
- }; break;
- case kGPUImageRotateRightFlipHorizontal:
- {
- rotatedPoint.x = 1.0 - pointToRotate.y;
- rotatedPoint.y = 1.0 - pointToRotate.x;
- }; break;
- case kGPUImageRotate180:
- {
- rotatedPoint.x = 1.0 - pointToRotate.x;
- rotatedPoint.y = 1.0 - pointToRotate.y;
- }; break;
- }
-
- return rotatedPoint;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (self.preventRendering)
- {
- return;
- }
-
- if (overrideInputSize)
- {
- if (CGSizeEqualToSize(forcedMaximumSize, CGSizeZero))
- {
- }
- else
- {
- CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(newSize, CGRectMake(0.0, 0.0, forcedMaximumSize.width, forcedMaximumSize.height));
- inputTextureSize = insetRect.size;
- }
- }
- else
- {
- CGSize rotatedSize = [self rotatedSize:newSize forIndex:textureIndex];
-
- if (CGSizeEqualToSize(rotatedSize, CGSizeZero))
- {
- inputTextureSize = rotatedSize;
- }
- else if (!CGSizeEqualToSize(inputTextureSize, rotatedSize))
- {
- inputTextureSize = rotatedSize;
- }
- }
-
- [self setupFilterForSize:[self sizeOfFBO]];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- inputRotation = newInputRotation;
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- if (CGSizeEqualToSize(frameSize, CGSizeZero))
- {
- overrideInputSize = NO;
- }
- else
- {
- overrideInputSize = YES;
- inputTextureSize = frameSize;
- forcedMaximumSize = CGSizeZero;
- }
-}
-
-- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
-{
- if (CGSizeEqualToSize(frameSize, CGSizeZero))
- {
- overrideInputSize = NO;
- inputTextureSize = CGSizeZero;
- forcedMaximumSize = CGSizeZero;
- }
- else
- {
- overrideInputSize = YES;
- forcedMaximumSize = frameSize;
- }
-}
-
-- (CGSize)maximumOutputSize;
-{
- // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
- return CGSizeZero;
-
- /*
- if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
- {
- for (id currentTarget in targets)
- {
- if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
- {
- cachedMaximumOutputSize = [currentTarget maximumOutputSize];
- }
- }
- }
-
- return cachedMaximumOutputSize;
- */
-}
-
-- (void)endProcessing
-{
- if (!isEndProcessing)
- {
- isEndProcessing = YES;
-
- for (id currentTarget in targets)
- {
- [currentTarget endProcessing];
- }
- }
-}
-
-- (BOOL)wantsMonochromeInput;
-{
- return NO;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.h b/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.h
deleted file mode 100755
index 6817cdf..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageOutput.h"
-#import "GPUImageFilter.h"
-
-@interface GPUImageFilterGroup : GPUImageOutput
-{
- NSMutableArray *filters;
- BOOL isEndProcessing;
-}
-
-@property(readwrite, nonatomic, strong) GPUImageOutput *terminalFilter;
-@property(readwrite, nonatomic, strong) NSArray *initialFilters;
-@property(readwrite, nonatomic, strong) GPUImageOutput *inputFilterToIgnoreForUpdates;
-
-// Filter management
-- (void)addFilter:(GPUImageOutput *)newFilter;
-- (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex;
-- (NSUInteger)filterCount;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.m b/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.m
deleted file mode 100755
index 72cfe5e..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilterGroup.m
+++ /dev/null
@@ -1,208 +0,0 @@
-#import "GPUImageFilterGroup.h"
-#import "GPUImagePicture.h"
-
-@implementation GPUImageFilterGroup
-
-@synthesize terminalFilter = _terminalFilter;
-@synthesize initialFilters = _initialFilters;
-@synthesize inputFilterToIgnoreForUpdates = _inputFilterToIgnoreForUpdates;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- filters = [[NSMutableArray alloc] init];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Filter management
-
-- (void)addFilter:(GPUImageOutput *)newFilter;
-{
- [filters addObject:newFilter];
-}
-
-- (GPUImageOutput *)filterAtIndex:(NSUInteger)filterIndex;
-{
- return [filters objectAtIndex:filterIndex];
-}
-
-- (NSUInteger)filterCount;
-{
- return [filters count];
-}
-
-#pragma mark -
-#pragma mark Still image processing
-
-- (void)useNextFrameForImageCapture;
-{
- [self.terminalFilter useNextFrameForImageCapture];
-}
-
-- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
-{
- return [self.terminalFilter newCGImageFromCurrentlyProcessedOutput];
-}
-
-#pragma mark -
-#pragma mark GPUImageOutput overrides
-
-- (void)setTargetToIgnoreForUpdates:(id)targetToIgnoreForUpdates;
-{
- [_terminalFilter setTargetToIgnoreForUpdates:targetToIgnoreForUpdates];
-}
-
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-{
- [_terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
-}
-
-- (void)removeTarget:(id)targetToRemove;
-{
- [_terminalFilter removeTarget:targetToRemove];
-}
-
-- (void)removeAllTargets;
-{
- [_terminalFilter removeAllTargets];
-}
-
-- (NSArray *)targets;
-{
- return [_terminalFilter targets];
-}
-
-- (void)setFrameProcessingCompletionBlock:(void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
-{
- [_terminalFilter setFrameProcessingCompletionBlock:frameProcessingCompletionBlock];
-}
-
-- (void (^)(GPUImageOutput *, CMTime))frameProcessingCompletionBlock;
-{
- return [_terminalFilter frameProcessingCompletionBlock];
-}
-
-#pragma mark -
-#pragma mark GPUImageInput protocol
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- if (currentFilter != self.inputFilterToIgnoreForUpdates)
- {
- [currentFilter newFrameReadyAtTime:frameTime atIndex:textureIndex];
- }
- }
-}
-
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- [currentFilter setInputFramebuffer:newInputFramebuffer atIndex:textureIndex];
- }
-}
-
-- (NSInteger)nextAvailableTextureIndex;
-{
-// if ([_initialFilters count] > 0)
-// {
-// return [[_initialFilters objectAtIndex:0] nextAvailableTextureIndex];
-// }
-
- return 0;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- [currentFilter setInputSize:newSize atIndex:textureIndex];
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- [currentFilter setInputRotation:newInputRotation atIndex:(NSInteger)textureIndex];
- }
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- for (GPUImageOutput *currentFilter in filters)
- {
- [currentFilter forceProcessingAtSize:frameSize];
- }
-}
-
-- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
-{
- for (GPUImageOutput *currentFilter in filters)
- {
- [currentFilter forceProcessingAtSizeRespectingAspectRatio:frameSize];
- }
-}
-
-- (CGSize)maximumOutputSize;
-{
- // I'm temporarily disabling adjustments for smaller output sizes until I figure out how to make this work better
- return CGSizeZero;
-
- /*
- if (CGSizeEqualToSize(cachedMaximumOutputSize, CGSizeZero))
- {
- for (id currentTarget in _initialFilters)
- {
- if ([currentTarget maximumOutputSize].width > cachedMaximumOutputSize.width)
- {
- cachedMaximumOutputSize = [currentTarget maximumOutputSize];
- }
- }
- }
-
- return cachedMaximumOutputSize;
- */
-}
-
-- (void)endProcessing;
-{
- if (!isEndProcessing)
- {
- isEndProcessing = YES;
-
- for (id currentTarget in _initialFilters)
- {
- [currentTarget endProcessing];
- }
- }
-}
-
-- (BOOL)wantsMonochromeInput;
-{
- BOOL allInputsWantMonochromeInput = YES;
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- allInputsWantMonochromeInput = allInputsWantMonochromeInput && [currentFilter wantsMonochromeInput];
- }
-
- return allInputsWantMonochromeInput;
-}
-
-- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
-{
- for (GPUImageOutput *currentFilter in _initialFilters)
- {
- [currentFilter setCurrentlyReceivingMonochromeInput:newValue];
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.h b/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.h
deleted file mode 100755
index dc2baea..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.h
+++ /dev/null
@@ -1,30 +0,0 @@
-#import
-#import "GPUImageOutput.h"
-
-@interface GPUImageFilterPipeline : NSObject
-{
- NSString *stringValue;
-}
-
-@property (strong) NSMutableArray *filters;
-
-@property (strong) GPUImageOutput *input;
-@property (strong) id output;
-
-- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id )output;
-- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id )output;
-- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id )output;
-
-- (void) addFilter:(GPUImageOutput *)filter;
-- (void) addFilter:(GPUImageOutput *)filter atIndex:(NSUInteger)insertIndex;
-- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput *)filter;
-- (void) replaceAllFilters:(NSArray *) newFilters;
-- (void) removeFilter:(GPUImageOutput *)filter;
-- (void) removeFilterAtIndex:(NSUInteger)index;
-- (void) removeAllFilters;
-
-- (UIImage *) currentFilteredFrame;
-- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
-- (CGImageRef) newCGImageFromCurrentFilteredFrame;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.m b/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.m
deleted file mode 100755
index 4fbe5eb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFilterPipeline.m
+++ /dev/null
@@ -1,218 +0,0 @@
-#import "GPUImageFilterPipeline.h"
-
-@interface GPUImageFilterPipeline ()
-
-- (BOOL)_parseConfiguration:(NSDictionary *)configuration;
-
-- (void)_refreshFilters;
-
-@end
-
-@implementation GPUImageFilterPipeline
-
-@synthesize filters = _filters, input = _input, output = _output;
-
-#pragma mark Config file init
-
-- (id)initWithConfiguration:(NSDictionary *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
- self = [super init];
- if (self) {
- self.input = input;
- self.output = output;
- if (![self _parseConfiguration:configuration]) {
- NSLog(@"Sorry, a parsing error occurred.");
- abort();
- }
- [self _refreshFilters];
- }
- return self;
-}
-
-- (id)initWithConfigurationFile:(NSURL *)configuration input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
- return [self initWithConfiguration:[NSDictionary dictionaryWithContentsOfURL:configuration] input:input output:output];
-}
-
-- (BOOL)_parseConfiguration:(NSDictionary *)configuration {
- NSArray *filters = [configuration objectForKey:@"Filters"];
- if (!filters) {
- return NO;
- }
-
- NSError *regexError = nil;
- NSRegularExpression *parsingRegex = [NSRegularExpression regularExpressionWithPattern:@"(float|CGPoint|NSString)\\((.*?)(?:,\\s*(.*?))*\\)"
- options:0
- error:&regexError];
-
- // It's faster to put them into an array and then pass it to the filters property than it is to call [self addFilter:] every time
- NSMutableArray *orderedFilters = [NSMutableArray arrayWithCapacity:[filters count]];
- for (NSDictionary *filter in filters) {
- NSString *filterName = [filter objectForKey:@"FilterName"];
- Class theClass = NSClassFromString(filterName);
- GPUImageOutput *genericFilter = [[theClass alloc] init];
- // Set up the properties
- NSDictionary *filterAttributes;
- if ((filterAttributes = [filter objectForKey:@"Attributes"])) {
- for (NSString *propertyKey in filterAttributes) {
- // Set up the selector
- SEL theSelector = NSSelectorFromString(propertyKey);
- NSInvocation *inv = [NSInvocation invocationWithMethodSignature:[theClass instanceMethodSignatureForSelector:theSelector]];
- [inv setSelector:theSelector];
- [inv setTarget:genericFilter];
-
- // check selector given with parameter
- if ([propertyKey hasSuffix:@":"]) {
-
- stringValue = nil;
-
- // Then parse the arguments
- NSMutableArray *parsedArray;
- if ([[filterAttributes objectForKey:propertyKey] isKindOfClass:[NSArray class]]) {
- NSArray *array = [filterAttributes objectForKey:propertyKey];
- parsedArray = [NSMutableArray arrayWithCapacity:[array count]];
- for (NSString *string in array) {
- NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
- options:0
- range:NSMakeRange(0, [string length])];
-
- NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
- if ([modifier isEqualToString:@"float"]) {
- // Float modifier, one argument
- CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
- [parsedArray addObject:[NSNumber numberWithFloat:value]];
- [inv setArgument:&value atIndex:2];
- } else if ([modifier isEqualToString:@"CGPoint"]) {
- // CGPoint modifier, two float arguments
- CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
- CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
- CGPoint value = CGPointMake(x, y);
- [parsedArray addObject:[NSValue valueWithCGPoint:value]];
- } else if ([modifier isEqualToString:@"NSString"]) {
- // NSString modifier, one string argument
- stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
- [inv setArgument:&stringValue atIndex:2];
-
- } else {
- return NO;
- }
- }
- [inv setArgument:&parsedArray atIndex:2];
- } else {
- NSString *string = [filterAttributes objectForKey:propertyKey];
- NSTextCheckingResult *parse = [parsingRegex firstMatchInString:string
- options:0
- range:NSMakeRange(0, [string length])];
-
- NSString *modifier = [string substringWithRange:[parse rangeAtIndex:1]];
- if ([modifier isEqualToString:@"float"]) {
- // Float modifier, one argument
- CGFloat value = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
- [inv setArgument:&value atIndex:2];
- } else if ([modifier isEqualToString:@"CGPoint"]) {
- // CGPoint modifier, two float arguments
- CGFloat x = [[string substringWithRange:[parse rangeAtIndex:2]] floatValue];
- CGFloat y = [[string substringWithRange:[parse rangeAtIndex:3]] floatValue];
- CGPoint value = CGPointMake(x, y);
- [inv setArgument:&value atIndex:2];
- } else if ([modifier isEqualToString:@"NSString"]) {
- // NSString modifier, one string argument
- stringValue = [[string substringWithRange:[parse rangeAtIndex:2]] copy];
- [inv setArgument:&stringValue atIndex:2];
-
- } else {
- return NO;
- }
- }
- }
-
-
- [inv invoke];
- }
- }
- [orderedFilters addObject:genericFilter];
- }
- self.filters = orderedFilters;
-
- return YES;
-}
-
-#pragma mark Regular init
-
-- (id)initWithOrderedFilters:(NSArray *)filters input:(GPUImageOutput *)input output:(id <GPUImageInput>)output {
- self = [super init];
- if (self) {
- self.input = input;
- self.output = output;
- self.filters = [NSMutableArray arrayWithArray:filters];
- [self _refreshFilters];
- }
- return self;
-}
-
-- (void)addFilter:(GPUImageOutput *)filter atIndex:(NSUInteger)insertIndex {
- [self.filters insertObject:filter atIndex:insertIndex];
- [self _refreshFilters];
-}
-
-- (void)addFilter:(GPUImageOutput *)filter {
- [self.filters addObject:filter];
- [self _refreshFilters];
-}
-
-- (void)replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput *)filter {
- [self.filters replaceObjectAtIndex:index withObject:filter];
- [self _refreshFilters];
-}
-
-- (void) removeFilter:(GPUImageOutput *)filter;
-{
- [self.filters removeObject:filter];
- [self _refreshFilters];
-}
-
-- (void)removeFilterAtIndex:(NSUInteger)index {
- [self.filters removeObjectAtIndex:index];
- [self _refreshFilters];
-}
-
-- (void)removeAllFilters {
- [self.filters removeAllObjects];
- [self _refreshFilters];
-}
-
-- (void)replaceAllFilters:(NSArray *)newFilters {
- self.filters = [NSMutableArray arrayWithArray:newFilters];
- [self _refreshFilters];
-}
-
-- (void)_refreshFilters {
-
- id prevFilter = self.input;
- GPUImageOutput *theFilter = nil;
-
- for (int i = 0; i < [self.filters count]; i++) {
- theFilter = [self.filters objectAtIndex:i];
- [prevFilter removeAllTargets];
- [prevFilter addTarget:theFilter];
- prevFilter = theFilter;
- }
-
- [prevFilter removeAllTargets];
-
- if (self.output != nil) {
- [prevFilter addTarget:self.output];
- }
-}
-
-- (UIImage *)currentFilteredFrame {
- return [(GPUImageOutput *)[_filters lastObject] imageFromCurrentFramebuffer];
-}
-
-- (UIImage *)currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation {
- return [(GPUImageOutput *)[_filters lastObject] imageFromCurrentFramebufferWithOrientation:imageOrientation];
-}
-
-- (CGImageRef)newCGImageFromCurrentFilteredFrame {
- return [(GPUImageOutput *)[_filters lastObject] newCGImageFromCurrentlyProcessedOutput];
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.h b/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.h
deleted file mode 100644
index 5cf20dd..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.h
+++ /dev/null
@@ -1,58 +0,0 @@
-#import <Foundation/Foundation.h>
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-#import <OpenGLES/EAGL.h>
-#import <OpenGLES/ES2/gl.h>
-#import <OpenGLES/ES2/glext.h>
-#else
-#import <OpenGL/OpenGL.h>
-#import <OpenGL/gl.h>
-#endif
-
-#import <QuartzCore/QuartzCore.h>
-#import <CoreMedia/CoreMedia.h>
-
-
-typedef struct GPUTextureOptions {
- GLenum minFilter;
- GLenum magFilter;
- GLenum wrapS;
- GLenum wrapT;
- GLenum internalFormat;
- GLenum format;
- GLenum type;
-} GPUTextureOptions;
-
-@interface GPUImageFramebuffer : NSObject
-
-@property(readonly) CGSize size;
-@property(readonly) GPUTextureOptions textureOptions;
-@property(readonly) GLuint texture;
-@property(readonly) BOOL missingFramebuffer;
-
-// Initialization and teardown
-- (id)initWithSize:(CGSize)framebufferSize;
-- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
-- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
-
-// Usage
-- (void)activateFramebuffer;
-
-// Reference counting
-- (void)lock;
-- (void)unlock;
-- (void)clearAllLocks;
-- (void)disableReferenceCounting;
-- (void)enableReferenceCounting;
-
-// Image capture
-- (CGImageRef)newCGImageFromFramebufferContents;
-- (void)restoreRenderTarget;
-
-// Raw data bytes
-- (void)lockForReading;
-- (void)unlockAfterReading;
-- (NSUInteger)bytesPerRow;
-- (GLubyte *)byteBuffer;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m b/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m
deleted file mode 100644
index 1d3d48f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFramebuffer.m
+++ /dev/null
@@ -1,448 +0,0 @@
-#import "GPUImageFramebuffer.h"
-#import "GPUImageOutput.h"
-
-@interface GPUImageFramebuffer()
-{
- GLuint framebuffer;
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CVPixelBufferRef renderTarget;
- CVOpenGLESTextureRef renderTexture;
- NSUInteger readLockCount;
-#else
-#endif
- NSUInteger framebufferReferenceCount;
- BOOL referenceCountingDisabled;
-}
-
-- (void)generateFramebuffer;
-- (void)generateTexture;
-- (void)destroyFramebuffer;
-
-@end
-
-void dataProviderReleaseCallback (void *info, const void *data, size_t size);
-void dataProviderUnlockCallback (void *info, const void *data, size_t size);
-
-@implementation GPUImageFramebuffer
-
-@synthesize size = _size;
-@synthesize textureOptions = _textureOptions;
-@synthesize texture = _texture;
-@synthesize missingFramebuffer = _missingFramebuffer;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- _textureOptions = fboTextureOptions;
- _size = framebufferSize;
- framebufferReferenceCount = 0;
- referenceCountingDisabled = NO;
- _missingFramebuffer = onlyGenerateTexture;
-
- if (_missingFramebuffer)
- {
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
- [self generateTexture];
- framebuffer = 0;
- });
- }
- else
- {
- [self generateFramebuffer];
- }
- return self;
-}
-
-- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- GPUTextureOptions defaultTextureOptions;
- defaultTextureOptions.minFilter = GL_LINEAR;
- defaultTextureOptions.magFilter = GL_LINEAR;
- defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.internalFormat = GL_RGBA;
- defaultTextureOptions.format = GL_BGRA;
- defaultTextureOptions.type = GL_UNSIGNED_BYTE;
-
- _textureOptions = defaultTextureOptions;
- _size = framebufferSize;
- framebufferReferenceCount = 0;
- referenceCountingDisabled = YES;
-
- _texture = inputTexture;
-
- return self;
-}
-
-- (id)initWithSize:(CGSize)framebufferSize;
-{
- GPUTextureOptions defaultTextureOptions;
- defaultTextureOptions.minFilter = GL_LINEAR;
- defaultTextureOptions.magFilter = GL_LINEAR;
- defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.internalFormat = GL_RGBA;
- defaultTextureOptions.format = GL_BGRA;
- defaultTextureOptions.type = GL_UNSIGNED_BYTE;
-
- if (!(self = [self initWithSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:NO]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)dealloc
-{
- [self destroyFramebuffer];
-}
-
-#pragma mark -
-#pragma mark Internal
-
-- (void)generateTexture;
-{
- glActiveTexture(GL_TEXTURE1);
- glGenTextures(1, &_texture);
- glBindTexture(GL_TEXTURE_2D, _texture);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, _textureOptions.minFilter);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, _textureOptions.magFilter);
- // This is necessary for non-power-of-two textures
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);
-
- // TODO: Handle mipmaps
-}
-
-- (void)generateFramebuffer;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- glGenFramebuffers(1, &framebuffer);
- glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
-
- // By default, all framebuffers on iOS 5.0+ devices are backed by texture caches, using one shared cache
- if ([GPUImageContext supportsFastTextureUpload])
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CVOpenGLESTextureCacheRef coreVideoTextureCache = [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache];
- // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
-
- CFDictionaryRef empty; // empty value for attr value.
- CFMutableDictionaryRef attrs;
- empty = CFDictionaryCreate(kCFAllocatorDefault, NULL, NULL, 0, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); // our empty IOSurface properties dictionary
- attrs = CFDictionaryCreateMutable(kCFAllocatorDefault, 1, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks);
- CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);
-
- CVReturn err = CVPixelBufferCreate(kCFAllocatorDefault, (int)_size.width, (int)_size.height, kCVPixelFormatType_32BGRA, attrs, &renderTarget);
- if (err)
- {
- NSLog(@"FBO size: %f, %f", _size.width, _size.height);
- NSAssert(NO, @"Error at CVPixelBufferCreate %d", err);
- }
-
- err = CVOpenGLESTextureCacheCreateTextureFromImage (kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
- NULL, // texture attributes
- GL_TEXTURE_2D,
- _textureOptions.internalFormat, // opengl format
- (int)_size.width,
- (int)_size.height,
- _textureOptions.format, // native iOS format
- _textureOptions.type,
- 0,
- &renderTexture);
- if (err)
- {
- NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
- }
-
- CFRelease(attrs);
- CFRelease(empty);
-
- glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
- _texture = CVOpenGLESTextureGetName(renderTexture);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _textureOptions.wrapS);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _textureOptions.wrapT);
-
- glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);
-#endif
- }
- else
- {
- [self generateTexture];
-
- glBindTexture(GL_TEXTURE_2D, _texture);
-
- glTexImage2D(GL_TEXTURE_2D, 0, _textureOptions.internalFormat, (int)_size.width, (int)_size.height, 0, _textureOptions.format, _textureOptions.type, 0);
- glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _texture, 0);
- }
-
- #ifndef NS_BLOCK_ASSERTIONS
- GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
- NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
- #endif
-
- glBindTexture(GL_TEXTURE_2D, 0);
- });
-}
-
-- (void)destroyFramebuffer;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- if (framebuffer)
- {
- glDeleteFramebuffers(1, &framebuffer);
- framebuffer = 0;
- }
-
-
- if ([GPUImageContext supportsFastTextureUpload] && (!_missingFramebuffer))
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- if (renderTarget)
- {
- CFRelease(renderTarget);
- renderTarget = NULL;
- }
-
- if (renderTexture)
- {
- CFRelease(renderTexture);
- renderTexture = NULL;
- }
-#endif
- }
- else
- {
- glDeleteTextures(1, &_texture);
- }
-
- });
-}
-
-#pragma mark -
-#pragma mark Usage
-
-- (void)activateFramebuffer;
-{
- glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
- glViewport(0, 0, (int)_size.width, (int)_size.height);
-}
-
-#pragma mark -
-#pragma mark Reference counting
-
-- (void)lock;
-{
- if (referenceCountingDisabled)
- {
- return;
- }
-
- framebufferReferenceCount++;
-}
-
-- (void)unlock;
-{
- if (referenceCountingDisabled)
- {
- return;
- }
-
- NSAssert(framebufferReferenceCount > 0, @"Tried to overrelease a framebuffer, did you forget to call -useNextFrameForImageCapture before using -imageFromCurrentFramebuffer?");
- framebufferReferenceCount--;
- if (framebufferReferenceCount < 1)
- {
- [[GPUImageContext sharedFramebufferCache] returnFramebufferToCache:self];
- }
-}
-
-- (void)clearAllLocks;
-{
- framebufferReferenceCount = 0;
-}
-
-- (void)disableReferenceCounting;
-{
- referenceCountingDisabled = YES;
-}
-
-- (void)enableReferenceCounting;
-{
- referenceCountingDisabled = NO;
-}
-
-#pragma mark -
-#pragma mark Image capture
-
-void dataProviderReleaseCallback (void *info, const void *data, size_t size)
-{
- free((void *)data);
-}
-
-void dataProviderUnlockCallback (void *info, const void *data, size_t size)
-{
- GPUImageFramebuffer *framebuffer = (__bridge_transfer GPUImageFramebuffer*)info;
-
- [framebuffer restoreRenderTarget];
- [framebuffer unlock];
- [[GPUImageContext sharedFramebufferCache] removeFramebufferFromActiveImageCaptureList:framebuffer];
-}
-
-- (CGImageRef)newCGImageFromFramebufferContents;
-{
- // a CGImage can only be created from a 'normal' color texture
- NSAssert(self.textureOptions.internalFormat == GL_RGBA, @"For conversion to a CGImage the output texture format for this filter must be GL_RGBA.");
- NSAssert(self.textureOptions.type == GL_UNSIGNED_BYTE, @"For conversion to a CGImage the type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- __block CGImageRef cgImageFromBytes;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- NSUInteger totalBytesForImage = (int)_size.width * (int)_size.height * 4;
- // It appears that the width of a texture must be padded out to be a multiple of 8 (32 bytes) if reading from it using a texture cache
-
- GLubyte *rawImagePixels;
-
- CGDataProviderRef dataProvider = NULL;
- if ([GPUImageContext supportsFastTextureUpload])
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- NSUInteger paddedWidthOfImage = CVPixelBufferGetBytesPerRow(renderTarget) / 4.0;
- NSUInteger paddedBytesForImage = paddedWidthOfImage * (int)_size.height * 4;
-
- glFinish();
- CFRetain(renderTarget); // I need to retain the pixel buffer here and release in the data source callback to prevent its bytes from being prematurely deallocated during a photo write operation
- [self lockForReading];
- rawImagePixels = (GLubyte *)CVPixelBufferGetBaseAddress(renderTarget);
- dataProvider = CGDataProviderCreateWithData((__bridge_retained void*)self, rawImagePixels, paddedBytesForImage, dataProviderUnlockCallback);
- [[GPUImageContext sharedFramebufferCache] addFramebufferToActiveImageCaptureList:self]; // In case the framebuffer is swapped out on the filter, need to have a strong reference to it somewhere for it to hang on while the image is in existence
-#else
-#endif
- }
- else
- {
- [self activateFramebuffer];
- rawImagePixels = (GLubyte *)malloc(totalBytesForImage);
- glReadPixels(0, 0, (int)_size.width, (int)_size.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
- dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels, totalBytesForImage, dataProviderReleaseCallback);
- [self unlock]; // Don't need to keep this around anymore
- }
-
- CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
-
- if ([GPUImageContext supportsFastTextureUpload])
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, CVPixelBufferGetBytesPerRow(renderTarget), defaultRGBColorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-#else
-#endif
- }
- else
- {
- cgImageFromBytes = CGImageCreate((int)_size.width, (int)_size.height, 8, 32, 4 * (int)_size.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
- }
-
- // Capture image with current device orientation
- CGDataProviderRelease(dataProvider);
- CGColorSpaceRelease(defaultRGBColorSpace);
-
- });
-
- return cgImageFromBytes;
-}
-
-- (void)restoreRenderTarget;
-{
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [self unlockAfterReading];
- CFRelease(renderTarget);
-#else
-#endif
-}
-
-#pragma mark -
-#pragma mark Raw data bytes
-
-- (void)lockForReading
-{
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- if ([GPUImageContext supportsFastTextureUpload])
- {
- if (readLockCount == 0)
- {
- CVPixelBufferLockBaseAddress(renderTarget, 0);
- }
- readLockCount++;
- }
-#endif
-}
-
-- (void)unlockAfterReading
-{
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- if ([GPUImageContext supportsFastTextureUpload])
- {
- NSAssert(readLockCount > 0, @"Unbalanced call to -[GPUImageFramebuffer unlockAfterReading]");
- readLockCount--;
- if (readLockCount == 0)
- {
- CVPixelBufferUnlockBaseAddress(renderTarget, 0);
- }
- }
-#endif
-}
-
-- (NSUInteger)bytesPerRow;
-{
- if ([GPUImageContext supportsFastTextureUpload])
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- return CVPixelBufferGetBytesPerRow(renderTarget);
-#else
- return _size.width * 4; // TODO: do more with this on the non-texture-cache side
-#endif
- }
- else
- {
- return _size.width * 4;
- }
-}
-
-- (GLubyte *)byteBuffer;
-{
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [self lockForReading];
- GLubyte * bufferBytes = CVPixelBufferGetBaseAddress(renderTarget);
- [self unlockAfterReading];
- return bufferBytes;
-#else
- return NULL; // TODO: do more with this on the non-texture-cache side
-#endif
-}
-
-- (GLuint)texture;
-{
-// NSLog(@"Accessing texture: %d from FB: %@", _texture, self);
- return _texture;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.h b/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.h
deleted file mode 100644
index e56a345..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import <Foundation/Foundation.h>
-#import <QuartzCore/QuartzCore.h>
-#import "GPUImageFramebuffer.h"
-
-@interface GPUImageFramebufferCache : NSObject
-
-// Framebuffer management
-- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
-- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
-- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
-- (void)purgeAllUnassignedFramebuffers;
-- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
-- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.m b/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.m
deleted file mode 100644
index 21925ad..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageFramebufferCache.m
+++ /dev/null
@@ -1,179 +0,0 @@
-#import "GPUImageFramebufferCache.h"
-#import "GPUImageContext.h"
-#import "GPUImageOutput.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-#import <UIKit/UIKit.h>
-#else
-#endif
-
-@interface GPUImageFramebufferCache()
-{
-// NSCache *framebufferCache;
- NSMutableDictionary *framebufferCache;
- NSMutableDictionary *framebufferTypeCounts;
- NSMutableArray *activeImageCaptureList; // Where framebuffers that may be lost by a filter, but which are still needed for a UIImage, etc., are stored
- id memoryWarningObserver;
-
- dispatch_queue_t framebufferCacheQueue;
-}
-
-- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
-
-@end
-
-
-@implementation GPUImageFramebufferCache
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- memoryWarningObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationDidReceiveMemoryWarningNotification object:nil queue:nil usingBlock:^(NSNotification *note) {
-
- [self purgeAllUnassignedFramebuffers];
- }];
-#else
-#endif
-
-// framebufferCache = [[NSCache alloc] init];
- framebufferCache = [[NSMutableDictionary alloc] init];
- framebufferTypeCounts = [[NSMutableDictionary alloc] init];
- activeImageCaptureList = [[NSMutableArray alloc] init];
- framebufferCacheQueue = dispatch_queue_create("com.sunsetlakesoftware.GPUImage.framebufferCacheQueue", NULL);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Framebuffer management
-
-- (NSString *)hashForSize:(CGSize)size textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
-{
- if (onlyTexture)
- {
- return [NSString stringWithFormat:@"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d-NOFB", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];
- }
- else
- {
- return [NSString stringWithFormat:@"%.1fx%.1f-%d:%d:%d:%d:%d:%d:%d", size.width, size.height, textureOptions.minFilter, textureOptions.magFilter, textureOptions.wrapS, textureOptions.wrapT, textureOptions.internalFormat, textureOptions.format, textureOptions.type];
- }
-}
-
-- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
-{
- __block GPUImageFramebuffer *framebufferFromCache = nil;
-// dispatch_sync(framebufferCacheQueue, ^{
- runSynchronouslyOnVideoProcessingQueue(^{
- NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
- NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
- NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
-
- if ([numberOfMatchingTexturesInCache integerValue] < 1)
- {
- // Nothing in the cache, create a new framebuffer to use
- framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
- }
- else
- {
- // Something found, pull the old framebuffer and decrement the count
- NSInteger currentTextureID = (numberOfMatchingTextures - 1);
- while ((framebufferFromCache == nil) && (currentTextureID >= 0))
- {
- NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)currentTextureID];
- framebufferFromCache = [framebufferCache objectForKey:textureHash];
- // Test the values in the cache first, to see if they got invalidated behind our back
- if (framebufferFromCache != nil)
- {
- // Withdraw this from the cache while it's in use
- [framebufferCache removeObjectForKey:textureHash];
- }
- currentTextureID--;
- }
-
- currentTextureID++;
-
- [framebufferTypeCounts setObject:[NSNumber numberWithInteger:currentTextureID] forKey:lookupHash];
-
- if (framebufferFromCache == nil)
- {
- framebufferFromCache = [[GPUImageFramebuffer alloc] initWithSize:framebufferSize textureOptions:textureOptions onlyTexture:onlyTexture];
- }
- }
- });
-
- [framebufferFromCache lock];
- return framebufferFromCache;
-}
-
-- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
-{
- GPUTextureOptions defaultTextureOptions;
- defaultTextureOptions.minFilter = GL_LINEAR;
- defaultTextureOptions.magFilter = GL_LINEAR;
- defaultTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
- defaultTextureOptions.internalFormat = GL_RGBA;
- defaultTextureOptions.format = GL_BGRA;
- defaultTextureOptions.type = GL_UNSIGNED_BYTE;
-
- return [self fetchFramebufferForSize:framebufferSize textureOptions:defaultTextureOptions onlyTexture:onlyTexture];
-}
-
-- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
-{
- [framebuffer clearAllLocks];
-
-// dispatch_async(framebufferCacheQueue, ^{
- runAsynchronouslyOnVideoProcessingQueue(^{
- CGSize framebufferSize = framebuffer.size;
- GPUTextureOptions framebufferTextureOptions = framebuffer.textureOptions;
- NSString *lookupHash = [self hashForSize:framebufferSize textureOptions:framebufferTextureOptions onlyTexture:framebuffer.missingFramebuffer];
- NSNumber *numberOfMatchingTexturesInCache = [framebufferTypeCounts objectForKey:lookupHash];
- NSInteger numberOfMatchingTextures = [numberOfMatchingTexturesInCache integerValue];
-
- NSString *textureHash = [NSString stringWithFormat:@"%@-%ld", lookupHash, (long)numberOfMatchingTextures];
-
-// [framebufferCache setObject:framebuffer forKey:textureHash cost:round(framebufferSize.width * framebufferSize.height * 4.0)];
- [framebufferCache setObject:framebuffer forKey:textureHash];
- [framebufferTypeCounts setObject:[NSNumber numberWithInteger:(numberOfMatchingTextures + 1)] forKey:lookupHash];
- });
-}
-
-- (void)purgeAllUnassignedFramebuffers;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
-// dispatch_async(framebufferCacheQueue, ^{
- [framebufferCache removeAllObjects];
- [framebufferTypeCounts removeAllObjects];
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CVOpenGLESTextureCacheFlush([[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], 0);
-#else
-#endif
- });
-}
-
-- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
-// dispatch_async(framebufferCacheQueue, ^{
- [activeImageCaptureList addObject:framebuffer];
- });
-}
-
-- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
-// dispatch_async(framebufferCacheQueue, ^{
- [activeImageCaptureList removeObject:framebuffer];
- });
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.h
deleted file mode 100755
index 0521d08..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageGammaFilter : GPUImageFilter
-{
- GLint gammaUniform;
-}
-
-// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
-@property(readwrite, nonatomic) CGFloat gamma;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.m
deleted file mode 100755
index 35adaba..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGammaFilter.m
+++ /dev/null
@@ -1,66 +0,0 @@
-#import "GPUImageGammaFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float gamma;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageGammaFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float gamma;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(pow(textureColor.rgb, vec3(gamma)), textureColor.w);
- }
-);
-#endif
-
-@implementation GPUImageGammaFilter
-
-@synthesize gamma = _gamma;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageGammaFragmentShaderString]))
- {
- return nil;
- }
-
- gammaUniform = [filterProgram uniformIndex:@"gamma"];
- self.gamma = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setGamma:(CGFloat)newValue;
-{
- _gamma = newValue;
-
- [self setFloat:_gamma forUniform:gammaUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.h
deleted file mode 100755
index 1fb7a13..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.h
+++ /dev/null
@@ -1,36 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-/** A Gaussian blur filter
- Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
- */
-
-@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
-{
- BOOL shouldResizeBlurRadiusWithImageSize;
- CGFloat _blurRadiusInPixels;
-}
-
-/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
- */
-@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;
-
-/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
- */
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-/** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.
- */
-@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
-@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;
-
-/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
-@property(readwrite, nonatomic) NSUInteger blurPasses;
-
-+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-
-- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.m
deleted file mode 100755
index ec99352..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurFilter.m
+++ /dev/null
@@ -1,513 +0,0 @@
-#import "GPUImageGaussianBlurFilter.h"
-
-@implementation GPUImageGaussianBlurFilter
-
-@synthesize texelSpacingMultiplier = _texelSpacingMultiplier;
-@synthesize blurRadiusInPixels = _blurRadiusInPixels;
-@synthesize blurRadiusAsFractionOfImageWidth = _blurRadiusAsFractionOfImageWidth;
-@synthesize blurRadiusAsFractionOfImageHeight = _blurRadiusAsFractionOfImageHeight;
-@synthesize blurPasses = _blurPasses;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
-{
- if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
- {
- return nil;
- }
-
- self.texelSpacingMultiplier = 1.0;
- _blurRadiusInPixels = 2.0;
- shouldResizeBlurRadiusWithImageSize = NO;
-
- return self;
-}
-
-- (id)init;
-{
- NSString *currentGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:4 sigma:2.0];
- NSString *currentGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:4 sigma:2.0];
-
- return [self initWithFirstStageVertexShaderFromString:currentGaussianBlurVertexShader firstStageFragmentShaderFromString:currentGaussianBlurFragmentShader secondStageVertexShaderFromString:currentGaussianBlurVertexShader secondStageFragmentShaderFromString:currentGaussianBlurFragmentShader];
-}
-
-#pragma mark -
-#pragma mark Auto-generation of optimized Gaussian shaders
-
-// "Implementation limit of 32 varying components exceeded" - Max number of varyings for these GPUs
-
-+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImageVertexShaderString;
- }
-
-// NSLog(@"Max varyings: %d", [GPUImageContext maximumVaryingVectorsForThisDevice]);
- NSMutableString *shaderString = [[NSMutableString alloc] init];
-
- // Header
- [shaderString appendFormat:@"\
- attribute vec4 position;\n\
- attribute vec4 inputTextureCoordinate;\n\
- \n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- gl_Position = position;\n\
- \n\
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(blurRadius * 2 + 1) ];
-
- // Inner offset loop
- for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
- {
- NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;
- if (offsetFromCenter < 0)
- {
- [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(-offsetFromCenter)];
- }
- else if (offsetFromCenter > 0)
- {
- [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy + singleStepOffset * %f;\n", (unsigned long)currentBlurCoordinateIndex, (GLfloat)(offsetFromCenter)];
- }
- else
- {
- [shaderString appendFormat:@"blurCoordinates[%ld] = inputTextureCoordinate.xy;\n", (unsigned long)currentBlurCoordinateIndex];
- }
- }
-
- // Footer
- [shaderString appendString:@"}\n"];
-
- return shaderString;
-}
-
-+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImagePassthroughFragmentShaderString;
- }
-
- // First, generate the normal Gaussian weights for a given sigma
- GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
- GLfloat sumOfWeights = 0.0;
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
-
- if (currentGaussianWeightIndex == 0)
- {
- sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
- }
- else
- {
- sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
- }
- }
-
- // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
- }
-
- // Finally, generate the shader from these weights
- NSMutableString *shaderString = [[NSMutableString alloc] init];
-
- // Header
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- \n\
- varying highp vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- lowp vec4 sum = vec4(0.0);\n", (unsigned long)(blurRadius * 2 + 1) ];
-#else
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- vec4 sum = vec4(0.0);\n", (blurRadius * 2 + 1) ];
-#endif
-
- // Inner texture loop
- for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < (blurRadius * 2 + 1); currentBlurCoordinateIndex++)
- {
- NSInteger offsetFromCenter = currentBlurCoordinateIndex - blurRadius;
- if (offsetFromCenter < 0)
- {
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[-offsetFromCenter]];
- }
- else
- {
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)currentBlurCoordinateIndex, standardGaussianWeights[offsetFromCenter]];
- }
- }
-
- // Footer
- [shaderString appendString:@"\
- gl_FragColor = sum;\n\
- }\n"];
-
- free(standardGaussianWeights);
- return shaderString;
-}
-
-+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImageVertexShaderString;
- }
-
- // First, generate the normal Gaussian weights for a given sigma
- GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
- GLfloat sumOfWeights = 0.0;
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
-
- if (currentGaussianWeightIndex == 0)
- {
- sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
- }
- else
- {
- sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
- }
- }
-
- // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
- }
-
- // From these weights we calculate the offsets to read interpolated values from
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
- GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));
-
- for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];
-
- GLfloat optimizedWeight = firstWeight + secondWeight;
-
- optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;
- }
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
- // Header
- [shaderString appendFormat:@"\
- attribute vec4 position;\n\
- attribute vec4 inputTextureCoordinate;\n\
- \n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- gl_Position = position;\n\
- \n\
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];
-
- // Inner offset loop
- [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
- for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
- {
- [shaderString appendFormat:@"\
- blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
- blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];
- }
-
- // Footer
- [shaderString appendString:@"}\n"];
-
- free(optimizedGaussianOffsets);
- free(standardGaussianWeights);
- return shaderString;
-}
-
-+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImagePassthroughFragmentShaderString;
- }
-
- // First, generate the normal Gaussian weights for a given sigma
- GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
- GLfloat sumOfWeights = 0.0;
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
-
- if (currentGaussianWeightIndex == 0)
- {
- sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
- }
- else
- {
- sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
- }
- }
-
- // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
- }
-
- // From these weights we calculate the offsets to read interpolated values from
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
- NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
-
- // Header
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform highp float texelWidthOffset;\n\
- uniform highp float texelHeightOffset;\n\
- \n\
- varying highp vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- lowp vec4 sum = vec4(0.0);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
-#else
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- vec4 sum = vec4(0.0);\n", 1 + (numberOfOptimizedOffsets * 2) ];
-#endif
-
- // Inner texture loop
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]) * %f;\n", standardGaussianWeights[0]];
-
- for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];
- GLfloat optimizedWeight = firstWeight + secondWeight;
-
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]) * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];
- }
-
- // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
- if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#else
- [shaderString appendString:@"vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#endif
-
- for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];
-
- GLfloat optimizedWeight = firstWeight + secondWeight;
- GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;
-
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f) * %f;\n", optimizedOffset, optimizedWeight];
- }
- }
-
- // Footer
- [shaderString appendString:@"\
- gl_FragColor = sum;\n\
- }\n"];
-
- free(standardGaussianWeights);
- return shaderString;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- [super setupFilterForSize:filterFrameSize];
-
- if (shouldResizeBlurRadiusWithImageSize)
- {
- if (self.blurRadiusAsFractionOfImageWidth > 0)
- {
- self.blurRadiusInPixels = filterFrameSize.width * self.blurRadiusAsFractionOfImageWidth;
- }
- else
- {
- self.blurRadiusInPixels = filterFrameSize.height * self.blurRadiusAsFractionOfImageHeight;
- }
- }
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
-
- for (NSUInteger currentAdditionalBlurPass = 1; currentAdditionalBlurPass < _blurPasses; currentAdditionalBlurPass++)
- {
- [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
- }
-}
-
-- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];
-
- if (!filterProgram.initialized)
- {
- [self initializeAttributes];
-
- if (![filterProgram link])
- {
- NSString *progLog = [filterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [filterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [filterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- filterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- filterPositionAttribute = [filterProgram attributeIndex:@"position"];
- filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
- filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
- verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
- verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glEnableVertexAttribArray(filterPositionAttribute);
- glEnableVertexAttribArray(filterTextureCoordinateAttribute);
-
- secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:newVertexShader fragmentShaderString:newFragmentShader];
-
- if (!secondFilterProgram.initialized)
- {
- [self initializeSecondaryAttributes];
-
- if (![secondFilterProgram link])
- {
- NSString *progLog = [secondFilterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [secondFilterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [secondFilterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- secondFilterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
- secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
- secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
- secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
- horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
- horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
-
- glEnableVertexAttribArray(secondFilterPositionAttribute);
- glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
-
- [self setupFilterForSize:[self sizeOfFBO]];
- glFinish();
- });
-
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTexelSpacingMultiplier:(CGFloat)newValue;
-{
- _texelSpacingMultiplier = newValue;
-
- _verticalTexelSpacing = _texelSpacingMultiplier;
- _horizontalTexelSpacing = _texelSpacingMultiplier;
-
- [self setupFilterForSize:[self sizeOfFBO]];
-}
-
-// inputRadius for Core Image's CIGaussianBlur is really sigma in the Gaussian equation, so I'm using that for my blur radius, to be consistent
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- // 7.0 is the limit for blur size for hardcoded varying offsets
-
- if (round(newValue) != _blurRadiusInPixels)
- {
- _blurRadiusInPixels = round(newValue); // For now, only do integral sigmas
-
- NSUInteger calculatedSampleRadius = 0;
- if (_blurRadiusInPixels >= 1) // Avoid a divide-by-zero error here
- {
- // Calculate the number of pixels to sample from by setting a bottom limit for the contribution of the outermost pixel
- CGFloat minimumWeightToFindEdgeOfSamplingArea = 1.0/256.0;
- calculatedSampleRadius = floor(sqrt(-2.0 * pow(_blurRadiusInPixels, 2.0) * log(minimumWeightToFindEdgeOfSamplingArea * sqrt(2.0 * M_PI * pow(_blurRadiusInPixels, 2.0))) ));
- calculatedSampleRadius += calculatedSampleRadius % 2; // There's nothing to gain from handling odd radius sizes, due to the optimizations I use
- }
-
-// NSLog(@"Blur radius: %f, calculated sample radius: %d", _blurRadiusInPixels, calculatedSampleRadius);
-//
- NSString *newGaussianBlurVertexShader = [[self class] vertexShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];
- NSString *newGaussianBlurFragmentShader = [[self class] fragmentShaderForOptimizedBlurOfRadius:calculatedSampleRadius sigma:_blurRadiusInPixels];
-
-// NSLog(@"Optimized vertex shader: \n%@", newGaussianBlurVertexShader);
-// NSLog(@"Optimized fragment shader: \n%@", newGaussianBlurFragmentShader);
-//
- [self switchToVertexShader:newGaussianBlurVertexShader fragmentShader:newGaussianBlurFragmentShader];
- }
- shouldResizeBlurRadiusWithImageSize = NO;
-}
-
-- (void)setBlurRadiusAsFractionOfImageWidth:(CGFloat)blurRadiusAsFractionOfImageWidth
-{
- if (blurRadiusAsFractionOfImageWidth < 0) return;
-
- shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageWidth != blurRadiusAsFractionOfImageWidth && blurRadiusAsFractionOfImageWidth > 0;
- _blurRadiusAsFractionOfImageWidth = blurRadiusAsFractionOfImageWidth;
- _blurRadiusAsFractionOfImageHeight = 0;
-}
-
-- (void)setBlurRadiusAsFractionOfImageHeight:(CGFloat)blurRadiusAsFractionOfImageHeight
-{
- if (blurRadiusAsFractionOfImageHeight < 0) return;
-
- shouldResizeBlurRadiusWithImageSize = _blurRadiusAsFractionOfImageHeight != blurRadiusAsFractionOfImageHeight && blurRadiusAsFractionOfImageHeight > 0;
- _blurRadiusAsFractionOfImageHeight = blurRadiusAsFractionOfImageHeight;
- _blurRadiusAsFractionOfImageWidth = 0;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.h
deleted file mode 100755
index dc88a56..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.h
+++ /dev/null
@@ -1,22 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-/** A more generalized 9x9 Gaussian blur filter
- */
-@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
-{
- GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
-}
-
-/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
- */
-@property (readwrite, nonatomic) CGFloat blurSize;
-
-/** Center for the blur, defaults to 0.5, 0.5
- */
-@property (readwrite, nonatomic) CGPoint blurCenter;
-
-/** Radius for the blur, defaults to 1.0
- */
-@property (readwrite, nonatomic) CGFloat blurRadius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.m
deleted file mode 100755
index 8ecd924..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianBlurPositionFilter.m
+++ /dev/null
@@ -1,232 +0,0 @@
-#import "GPUImageGaussianBlurPositionFilter.h"
-
-NSString *const kGPUImageGaussianBlurPositionVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- const int GAUSSIAN_SAMPLES = 9;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
- varying vec2 textureCoordinate;
- varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = inputTextureCoordinate.xy;
-
- // Calculate the positions for the blur
- int multiplier = 0;
- vec2 blurStep;
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);
-
- for (int i = 0; i < GAUSSIAN_SAMPLES; i++) {
- multiplier = (i - ((GAUSSIAN_SAMPLES - 1) / 2));
- // Blur in x (horizontal)
- blurStep = float(multiplier) * singleStepOffset;
- blurCoordinates[i] = inputTextureCoordinate.xy + blurStep;
- }
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- const lowp int GAUSSIAN_SAMPLES = 9;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- uniform highp float aspectRatio;
- uniform lowp vec2 blurCenter;
- uniform highp float blurRadius;
-
- void main() {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float dist = distance(blurCenter, textureCoordinateToUse);
-
- if (dist < blurRadius)
- {
- lowp vec4 sum = vec4(0.0);
-
- sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;
- sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;
- sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;
- sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;
- sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;
- sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;
- sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;
- sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;
- sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;
-
- gl_FragColor = sum;
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
- }
-);
-#else
-NSString *const kGPUImageGaussianBlurPositionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- const int GAUSSIAN_SAMPLES = 9;
-
- varying vec2 textureCoordinate;
- varying vec2 blurCoordinates[GAUSSIAN_SAMPLES];
-
- uniform float aspectRatio;
- uniform vec2 blurCenter;
- uniform float blurRadius;
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float dist = distance(blurCenter, textureCoordinateToUse);
-
- if (dist < blurRadius)
- {
- vec4 sum = vec4(0.0);
-
- sum += texture2D(inputImageTexture, blurCoordinates[0]) * 0.05;
- sum += texture2D(inputImageTexture, blurCoordinates[1]) * 0.09;
- sum += texture2D(inputImageTexture, blurCoordinates[2]) * 0.12;
- sum += texture2D(inputImageTexture, blurCoordinates[3]) * 0.15;
- sum += texture2D(inputImageTexture, blurCoordinates[4]) * 0.18;
- sum += texture2D(inputImageTexture, blurCoordinates[5]) * 0.15;
- sum += texture2D(inputImageTexture, blurCoordinates[6]) * 0.12;
- sum += texture2D(inputImageTexture, blurCoordinates[7]) * 0.09;
- sum += texture2D(inputImageTexture, blurCoordinates[8]) * 0.05;
-
- gl_FragColor = sum;
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate);
- }
- }
-);
-#endif
-
-@interface GPUImageGaussianBlurPositionFilter ()
-
-- (void)adjustAspectRatio;
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
-
-@implementation GPUImageGaussianBlurPositionFilter
-
-@synthesize blurSize = _blurSize;
-@synthesize blurCenter = _blurCenter;
-@synthesize aspectRatio = _aspectRatio;
-
-- (id) initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString
- firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString
- secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString
- secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString {
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString ? firstStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString
- firstStageFragmentShaderFromString:firstStageFragmentShaderString ? firstStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString
- secondStageVertexShaderFromString:secondStageVertexShaderString ? secondStageVertexShaderString : kGPUImageGaussianBlurPositionVertexShaderString
- secondStageFragmentShaderFromString:secondStageFragmentShaderString ? secondStageFragmentShaderString : kGPUImageGaussianBlurPositionFragmentShaderString])) {
- return nil;
- }
-
- aspectRatioUniform = [secondFilterProgram uniformIndex:@"aspectRatio"];
- blurCenterUniform = [secondFilterProgram uniformIndex:@"blurCenter"];
- blurRadiusUniform = [secondFilterProgram uniformIndex:@"blurRadius"];
-
- self.blurSize = 1.0;
- self.blurRadius = 1.0;
- self.blurCenter = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-- (id)init;
-{
- return [self initWithFirstStageVertexShaderFromString:nil
- firstStageFragmentShaderFromString:nil
- secondStageVertexShaderFromString:nil
- secondStageFragmentShaderFromString:nil];
-}
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setBlurCenter:self.blurCenter];
- [self adjustAspectRatio];
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurSize:(CGFloat)newValue;
-{
- _blurSize = newValue;
-
- _verticalTexelSpacing = _blurSize;
- _horizontalTexelSpacing = _blurSize;
-
- [self setupFilterForSize:[self sizeOfFBO]];
-}
-
-- (void) setBlurCenter:(CGPoint)blurCenter;
-{
- _blurCenter = blurCenter;
- CGPoint rotatedPoint = [self rotatedPoint:blurCenter forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:blurCenterUniform program:secondFilterProgram];
-}
-
-- (void) setBlurRadius:(CGFloat)blurRadius;
-{
- _blurRadius = blurRadius;
-
- [self setFloat:_blurRadius forUniform:blurRadiusUniform program:secondFilterProgram];
-}
-
-- (void) setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:secondFilterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.h
deleted file mode 100755
index 0232456..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.h
+++ /dev/null
@@ -1,30 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGaussianBlurFilter;
-
-/** A Gaussian blur that preserves focus within a circular region
- */
-@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
-{
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageFilter *selectiveFocusFilter;
- BOOL hasOverriddenAspectRatio;
-}
-
-/** The radius of the circular area being excluded from the blur
- */
-@property (readwrite, nonatomic) CGFloat excludeCircleRadius;
-/** The center of the circular area being excluded from the blur
- */
-@property (readwrite, nonatomic) CGPoint excludeCirclePoint;
-/** The size of the area between the blurred portion and the clear circle
- */
-@property (readwrite, nonatomic) CGFloat excludeBlurSize;
-/** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
- */
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
- */
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.m
deleted file mode 100755
index 7ebc9e1..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGaussianSelectiveBlurFilter.m
+++ /dev/null
@@ -1,147 +0,0 @@
-#import "GPUImageGaussianSelectiveBlurFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageTwoInputFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform lowp float excludeCircleRadius;
- uniform lowp vec2 excludeCirclePoint;
- uniform lowp float excludeBlurSize;
- uniform highp float aspectRatio;
-
- void main()
- {
- lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);
-
- gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));
- }
-);
-#else
-NSString *const kGPUImageGaussianSelectiveBlurFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float excludeCircleRadius;
- uniform vec2 excludeCirclePoint;
- uniform float excludeBlurSize;
- uniform float aspectRatio;
-
- void main()
- {
- vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- vec2 textureCoordinateToUse = vec2(textureCoordinate2.x, (textureCoordinate2.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float distanceFromCenter = distance(excludeCirclePoint, textureCoordinateToUse);
-
- gl_FragColor = mix(sharpImageColor, blurredImageColor, smoothstep(excludeCircleRadius - excludeBlurSize, excludeCircleRadius, distanceFromCenter));
- }
-);
-#endif
-
-@implementation GPUImageGaussianSelectiveBlurFilter
-
-@synthesize excludeCirclePoint = _excludeCirclePoint, excludeCircleRadius = _excludeCircleRadius, excludeBlurSize = _excludeBlurSize;
-@synthesize blurRadiusInPixels = _blurRadiusInPixels;
-@synthesize aspectRatio = _aspectRatio;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- hasOverriddenAspectRatio = NO;
-
- // First pass: apply a variable Gaussian blur
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Second pass: combine the blurred image with the original sharp one
- selectiveFocusFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageGaussianSelectiveBlurFragmentShaderString];
- [self addFilter:selectiveFocusFilter];
-
- // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
- [blurFilter addTarget:selectiveFocusFilter atTextureLocation:1];
-
- // To prevent double updating of this filter, disable updates from the sharp image side
- self.initialFilters = [NSArray arrayWithObjects:blurFilter, selectiveFocusFilter, nil];
- self.terminalFilter = selectiveFocusFilter;
-
- self.blurRadiusInPixels = 5.0;
-
- self.excludeCircleRadius = 60.0/320.0;
- self.excludeCirclePoint = CGPointMake(0.5f, 0.5f);
- self.excludeBlurSize = 30.0/320.0;
-
- return self;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
- inputTextureSize = newSize;
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!hasOverriddenAspectRatio) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- _aspectRatio = (inputTextureSize.width / inputTextureSize.height);
- [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"];
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setExcludeCirclePoint:(CGPoint)newValue;
-{
- _excludeCirclePoint = newValue;
- [selectiveFocusFilter setPoint:newValue forUniformName:@"excludeCirclePoint"];
-}
-
-- (void)setExcludeCircleRadius:(CGFloat)newValue;
-{
- _excludeCircleRadius = newValue;
- [selectiveFocusFilter setFloat:newValue forUniformName:@"excludeCircleRadius"];
-}
-
-- (void)setExcludeBlurSize:(CGFloat)newValue;
-{
- _excludeBlurSize = newValue;
- [selectiveFocusFilter setFloat:newValue forUniformName:@"excludeBlurSize"];
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- hasOverriddenAspectRatio = YES;
- _aspectRatio = newValue;
- [selectiveFocusFilter setFloat:_aspectRatio forUniformName:@"aspectRatio"];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.h
deleted file mode 100644
index 809a4ee..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageSphereRefractionFilter.h"
-
-@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.m
deleted file mode 100644
index 1866291..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGlassSphereFilter.m
+++ /dev/null
@@ -1,106 +0,0 @@
-#import "GPUImageGlassSphereFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 center;
- uniform highp float radius;
- uniform highp float aspectRatio;
- uniform highp float refractiveIndex;
-// uniform vec3 lightPosition;
- const highp vec3 lightPosition = vec3(-0.5, 0.5, 1.0);
- const highp vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);
-
- void main()
- {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float distanceFromCenter = distance(center, textureCoordinateToUse);
- lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
-
- distanceFromCenter = distanceFromCenter / radius;
-
- highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
- highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
-
- highp vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
- refractedVector.xy = -refractedVector.xy;
-
- highp vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;
-
- // Grazing angle lighting
- highp float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));
- finalSphereColor += lightingIntensity;
-
- // Specular lighting
- lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);
- lightingIntensity = pow(lightingIntensity, 15.0);
- finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;
-
- gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;
- }
-);
-#else
-NSString *const kGPUImageGlassSphereFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 center;
- uniform float radius;
- uniform float aspectRatio;
- uniform float refractiveIndex;
- // uniform vec3 lightPosition;
- const vec3 lightPosition = vec3(-0.5, 0.5, 1.0);
- const vec3 ambientLightPosition = vec3(0.0, 0.0, 1.0);
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float distanceFromCenter = distance(center, textureCoordinateToUse);
- float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
-
- distanceFromCenter = distanceFromCenter / radius;
-
- float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
- vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
-
- vec3 refractedVector = 2.0 * refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
- refractedVector.xy = -refractedVector.xy;
-
- vec3 finalSphereColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5).rgb;
-
- // Grazing angle lighting
- float lightingIntensity = 2.5 * (1.0 - pow(clamp(dot(ambientLightPosition, sphereNormal), 0.0, 1.0), 0.25));
- finalSphereColor += lightingIntensity;
-
- // Specular lighting
- lightingIntensity = clamp(dot(normalize(lightPosition), sphereNormal), 0.0, 1.0);
- lightingIntensity = pow(lightingIntensity, 15.0);
- finalSphereColor += vec3(0.8, 0.8, 0.8) * lightingIntensity;
-
- gl_FragColor = vec4(finalSphereColor, 1.0) * checkForPresenceWithinSphere;
- }
-);
-#endif
-
-@implementation GPUImageGlassSphereFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageGlassSphereFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.h
deleted file mode 100755
index 2d97f8c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#import "GPUImageFilter.h"
-
-extern NSString *const kGPUImageLuminanceFragmentShaderString;
-
-/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
- */
-@interface GPUImageGrayscaleFilter : GPUImageFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.m
deleted file mode 100755
index 0066ca8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageGrayscaleFilter.m
+++ /dev/null
@@ -1,141 +0,0 @@
-#import "GPUImageGrayscaleFilter.h"
-
-@implementation GPUImageGrayscaleFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, W);
-
- gl_FragColor = vec4(vec3(luminance), textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageLuminanceFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, W);
-
- gl_FragColor = vec4(vec3(luminance), textureColor.a);
- }
-);
-#endif
-
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (!currentlyReceivingMonochromeInput)
- {
- [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
- }
-}
-
-//- (void)setInputTexture:(GLuint)newInputTexture atIndex:(NSInteger)textureIndex;
-//{
-// [super setInputTexture:newInputTexture atIndex:textureIndex];
-// if (currentlyReceivingMonochromeInput)
-// {
-// [self notifyTargetsAboutNewOutputTexture];
-// }
-//}
-
-//- (GLuint)textureForOutput;
-//{
-// if (currentlyReceivingMonochromeInput)
-// {
-// return filterSourceTexture;
-// }
-// else
-// {
-// return outputTexture;
-// }
-//}
-
-- (BOOL)wantsMonochromeInput;
-{
-// return YES;
- return NO;
-}
-
-- (BOOL)providesMonochromeOutput;
-{
-// return YES;
- return NO;
-}
-
-// TODO: Rewrite this based on the new GPUImageFilter implementation
-//- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
-//{
-// if (self.frameProcessingCompletionBlock != NULL)
-// {
-// self.frameProcessingCompletionBlock(self, frameTime);
-// }
-//
-// for (id currentTarget in targets)
-// {
-// if (currentTarget != self.targetToIgnoreForUpdates)
-// {
-// NSInteger indexOfObject = [targets indexOfObject:currentTarget];
-// NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-//
-// if ([GPUImageContext supportsFastTextureUpload] && preparedToCaptureImage)
-// {
-// [self setInputTextureForTarget:currentTarget atIndex:textureIndex];
-// }
-//
-// if (currentlyReceivingMonochromeInput)
-// {
-// [currentTarget setInputRotation:inputRotation atIndex:textureIndex];
-//
-// CGSize sizeToRotate = [self outputFrameSize];
-// CGSize rotatedSize = sizeToRotate;
-// if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
-// {
-// rotatedSize.width = sizeToRotate.height;
-// rotatedSize.height = sizeToRotate.width;
-// }
-// [currentTarget setInputSize:rotatedSize atIndex:textureIndex];
-// }
-// else
-// {
-// [currentTarget setInputSize:[self outputFrameSize] atIndex:textureIndex];
-// }
-// [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndex];
-// }
-// }
-//}
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.h
deleted file mode 100644
index 65a9e1d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.h
+++ /dev/null
@@ -1,27 +0,0 @@
-#import "GPUImageColorMatrixFilter.h"
-
-@interface GPUImageHSBFilter : GPUImageColorMatrixFilter
-
-/** Reset the filter to have no transformations.
- */
-- (void)reset;
-
-/** Add a hue rotation to the filter.
- The hue rotation is in the range [-360, 360] with 0 being no-change.
- Note that this adjustment is additive, so use the reset method if you need to.
- */
-- (void)rotateHue:(float)h;
-
-/** Add a saturation adjustment to the filter.
- The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
- Note that this adjustment is additive, so use the reset method if you need to.
- */
-- (void)adjustSaturation:(float)s;
-
-/** Add a brightness adjustment to the filter.
- The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
- Note that this adjustment is additive, so use the reset method if you need to.
- */
-- (void)adjustBrightness:(float)b;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.m
deleted file mode 100644
index eb668f7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHSBFilter.m
+++ /dev/null
@@ -1,414 +0,0 @@
-#import "GPUImageHSBFilter.h"
-
-@implementation GPUImageHSBFilter {
- float matrix[4][4];
-}
-
-- (id)init
-{
- self = [super init];
- if (self) {
- [self reset];
- }
- return self;
-}
-
-- (void)reset {
- identmat(matrix);
- [self _updateColorMatrix];
-}
-
-- (void)rotateHue:(float)h {
- huerotatemat(matrix, h);
- [self _updateColorMatrix];
-}
-
-- (void)adjustSaturation:(float)s {
- saturatemat(matrix, s);
- [self _updateColorMatrix];
-}
-
-- (void)adjustBrightness:(float)b {
- cscalemat(matrix, b, b, b);
- [self _updateColorMatrix];
-}
-
-- (void)_updateColorMatrix {
- GPUMatrix4x4 gpuMatrix;
- gpuMatrix.one.one = matrix[0][0];
- gpuMatrix.one.two = matrix[1][0];
- gpuMatrix.one.three = matrix[2][0];
- gpuMatrix.one.four = matrix[3][0];
- gpuMatrix.two.one = matrix[0][1];
- gpuMatrix.two.two = matrix[1][1];
- gpuMatrix.two.three = matrix[2][1];
- gpuMatrix.two.four = matrix[3][1];
- gpuMatrix.three.one = matrix[0][2];
- gpuMatrix.three.two = matrix[1][2];
- gpuMatrix.three.three = matrix[2][2];
- gpuMatrix.three.four = matrix[3][2];
- gpuMatrix.four.one = matrix[0][3];
- gpuMatrix.four.two = matrix[1][3];
- gpuMatrix.four.three = matrix[2][3];
- gpuMatrix.four.four = matrix[3][3];
- self.colorMatrix = gpuMatrix;
-}
-
-#pragma mark - Matrix algorithms
-
-/* Matrix algorithms adapted from http://www.graficaobscura.com/matrix/index.html
-
- Note about luminance vector values below from that page:
- Where rwgt is 0.3086, gwgt is 0.6094, and bwgt is 0.0820. This is the luminance vector. Notice here that we do not use the standard NTSC weights of 0.299, 0.587, and 0.114. The NTSC weights are only applicable to RGB colors in a gamma 2.2 color space. For linear RGB colors the values above are better.
- */
-//#define RLUM (0.3086f)
-//#define GLUM (0.6094f)
-//#define BLUM (0.0820f)
-
-/* This is the vector value from the PDF specification, and may be closer to what Photoshop uses */
-#define RLUM (0.3f)
-#define GLUM (0.59f)
-#define BLUM (0.11f)
-
-/*
- * matrixmult -
- * multiply two matricies
- */
-static void matrixmult(a,b,c)
-float a[4][4], b[4][4], c[4][4];
-{
- int x, y;
- float temp[4][4];
-
- for(y=0; y<4 ; y++)
- for(x=0 ; x<4 ; x++) {
- temp[y][x] = b[y][0] * a[0][x]
- + b[y][1] * a[1][x]
- + b[y][2] * a[2][x]
- + b[y][3] * a[3][x];
- }
- for(y=0; y<4; y++)
- for(x=0; x<4; x++)
- c[y][x] = temp[y][x];
-}
-
-/*
- * identmat -
- * make an identity matrix
- */
-static void identmat(matrix)
-float matrix[4][4];
-{
- memset(matrix, 0, sizeof(float[4][4]));
- matrix[0][0] = 1.0f;
- matrix[1][1] = 1.0f;
- matrix[2][2] = 1.0f;
- matrix[3][3] = 1.0f;
-}
-
-/*
- * xformpnt -
- * transform a 3D point using a matrix
- */
-static void xformpnt(matrix,x,y,z,tx,ty,tz)
-float matrix[4][4];
-float x,y,z;
-float *tx,*ty,*tz;
-{
- *tx = x*matrix[0][0] + y*matrix[1][0] + z*matrix[2][0] + matrix[3][0];
- *ty = x*matrix[0][1] + y*matrix[1][1] + z*matrix[2][1] + matrix[3][1];
- *tz = x*matrix[0][2] + y*matrix[1][2] + z*matrix[2][2] + matrix[3][2];
-}
-
-/*
- * cscalemat -
- * make a color scale marix
- */
-static void cscalemat(mat,rscale,gscale,bscale)
-float mat[4][4];
-float rscale, gscale, bscale;
-{
- float mmat[4][4];
-
- mmat[0][0] = rscale;
- mmat[0][1] = 0.0;
- mmat[0][2] = 0.0;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = 0.0;
- mmat[1][1] = gscale;
- mmat[1][2] = 0.0;
- mmat[1][3] = 0.0;
-
-
- mmat[2][0] = 0.0;
- mmat[2][1] = 0.0;
- mmat[2][2] = bscale;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * saturatemat -
- * make a saturation marix
- */
-static void saturatemat(mat,sat)
-float mat[4][4];
-float sat;
-{
- float mmat[4][4];
- float a, b, c, d, e, f, g, h, i;
- float rwgt, gwgt, bwgt;
-
- rwgt = RLUM;
- gwgt = GLUM;
- bwgt = BLUM;
-
- a = (1.0-sat)*rwgt + sat;
- b = (1.0-sat)*rwgt;
- c = (1.0-sat)*rwgt;
- d = (1.0-sat)*gwgt;
- e = (1.0-sat)*gwgt + sat;
- f = (1.0-sat)*gwgt;
- g = (1.0-sat)*bwgt;
- h = (1.0-sat)*bwgt;
- i = (1.0-sat)*bwgt + sat;
- mmat[0][0] = a;
- mmat[0][1] = b;
- mmat[0][2] = c;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = d;
- mmat[1][1] = e;
- mmat[1][2] = f;
- mmat[1][3] = 0.0;
-
- mmat[2][0] = g;
- mmat[2][1] = h;
- mmat[2][2] = i;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * xrotate -
- * rotate about the x (red) axis
- */
-static void xrotatemat(mat,rs,rc)
-float mat[4][4];
-float rs, rc;
-{
- float mmat[4][4];
-
- mmat[0][0] = 1.0;
- mmat[0][1] = 0.0;
- mmat[0][2] = 0.0;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = 0.0;
- mmat[1][1] = rc;
- mmat[1][2] = rs;
- mmat[1][3] = 0.0;
-
- mmat[2][0] = 0.0;
- mmat[2][1] = -rs;
- mmat[2][2] = rc;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * yrotate -
- * rotate about the y (green) axis
- */
-static void yrotatemat(mat,rs,rc)
-float mat[4][4];
-float rs, rc;
-{
- float mmat[4][4];
-
- mmat[0][0] = rc;
- mmat[0][1] = 0.0;
- mmat[0][2] = -rs;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = 0.0;
- mmat[1][1] = 1.0;
- mmat[1][2] = 0.0;
- mmat[1][3] = 0.0;
-
- mmat[2][0] = rs;
- mmat[2][1] = 0.0;
- mmat[2][2] = rc;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * zrotate -
- * rotate about the z (blue) axis
- */
-static void zrotatemat(mat,rs,rc)
-float mat[4][4];
-float rs, rc;
-{
- float mmat[4][4];
-
- mmat[0][0] = rc;
- mmat[0][1] = rs;
- mmat[0][2] = 0.0;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = -rs;
- mmat[1][1] = rc;
- mmat[1][2] = 0.0;
- mmat[1][3] = 0.0;
-
- mmat[2][0] = 0.0;
- mmat[2][1] = 0.0;
- mmat[2][2] = 1.0;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * zshear -
- * shear z using x and y.
- */
-static void zshearmat(mat,dx,dy)
-float mat[4][4];
-float dx, dy;
-{
- float mmat[4][4];
-
- mmat[0][0] = 1.0;
- mmat[0][1] = 0.0;
- mmat[0][2] = dx;
- mmat[0][3] = 0.0;
-
- mmat[1][0] = 0.0;
- mmat[1][1] = 1.0;
- mmat[1][2] = dy;
- mmat[1][3] = 0.0;
-
- mmat[2][0] = 0.0;
- mmat[2][1] = 0.0;
- mmat[2][2] = 1.0;
- mmat[2][3] = 0.0;
-
- mmat[3][0] = 0.0;
- mmat[3][1] = 0.0;
- mmat[3][2] = 0.0;
- mmat[3][3] = 1.0;
- matrixmult(mmat,mat,mat);
-}
-
-/*
- * simplehuerotatemat -
- * simple hue rotation. This changes luminance
- */
-//static void simplehuerotatemat(mat,rot)
-//float mat[4][4];
-//float rot;
-//{
-// float mag;
-// float xrs, xrc;
-// float yrs, yrc;
-// float zrs, zrc;
-//
-// /* rotate the grey vector into positive Z */
-// mag = sqrt(2.0);
-// xrs = 1.0/mag;
-// xrc = 1.0/mag;
-// xrotatemat(mat,xrs,xrc);
-//
-// mag = sqrt(3.0);
-// yrs = -1.0/mag;
-// yrc = sqrt(2.0)/mag;
-// yrotatemat(mat,yrs,yrc);
-//
-// /* rotate the hue */
-// zrs = sin(rot*M_PI/180.0);
-// zrc = cos(rot*M_PI/180.0);
-// zrotatemat(mat,zrs,zrc);
-//
-// /* rotate the grey vector back into place */
-// yrotatemat(mat,-yrs,yrc);
-// xrotatemat(mat,-xrs,xrc);
-//}
-
-/*
- * huerotatemat -
- * rotate the hue, while maintaining luminance.
- */
-static void huerotatemat(mat,rot)
-float mat[4][4];
-float rot;
-{
- float mmat[4][4];
- float mag;
- float lx, ly, lz;
- float xrs, xrc;
- float yrs, yrc;
- float zrs, zrc;
- float zsx, zsy;
-
- identmat(mmat);
-
- /* rotate the grey vector into positive Z */
- mag = sqrt(2.0);
- xrs = 1.0/mag;
- xrc = 1.0/mag;
- xrotatemat(mmat,xrs,xrc);
- mag = sqrt(3.0);
- yrs = -1.0/mag;
- yrc = sqrt(2.0)/mag;
- yrotatemat(mmat,yrs,yrc);
-
- /* shear the space to make the luminance plane horizontal */
- xformpnt(mmat,RLUM,GLUM,BLUM,&lx,&ly,&lz);
- zsx = lx/lz;
- zsy = ly/lz;
- zshearmat(mmat,zsx,zsy);
-
- /* rotate the hue */
- zrs = sin(rot*M_PI/180.0);
- zrc = cos(rot*M_PI/180.0);
- zrotatemat(mmat,zrs,zrc);
-
- /* unshear the space to put the luminance plane back */
- zshearmat(mmat,-zsx,-zsy);
-
- /* rotate the grey vector back into place */
- yrotatemat(mmat,-yrs,yrc);
- xrotatemat(mmat,-xrs,xrc);
-
- matrixmult(mmat,mat,mat);
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.h
deleted file mode 100644
index 1860bc9..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImagePixellateFilter.h"
-
-@interface GPUImageHalftoneFilter : GPUImagePixellateFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.m
deleted file mode 100644
index 1b621c6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHalftoneFilter.m
+++ /dev/null
@@ -1,79 +0,0 @@
-#import "GPUImageHalftoneFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float fractionalWidthOfPixel;
- uniform highp float aspectRatio;
- uniform highp float dotScaling;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
-
- lowp vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;
- highp float dotScaling = 1.0 - dot(sampledColor, W);
-
- lowp float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
-
- gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);
- }
-);
-#else
-NSString *const kGPUImageHalftoneFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float fractionalWidthOfPixel;
- uniform float aspectRatio;
- uniform float dotScaling;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
-
- vec3 sampledColor = texture2D(inputImageTexture, samplePos ).rgb;
- float dotScaling = 1.0 - dot(sampledColor, W);
-
- float checkForPresenceWithinDot = 1.0 - step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
-
- gl_FragColor = vec4(vec3(checkForPresenceWithinDot), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageHalftoneFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageHalftoneFragmentShaderString]))
- {
- return nil;
- }
-
- self.fractionalWidthOfAPixel = 0.01;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.h
deleted file mode 100755
index 47d6260..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.m
deleted file mode 100755
index 2896ce8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHardLightBlendFilter.m
+++ /dev/null
@@ -1,99 +0,0 @@
-#import "GPUImageHardLightBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- highp float ra;
- if (2.0 * overlay.r < overlay.a) {
- ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- }
-
- highp float ga;
- if (2.0 * overlay.g < overlay.a) {
- ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- }
-
- highp float ba;
- if (2.0 * overlay.b < overlay.a) {
- ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- }
-
- gl_FragColor = vec4(ra, ga, ba, 1.0);
- }
-);
-#else
-NSString *const kGPUImageHardLightBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- float ra;
- if (2.0 * overlay.r < overlay.a) {
- ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- }
-
- float ga;
- if (2.0 * overlay.g < overlay.a) {
- ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- }
-
- float ba;
- if (2.0 * overlay.b < overlay.a) {
- ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- }
-
- gl_FragColor = vec4(ra, ga, ba, 1.0);
- }
-);
-#endif
-
-
-@implementation GPUImageHardLightBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageHardLightBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.h
deleted file mode 100755
index 1492b8b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.h
+++ /dev/null
@@ -1,53 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGaussianBlurFilter;
-@class GPUImageXYDerivativeFilter;
-@class GPUImageGrayscaleFilter;
-@class GPUImageGaussianBlurFilter;
-@class GPUImageThresholdedNonMaximumSuppressionFilter;
-@class GPUImageColorPackingFilter;
-
-//#define DEBUGFEATUREDETECTION
-
-/** Harris corner detector
-
- First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)
-
- Second pass: blur the derivative (GPUImageGaussianBlurFilter)
-
- Third pass: apply the Harris corner detection calculation
-
- This is the Harris corner detector, as described in
- C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
- */
-@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
-{
- GPUImageXYDerivativeFilter *derivativeFilter;
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageFilter *harrisCornerDetectionFilter;
- GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
- GPUImageColorPackingFilter *colorPackingFilter;
- GLfloat *cornersArray;
- GLubyte *rawImagePixels;
-}
-
-/** The radius of the underlying Gaussian blur. The default is 2.0.
- */
-@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.
-@property(readwrite, nonatomic) CGFloat sensitivity;
-
-// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.
-@property(readwrite, nonatomic) CGFloat threshold;
-
-// This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame
-@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);
-
-// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector
-@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;
-
-// Initialization and teardown
-- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.m
deleted file mode 100755
index 999748d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHarrisCornerDetectionFilter.m
+++ /dev/null
@@ -1,292 +0,0 @@
-#import "GPUImageHarrisCornerDetectionFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageXYDerivativeFilter.h"
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
-#import "GPUImageColorPackingFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-
-@interface GPUImageHarrisCornerDetectionFilter()
-
-- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;
-
-@end
-
-// This is the Harris corner detector, as described in
-// C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
-
-@implementation GPUImageHarrisCornerDetectionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float sensitivity;
-
- const mediump float harrisConstant = 0.04;
-
- void main()
- {
- mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- mediump float derivativeSum = derivativeElements.x + derivativeElements.y;
-
- mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
-
- // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
- mediump float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;
-
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#else
-NSString *const kGPUImageHarrisCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float sensitivity;
-
- const float harrisConstant = 0.04;
-
- void main()
- {
- vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- float derivativeSum = derivativeElements.x + derivativeElements.y;
-
- float zElement = (derivativeElements.z * 2.0) - 1.0;
-
- // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
- float cornerness = derivativeElements.x * derivativeElements.y - (zElement * zElement) - harrisConstant * derivativeSum * derivativeSum;
-
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#endif
-
-@synthesize blurRadiusInPixels;
-@synthesize cornersDetectedBlock;
-@synthesize sensitivity = _sensitivity;
-@synthesize threshold = _threshold;
-@synthesize intermediateImages = _intermediateImages;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageHarrisCornerDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-#ifdef DEBUGFEATUREDETECTION
- _intermediateImages = [[NSMutableArray alloc] init];
-#endif
-
- // First pass: reduce to luminance and take the derivative of the luminance texture
- derivativeFilter = [[GPUImageXYDerivativeFilter alloc] init];
- [self addFilter:derivativeFilter];
-
-#ifdef DEBUGFEATUREDETECTION
- __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;
- __unsafe_unretained GPUImageFilter *weakFilter = derivativeFilter;
- [derivativeFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
- UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
- [weakIntermediateImages addObject:intermediateImage];
- }];
-#endif
-
- // Second pass: blur the derivative
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
-#ifdef DEBUGFEATUREDETECTION
- weakFilter = blurFilter;
- [blurFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
- UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
- [weakIntermediateImages addObject:intermediateImage];
- }];
-#endif
-
- // Third pass: apply the Harris corner detection calculation
- harrisCornerDetectionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:cornerDetectionFragmentShader];
- [self addFilter:harrisCornerDetectionFilter];
-
-#ifdef DEBUGFEATUREDETECTION
- weakFilter = harrisCornerDetectionFilter;
- [harrisCornerDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
- UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
- [weakIntermediateImages addObject:intermediateImage];
- }];
-#endif
-
- // Fourth pass: apply non-maximum suppression and thresholding to find the local maxima
- nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] init];
- [self addFilter:nonMaximumSuppressionFilter];
-
- __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
-#ifdef DEBUGFEATUREDETECTION
- weakFilter = nonMaximumSuppressionFilter;
- [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
- UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
- [weakIntermediateImages addObject:intermediateImage];
-
- [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
- }];
-#else
- [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
- [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
- }];
-#endif
-
-// Sixth pass: compress the thresholded points into the RGBA channels
-// colorPackingFilter = [[GPUImageColorPackingFilter alloc] init];
-// [self addFilter:colorPackingFilter];
-//
-//
-//#ifdef DEBUGFEATUREDETECTION
-// __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
-// weakFilter = colorPackingFilter;
-// [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
-// NSLog(@"Triggered response from compaction filter");
-//
-// UIImage *intermediateImage = [weakFilter imageFromCurrentlyProcessedOutput];
-// [weakIntermediateImages addObject:intermediateImage];
-//
-// [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
-// }];
-//#else
-// __unsafe_unretained GPUImageHarrisCornerDetectionFilter *weakSelf = self;
-// [colorPackingFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
-// [weakSelf extractCornerLocationsFromImageAtFrameTime:frameTime];
-// }];
-//#endif
-
- [derivativeFilter addTarget:blurFilter];
- [blurFilter addTarget:harrisCornerDetectionFilter];
- [harrisCornerDetectionFilter addTarget:nonMaximumSuppressionFilter];
-// [simpleThresholdFilter addTarget:colorPackingFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:derivativeFilter, nil];
-// self.terminalFilter = colorPackingFilter;
- self.terminalFilter = nonMaximumSuppressionFilter;
-
- self.blurRadiusInPixels = 2.0;
- self.sensitivity = 5.0;
- self.threshold = 0.20;
-
- return self;
-}
-
-- (void)dealloc;
-{
- free(rawImagePixels);
- free(cornersArray);
-}
-
-#pragma mark -
-#pragma mark Corner extraction
-
-- (void)extractCornerLocationsFromImageAtFrameTime:(CMTime)frameTime;
-{
- // we need a normal color texture for this filter
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- NSUInteger numberOfCorners = 0;
- CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;
-
- unsigned int imageByteSize = imageSize.width * imageSize.height * 4;
-
- if (rawImagePixels == NULL)
- {
- rawImagePixels = (GLubyte *)malloc(imageByteSize);
- cornersArray = calloc(512 * 2, sizeof(GLfloat));
- }
-
- glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-
- CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- unsigned int imageWidth = imageSize.width * 4;
-
- unsigned int currentByte = 0;
- unsigned int cornerStorageIndex = 0;
- while (currentByte < imageByteSize)
- {
- GLubyte colorByte = rawImagePixels[currentByte];
-
- if (colorByte > 0)
- {
- unsigned int xCoordinate = currentByte % imageWidth;
- unsigned int yCoordinate = currentByte / imageWidth;
-
- cornersArray[cornerStorageIndex++] = (CGFloat)(xCoordinate / 4) / imageSize.width;
- cornersArray[cornerStorageIndex++] = (CGFloat)(yCoordinate) / imageSize.height;
- numberOfCorners++;
-
- numberOfCorners = MIN(numberOfCorners, 511);
- cornerStorageIndex = MIN(cornerStorageIndex, 1021);
- }
- currentByte +=4;
- }
-
- CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
- NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime);
-
- if (cornersDetectedBlock != NULL)
- {
- cornersDetectedBlock(cornersArray, numberOfCorners, frameTime);
- }
-}
-
-- (BOOL)wantsMonochromeInput;
-{
-// return YES;
- return NO;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setSensitivity:(CGFloat)newValue;
-{
- _sensitivity = newValue;
- [harrisCornerDetectionFilter setFloat:newValue forUniformName:@"sensitivity"];
-}
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- nonMaximumSuppressionFilter.threshold = newValue;
-}
-
-- (CGFloat)threshold;
-{
- return nonMaximumSuppressionFilter.threshold;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.h
deleted file mode 100755
index eb3fbca..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.h
+++ /dev/null
@@ -1,29 +0,0 @@
-#import "GPUImageFilter.h"
-
-/*
- * The haze filter can be used to add or remove haze (similar to a UV filter)
- *
- * @author Alaric Cole
- * @creationDate 03/10/12
- *
- */
-
-/** The haze filter can be used to add or remove haze
-
- This is similar to a UV filter
- */
-@interface GPUImageHazeFilter : GPUImageFilter
-{
- GLint distanceUniform;
- GLint slopeUniform;
-}
-
-/** Strength of the color applied. Default 0. Values between -.3 and .3 are best
- */
-@property(readwrite, nonatomic) CGFloat distance;
-
-/** Amount of color change. Default 0. Values between -.3 and .3 are best
- */
-@property(readwrite, nonatomic) CGFloat slope;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.m
deleted file mode 100755
index f90fc22..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHazeFilter.m
+++ /dev/null
@@ -1,96 +0,0 @@
-#import "GPUImageHazeFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform lowp float hazeDistance;
- uniform highp float slope;
-
- void main()
- {
- //todo reconsider precision modifiers
- highp vec4 color = vec4(1.0);//todo reimplement as a parameter
-
- highp float d = textureCoordinate.y * slope + hazeDistance;
-
- highp vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply
-
- c = (c - d * color) / (1.0 -d);
-
- gl_FragColor = c; //consider using premultiply(c);
- }
-);
-#else
-NSString *const kGPUImageHazeFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float hazeDistance;
- uniform float slope;
-
- void main()
- {
- //todo reconsider precision modifiers
- vec4 color = vec4(1.0);//todo reimplement as a parameter
-
- float d = textureCoordinate.y * slope + hazeDistance;
-
- vec4 c = texture2D(inputImageTexture, textureCoordinate) ; // consider using unpremultiply
-
- c = (c - d * color) / (1.0 -d);
-
- gl_FragColor = c; //consider using premultiply(c);
- }
-);
-#endif
-
-
-
-
-@implementation GPUImageHazeFilter
-
-@synthesize distance = _distance;
-@synthesize slope = _slope;
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageHazeFragmentShaderString]))
- {
- return nil;
- }
-
- distanceUniform = [filterProgram uniformIndex:@"hazeDistance"];
- slopeUniform = [filterProgram uniformIndex:@"slope"];
-
- self.distance = 0.2;
- self.slope = 0.0;
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setDistance:(CGFloat)newValue;
-{
- _distance = newValue;
-
- [self setFloat:_distance forUniform:distanceUniform program:filterProgram];
-}
-
-- (void)setSlope:(CGFloat)newValue;
-{
- _slope = newValue;
-
- [self setFloat:_slope forUniform:slopeUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.h
deleted file mode 100644
index 263d8df..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilterGroup.h"
-#import "GPUImageLowPassFilter.h"
-#import "GPUImageDifferenceBlendFilter.h"
-
-@interface GPUImageHighPassFilter : GPUImageFilterGroup
-{
- GPUImageLowPassFilter *lowPassFilter;
- GPUImageDifferenceBlendFilter *differenceBlendFilter;
-}
-
-// This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
-@property(readwrite, nonatomic) CGFloat filterStrength;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.m
deleted file mode 100644
index 511240d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHighPassFilter.m
+++ /dev/null
@@ -1,46 +0,0 @@
-#import "GPUImageHighPassFilter.h"
-
-@implementation GPUImageHighPassFilter
-
-@synthesize filterStrength;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // Start with a low pass filter to define the component to be removed
- lowPassFilter = [[GPUImageLowPassFilter alloc] init];
- [self addFilter:lowPassFilter];
-
- // Take the difference of the current frame from the low pass filtered result to get the high pass
- differenceBlendFilter = [[GPUImageDifferenceBlendFilter alloc] init];
- [self addFilter:differenceBlendFilter];
-
- // Texture location 0 needs to be the original image for the difference blend
- [lowPassFilter addTarget:differenceBlendFilter atTextureLocation:1];
-
- self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, differenceBlendFilter, nil];
- self.terminalFilter = differenceBlendFilter;
-
- self.filterStrength = 0.5;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setFilterStrength:(CGFloat)newValue;
-{
- lowPassFilter.filterStrength = newValue;
-}
-
-- (CGFloat)filterStrength;
-{
- return lowPassFilter.filterStrength;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.h
deleted file mode 100644
index 3579129..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.h
+++ /dev/null
@@ -1,20 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageHighlightShadowFilter : GPUImageFilter
-{
- GLint shadowsUniform, highlightsUniform;
-}
-
-/**
- * 0 - 1, increase to lighten shadows.
- * @default 0
- */
-@property(readwrite, nonatomic) CGFloat shadows;
-
-/**
- * 0 - 1, decrease to darken highlights.
- * @default 1
- */
-@property(readwrite, nonatomic) CGFloat highlights;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.m
deleted file mode 100644
index aa92b74..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHighlightShadowFilter.m
+++ /dev/null
@@ -1,93 +0,0 @@
-#import "GPUImageHighlightShadowFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING
-(
-uniform sampler2D inputImageTexture;
-varying highp vec2 textureCoordinate;
-
-uniform lowp float shadows;
-uniform lowp float highlights;
-
-const mediump vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);
-
-void main()
-{
- lowp vec4 source = texture2D(inputImageTexture, textureCoordinate);
- mediump float luminance = dot(source.rgb, luminanceWeighting);
-
- mediump float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);
- mediump float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);
- lowp vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));
-
- gl_FragColor = vec4(result.rgb, source.a);
-}
-);
-#else
-NSString *const kGPUImageHighlightShadowFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
- varying vec2 textureCoordinate;
-
- uniform float shadows;
- uniform float highlights;
-
- const vec3 luminanceWeighting = vec3(0.3, 0.3, 0.3);
-
- void main()
- {
- vec4 source = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(source.rgb, luminanceWeighting);
-
- float shadow = clamp((pow(luminance, 1.0/(shadows+1.0)) + (-0.76)*pow(luminance, 2.0/(shadows+1.0))) - luminance, 0.0, 1.0);
- float highlight = clamp((1.0 - (pow(1.0-luminance, 1.0/(2.0-highlights)) + (-0.8)*pow(1.0-luminance, 2.0/(2.0-highlights)))) - luminance, -1.0, 0.0);
- vec3 result = vec3(0.0, 0.0, 0.0) + ((luminance + shadow + highlight) - 0.0) * ((source.rgb - vec3(0.0, 0.0, 0.0))/(luminance - 0.0));
-
- gl_FragColor = vec4(result.rgb, source.a);
- }
-);
-#endif
-
-@implementation GPUImageHighlightShadowFilter
-
-@synthesize shadows = _shadows;
-@synthesize highlights = _highlights;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageHighlightShadowFragmentShaderString]))
- {
- return nil;
- }
-
- shadowsUniform = [filterProgram uniformIndex:@"shadows"];
- highlightsUniform = [filterProgram uniformIndex:@"highlights"];
-
- self.shadows = 0.0;
- self.highlights = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setShadows:(CGFloat)newValue;
-{
- _shadows = newValue;
-
- [self setFloat:_shadows forUniform:shadowsUniform program:filterProgram];
-}
-
-- (void)setHighlights:(CGFloat)newValue;
-{
- _highlights = newValue;
-
- [self setFloat:_highlights forUniform:highlightsUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.h
deleted file mode 100644
index 2b71cda..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.h
+++ /dev/null
@@ -1,26 +0,0 @@
-//
-// GPUImageHistogramEqualizationFilter.h
-// FilterShowcase
-//
-// Created by Adam Marcus on 19/08/2014.
-// Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.
-//
-
-#import "GPUImageFilterGroup.h"
-#import "GPUImageHistogramFilter.h"
-#import "GPUImageRawDataOutput.h"
-#import "GPUImageRawDataInput.h"
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
-{
- GPUImageHistogramFilter *histogramFilter;
- GPUImageRawDataOutput *rawDataOutputFilter;
- GPUImageRawDataInput *rawDataInputFilter;
-}
-
-@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
-
-- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.m
deleted file mode 100644
index 7d6b9a2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramEqualizationFilter.m
+++ /dev/null
@@ -1,307 +0,0 @@
-//
-// GPUImageHistogramEqualizationFilter.m
-// FilterShowcase
-//
-// Created by Adam Marcus on 19/08/2014.
-// Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.
-//
-
-#import "GPUImageHistogramEqualizationFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
-
- gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);
- }
- );
-#else
-NSString *const kGPUImageRedHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
-
- gl_FragColor = vec4(redCurveValue, textureColor.g, textureColor.b, textureColor.a);
- }
- );
-#endif
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
-
- gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);
- }
- );
-#else
-NSString *const kGPUImageGreenHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
-
- gl_FragColor = vec4(textureColor.r, greenCurveValue, textureColor.b, textureColor.a);
- }
- );
-#endif
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
-
- gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);
- }
- );
-#else
-NSString *const kGPUImageBlueHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
-
- gl_FragColor = vec4(textureColor.r, textureColor.g, blueCurveValue, textureColor.a);
- }
- );
-#endif
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
- lowp float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
- lowp float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
-
- gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
- }
- );
-#else
-NSString *const kGPUImageRGBHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float redCurveValue = texture2D(inputImageTexture2, vec2(textureColor.r, 0.0)).r;
- float greenCurveValue = texture2D(inputImageTexture2, vec2(textureColor.g, 0.0)).g;
- float blueCurveValue = texture2D(inputImageTexture2, vec2(textureColor.b, 0.0)).b;
-
- gl_FragColor = vec4(redCurveValue, greenCurveValue, blueCurveValue, textureColor.a);
- }
- );
-#endif
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- const lowp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float luminance = dot(textureColor.rgb, W);
- lowp float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;
- lowp float deltaLuminance = newLuminance - luminance;
-
- lowp float red = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);
- lowp float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);
- lowp float blue = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);
-
- gl_FragColor = vec4(red, green, blue, textureColor.a);
- }
- );
-#else
-NSString *const kGPUImageLuminanceHistogramEqualizationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, W);
- float newLuminance = texture2D(inputImageTexture2, vec2(luminance, 0.0)).r;
- float deltaLuminance = newLuminance - luminance;
-
- float red = clamp(textureColor.r + deltaLuminance, 0.0, 1.0);
- float green = clamp(textureColor.g + deltaLuminance, 0.0, 1.0);
- float blue = clamp(textureColor.b + deltaLuminance, 0.0, 1.0);
-
- gl_FragColor = vec4(red, green, blue, textureColor.a);
- }
- );
-#endif
-
-@implementation GPUImageHistogramEqualizationFilter
-
-@synthesize downsamplingFactor = _downsamplingFactor;
-
-#pragma mark -
-#pragma mark Initialization
-
-- (id)init;
-{
- if (!(self = [self initWithHistogramType:kGPUImageHistogramRGB]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- histogramFilter = [[GPUImageHistogramFilter alloc] initWithHistogramType:newHistogramType];
- [self addFilter:histogramFilter];
-
- GLubyte dummyInput[4 * 256]; // NB: No way to initialise GPUImageRawDataInput without providing bytes
- rawDataInputFilter = [[GPUImageRawDataInput alloc] initWithBytes:dummyInput size:CGSizeMake(256.0, 1.0) pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte];
- rawDataOutputFilter = [[GPUImageRawDataOutput alloc] initWithImageSize:CGSizeMake(256.0, 3.0) resultsInBGRAFormat:YES];
-
- __unsafe_unretained GPUImageRawDataOutput *_rawDataOutputFilter = rawDataOutputFilter;
- __unsafe_unretained GPUImageRawDataInput *_rawDataInputFilter = rawDataInputFilter;
- [rawDataOutputFilter setNewFrameAvailableBlock:^{
-
- unsigned int histogramBins[3][256];
-
- [_rawDataOutputFilter lockFramebufferForReading];
-
- GLubyte *data = [_rawDataOutputFilter rawBytesForImage];
- data += [_rawDataOutputFilter bytesPerRowInOutput];
-
- histogramBins[0][0] = *data++;
- histogramBins[1][0] = *data++;
- histogramBins[2][0] = *data++;
- data++;
-
- for (unsigned int x = 1; x < 256; x++) {
- histogramBins[0][x] = histogramBins[0][x-1] + *data++;
- histogramBins[1][x] = histogramBins[1][x-1] + *data++;
- histogramBins[2][x] = histogramBins[2][x-1] + *data++;
- data++;
- }
-
- [_rawDataOutputFilter unlockFramebufferAfterReading];
-
- GLubyte colorMapping[4 * 256];
- GLubyte *_colorMapping = colorMapping;
-
- for (unsigned int x = 0; x < 256; x++) {
- *_colorMapping++ = (GLubyte) (((histogramBins[0][x] - histogramBins[0][0]) * 255) / histogramBins[0][255]);
- *_colorMapping++ = (GLubyte) (((histogramBins[1][x] - histogramBins[1][0]) * 255) / histogramBins[1][255]);
- *_colorMapping++ = (GLubyte) (((histogramBins[2][x] - histogramBins[2][0]) * 255) / histogramBins[2][255]);
- *_colorMapping++ = 255;
- }
-
- _colorMapping = colorMapping;
- [_rawDataInputFilter updateDataFromBytes:_colorMapping size:CGSizeMake(256.0, 1.0)];
- [_rawDataInputFilter processData];
- }];
- [histogramFilter addTarget:rawDataOutputFilter];
-
- NSString *fragmentShader = nil;
- switch (newHistogramType) {
- case kGPUImageHistogramRed:
- fragmentShader = kGPUImageRedHistogramEqualizationFragmentShaderString;
- break;
- case kGPUImageHistogramGreen:
- fragmentShader = kGPUImageGreenHistogramEqualizationFragmentShaderString;
- break;
- case kGPUImageHistogramBlue:
- fragmentShader = kGPUImageBlueHistogramEqualizationFragmentShaderString;
- break;
- default:
- case kGPUImageHistogramRGB:
- fragmentShader = kGPUImageRGBHistogramEqualizationFragmentShaderString;
- break;
- case kGPUImageHistogramLuminance:
- fragmentShader = kGPUImageLuminanceHistogramEqualizationFragmentShaderString;
- break;
- }
- GPUImageFilter *equalizationFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:fragmentShader];
- [rawDataInputFilter addTarget:equalizationFilter atTextureLocation:1];
-
- [self addFilter:equalizationFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:histogramFilter, equalizationFilter, nil];
- self.terminalFilter = equalizationFilter;
-
- self.downsamplingFactor = 16;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setDownsamplingFactor:(NSUInteger)newValue;
-{
- if (_downsamplingFactor != newValue)
- {
- _downsamplingFactor = newValue;
- histogramFilter.downsamplingFactor = newValue;
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.h
deleted file mode 100755
index 32004bf..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.h
+++ /dev/null
@@ -1,22 +0,0 @@
-#import "GPUImageFilter.h"
-
-typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;
-
-@interface GPUImageHistogramFilter : GPUImageFilter
-{
- GPUImageHistogramType histogramType;
-
- GLubyte *vertexSamplingCoordinates;
-
- GLProgram *secondFilterProgram, *thirdFilterProgram;
- GLint secondFilterPositionAttribute, thirdFilterPositionAttribute;
-}
-
-// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
-@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
-
-// Initialization and teardown
-- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
-- (void)initializeSecondaryAttributes;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.m
deleted file mode 100755
index bb7acce..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramFilter.m
+++ /dev/null
@@ -1,341 +0,0 @@
-#import "GPUImageHistogramFilter.h"
-
-// Unlike other filters, this one uses a grid of GL_POINTs to sample the incoming image in a grid. A custom vertex shader reads the color in the texture at its position
-// and outputs a bin position in the final histogram as the vertex position. That point is then written into the image of the histogram using translucent pixels.
-// The degree of translucency is controlled by the scalingFactor, which lets you adjust the dynamic range of the histogram. The histogram can only be generated for one
-// color channel or luminance value at a time.
-//
-// This is based on this implementation: http://www.shaderwrangler.com/publications/histogram/histogram_cameraready.pdf
-//
-// Or at least that's how it would work if iOS could read from textures in a vertex shader, which it can't. Therefore, I read the texture data down from the
-// incoming frame and process the texture colors as vertices.
-
-NSString *const kGPUImageRedHistogramSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- varying vec3 colorFactor;
-
- void main()
- {
- colorFactor = vec3(1.0, 0.0, 0.0);
- gl_Position = vec4(-1.0 + (position.x * 0.0078125), 0.0, 0.0, 1.0);
- gl_PointSize = 1.0;
- }
-);
-
-NSString *const kGPUImageGreenHistogramSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- varying vec3 colorFactor;
-
- void main()
- {
- colorFactor = vec3(0.0, 1.0, 0.0);
- gl_Position = vec4(-1.0 + (position.y * 0.0078125), 0.0, 0.0, 1.0);
- gl_PointSize = 1.0;
- }
-);
-
-NSString *const kGPUImageBlueHistogramSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- varying vec3 colorFactor;
-
- void main()
- {
- colorFactor = vec3(0.0, 0.0, 1.0);
- gl_Position = vec4(-1.0 + (position.z * 0.0078125), 0.0, 0.0, 1.0);
- gl_PointSize = 1.0;
- }
-);
-
-NSString *const kGPUImageLuminanceHistogramSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- varying vec3 colorFactor;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- float luminance = dot(position.xyz, W);
-
- colorFactor = vec3(1.0, 1.0, 1.0);
- gl_Position = vec4(-1.0 + (luminance * 0.0078125), 0.0, 0.0, 1.0);
- gl_PointSize = 1.0;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING
-(
- const lowp float scalingFactor = 1.0 / 256.0;
-
- varying lowp vec3 colorFactor;
-
- void main()
- {
- gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);
- }
-);
-#else
-NSString *const kGPUImageHistogramAccumulationFragmentShaderString = SHADER_STRING
-(
- const float scalingFactor = 1.0 / 256.0;
-
- varying vec3 colorFactor;
-
- void main()
- {
- gl_FragColor = vec4(colorFactor * scalingFactor , 1.0);
- }
-);
-#endif
-
-@implementation GPUImageHistogramFilter
-
-@synthesize downsamplingFactor = _downsamplingFactor;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
-{
- switch (newHistogramType)
- {
- case kGPUImageHistogramRed:
- {
- if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
- {
- return nil;
- }
- }; break;
- case kGPUImageHistogramGreen:
- {
- if (!(self = [super initWithVertexShaderFromString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
- {
- return nil;
- }
- }; break;
- case kGPUImageHistogramBlue:
- {
- if (!(self = [super initWithVertexShaderFromString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
- {
- return nil;
- }
- }; break;
- case kGPUImageHistogramLuminance:
- {
- if (!(self = [super initWithVertexShaderFromString:kGPUImageLuminanceHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
- {
- return nil;
- }
- }; break;
- case kGPUImageHistogramRGB:
- {
- if (!(self = [super initWithVertexShaderFromString:kGPUImageRedHistogramSamplingVertexShaderString fragmentShaderFromString:kGPUImageHistogramAccumulationFragmentShaderString]))
- {
- return nil;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageGreenHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];
- thirdFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageBlueHistogramSamplingVertexShaderString fragmentShaderString:kGPUImageHistogramAccumulationFragmentShaderString];
-
- if (!secondFilterProgram.initialized)
- {
- [self initializeSecondaryAttributes];
-
- if (![secondFilterProgram link])
- {
- NSString *progLog = [secondFilterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [secondFilterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [secondFilterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- filterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
-
- }
-
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
-
- glEnableVertexAttribArray(secondFilterPositionAttribute);
-
- if (![thirdFilterProgram link])
- {
- NSString *progLog = [secondFilterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [secondFilterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [secondFilterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- filterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
-
-
- thirdFilterPositionAttribute = [thirdFilterProgram attributeIndex:@"position"];
- [GPUImageContext setActiveShaderProgram:thirdFilterProgram];
-
- glEnableVertexAttribArray(thirdFilterPositionAttribute);
- });
- }; break;
- }
-
- histogramType = newHistogramType;
-
- self.downsamplingFactor = 16;
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithHistogramType:kGPUImageHistogramRGB]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)initializeSecondaryAttributes;
-{
- [secondFilterProgram addAttribute:@"position"];
- [thirdFilterProgram addAttribute:@"position"];
-}
-
-- (void)dealloc;
-{
- if (vertexSamplingCoordinates != NULL && ![GPUImageContext supportsFastTextureUpload])
- {
- free(vertexSamplingCoordinates);
- }
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (CGSize)sizeOfFBO;
-{
- return CGSizeMake(256.0, 3.0);
-}
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- [self renderToTextureWithVertices:NULL textureCoordinates:NULL];
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-}
-
-- (CGSize)outputFrameSize;
-{
- return [self sizeOfFBO];
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (self.preventRendering)
- {
- return;
- }
-
- inputTextureSize = newSize;
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- inputRotation = kGPUImageNoRotation;
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // we need a normal color texture for this filter
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext useImageProcessingContext];
-
- if ([GPUImageContext supportsFastTextureUpload])
- {
- glFinish();
- vertexSamplingCoordinates = [firstInputFramebuffer byteBuffer];
- } else {
- if (vertexSamplingCoordinates == NULL)
- {
- vertexSamplingCoordinates = calloc(inputTextureSize.width * inputTextureSize.height * 4, sizeof(GLubyte));
- }
- glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, vertexSamplingCoordinates);
- }
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glClearColor(0.0, 0.0, 0.0, 1.0);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glBlendEquation(GL_FUNC_ADD);
- glBlendFunc(GL_ONE, GL_ONE);
- glEnable(GL_BLEND);
-
- glVertexAttribPointer(filterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
- glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);
-
- if (histogramType == kGPUImageHistogramRGB)
- {
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
-
- glVertexAttribPointer(secondFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
- glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);
-
- [GPUImageContext setActiveShaderProgram:thirdFilterProgram];
-
- glVertexAttribPointer(thirdFilterPositionAttribute, 4, GL_UNSIGNED_BYTE, 0, ((unsigned int)_downsamplingFactor - 1) * 4, vertexSamplingCoordinates);
- glDrawArrays(GL_POINTS, 0, inputTextureSize.width * inputTextureSize.height / (CGFloat)_downsamplingFactor);
- }
-
- glDisable(GL_BLEND);
- [firstInputFramebuffer unlock];
-
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-//- (void)setScalingFactor:(CGFloat)newValue;
-//{
-// _scalingFactor = newValue;
-//
-// [GPUImageContext useImageProcessingContext];
-// [filterProgram use];
-// glUniform1f(scalingFactorUniform, _scalingFactor);
-//}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.h b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.h
deleted file mode 100755
index f80c50f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageHistogramGenerator : GPUImageFilter
-{
- GLint backgroundColorUniform;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.m b/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.m
deleted file mode 100755
index 703795d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHistogramGenerator.m
+++ /dev/null
@@ -1,87 +0,0 @@
-#import "GPUImageHistogramGenerator.h"
-
-NSString *const kGPUImageHistogramGeneratorVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- varying vec2 textureCoordinate;
- varying float height;
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = vec2(inputTextureCoordinate.x, 0.5);
- height = 1.0 - inputTextureCoordinate.y;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp float height;
-
- uniform sampler2D inputImageTexture;
- uniform lowp vec4 backgroundColor;
-
- void main()
- {
- lowp vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
- lowp vec4 heightTest = vec4(step(height, colorChannels), 1.0);
- gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
- }
-);
-#else
-NSString *const kGPUImageHistogramGeneratorFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying float height;
-
- uniform sampler2D inputImageTexture;
- uniform vec4 backgroundColor;
-
- void main()
- {
- vec3 colorChannels = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec4 heightTest = vec4(step(height, colorChannels), 1.0);
- gl_FragColor = mix(backgroundColor, heightTest, heightTest.r + heightTest.g + heightTest.b);
- }
-);
-#endif
-
-@implementation GPUImageHistogramGenerator
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageHistogramGeneratorVertexShaderString fragmentShaderFromString:kGPUImageHistogramGeneratorFragmentShaderString]))
- {
- return nil;
- }
-
- backgroundColorUniform = [filterProgram uniformIndex:@"backgroundColor"];
-
- [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
-{
-// GLfloat backgroundColor[4];
-// backgroundColor[0] = redComponent;
-// backgroundColor[1] = greenComponent;
-// backgroundColor[2] = blueComponent;
-// backgroundColor[3] = alphaComponent;
- GPUVector4 backgroundColor = {redComponent, greenComponent, blueComponent, alphaComponent};
-
- [self setVec4:backgroundColor forUniform:backgroundColorUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.h b/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.h
deleted file mode 100644
index 3ab6977..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.h
+++ /dev/null
@@ -1,49 +0,0 @@
-#import "GPUImageFilterGroup.h"
-#import "GPUImageThresholdEdgeDetectionFilter.h"
-#import "GPUImageParallelCoordinateLineTransformFilter.h"
-#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
-#import "GPUImageCannyEdgeDetectionFilter.h"
-
-// This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass,
-// then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines
-// is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima.
-// These local maxima are then converted back into lines in normal space and returned via a callback block.
-//
-// Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient
-// to rasterize on a GPU.
-//
-// This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology
-// and described in their publications:
-//
-// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
-// http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf
-// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.
-// http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf
-
-//#define DEBUGLINEDETECTION
-
-@interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup
-{
- GPUImageOutput *thresholdEdgeDetectionFilter;
-
-// GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter;
- GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter;
- GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
-
- GLfloat *linesArray;
- GLubyte *rawImagePixels;
-}
-
-// A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.
-@property(readwrite, nonatomic) CGFloat edgeThreshold;
-
-// A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.
-@property(readwrite, nonatomic) CGFloat lineDetectionThreshold;
-
-// This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y=mx+b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame
-@property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat* lineArray, NSUInteger linesDetected, CMTime frameTime);
-
-// These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform
-@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.m b/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.m
deleted file mode 100644
index 8289eb8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHoughTransformLineDetector.m
+++ /dev/null
@@ -1,241 +0,0 @@
-#import "GPUImageHoughTransformLineDetector.h"
-
-@interface GPUImageHoughTransformLineDetector()
-
-- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;
-
-@end
-
-@implementation GPUImageHoughTransformLineDetector
-
-@synthesize linesDetectedBlock;
-@synthesize edgeThreshold;
-@synthesize lineDetectionThreshold;
-@synthesize intermediateImages = _intermediateImages;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: do edge detection and threshold that to just have white pixels for edges
-// if ([GPUImageContext deviceSupportsFramebufferReads])
-// if ([GPUImageContext deviceSupportsFramebufferReads])
-// {
-// thresholdEdgeDetectionFilter = [[GPUImageThresholdEdgeDetectionFilter alloc] init];
-// thresholdEdgeDetectionFilter = [[GPUImageSobelEdgeDetectionFilter alloc] init];
-// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:0.07];
-// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:0.25];
-// [(GPUImageThresholdEdgeDetectionFilter *)thresholdEdgeDetectionFilter setEdgeStrength:1.0];
-// thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
-// }
-// else
-// {
- thresholdEdgeDetectionFilter = [[GPUImageCannyEdgeDetectionFilter alloc] init];
-// }
- [self addFilter:thresholdEdgeDetectionFilter];
-
- // Second pass: extract the white points and draw representative lines in parallel coordinate space
- parallelCoordinateLineTransformFilter = [[GPUImageParallelCoordinateLineTransformFilter alloc] init];
- [self addFilter:parallelCoordinateLineTransformFilter];
-
- // Third pass: apply non-maximum suppression
- if ([GPUImageContext deviceSupportsFramebufferReads])
- {
- nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:YES];
- }
- else
- {
- nonMaximumSuppressionFilter = [[GPUImageThresholdedNonMaximumSuppressionFilter alloc] initWithPackedColorspace:NO];
- }
- [self addFilter:nonMaximumSuppressionFilter];
-
- __unsafe_unretained GPUImageHoughTransformLineDetector *weakSelf = self;
-#ifdef DEBUGLINEDETECTION
- _intermediateImages = [[NSMutableArray alloc] init];
- __unsafe_unretained NSMutableArray *weakIntermediateImages = _intermediateImages;
-
-// __unsafe_unretained GPUImageOutput *weakEdgeDetectionFilter = thresholdEdgeDetectionFilter;
-// [thresholdEdgeDetectionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
-// [weakIntermediateImages removeAllObjects];
-// UIImage *intermediateImage = [weakEdgeDetectionFilter imageFromCurrentFramebuffer];
-// [weakIntermediateImages addObject:intermediateImage];
-// }];
-//
-// __unsafe_unretained GPUImageOutput *weakParallelCoordinateLineTransformFilter = parallelCoordinateLineTransformFilter;
-// [parallelCoordinateLineTransformFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
-// UIImage *intermediateImage = [weakParallelCoordinateLineTransformFilter imageFromCurrentFramebuffer];
-// [weakIntermediateImages addObject:intermediateImage];
-// }];
-
- __unsafe_unretained GPUImageOutput *weakNonMaximumSuppressionFilter = nonMaximumSuppressionFilter;
- [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime){
- UIImage *intermediateImage = [weakNonMaximumSuppressionFilter imageFromCurrentFramebuffer];
- [weakIntermediateImages addObject:intermediateImage];
-
- [weakSelf extractLineParametersFromImageAtFrameTime:frameTime];
- }];
-#else
- [nonMaximumSuppressionFilter setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
- [weakSelf extractLineParametersFromImageAtFrameTime:frameTime];
- }];
-#endif
-
- [thresholdEdgeDetectionFilter addTarget:parallelCoordinateLineTransformFilter];
- [parallelCoordinateLineTransformFilter addTarget:nonMaximumSuppressionFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:thresholdEdgeDetectionFilter, nil];
- // self.terminalFilter = colorPackingFilter;
- self.terminalFilter = nonMaximumSuppressionFilter;
-
-// self.edgeThreshold = 0.95;
- self.lineDetectionThreshold = 0.12;
-
- return self;
-}
-
-- (void)dealloc;
-{
- free(rawImagePixels);
- free(linesArray);
-}
-
-#pragma mark -
-#pragma mark Corner extraction
-
-- (void)extractLineParametersFromImageAtFrameTime:(CMTime)frameTime;
-{
- // we need a normal color texture for this filter
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- NSUInteger numberOfLines = 0;
- CGSize imageSize = nonMaximumSuppressionFilter.outputFrameSize;
-
- unsigned int imageByteSize = imageSize.width * imageSize.height * 4;
-
- if (rawImagePixels == NULL)
- {
- rawImagePixels = (GLubyte *)malloc(imageByteSize);
- linesArray = calloc(1024 * 2, sizeof(GLfloat));
- }
-
- glReadPixels(0, 0, (int)imageSize.width, (int)imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-
-// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- unsigned int imageWidth = imageSize.width * 4;
-
- unsigned int currentByte = 0;
- unsigned int cornerStorageIndex = 0;
- unsigned long lineStrengthCounter = 0;
- while (currentByte < imageByteSize)
- {
- GLubyte colorByte = rawImagePixels[currentByte];
-// NSLog(@"(%d,%d): [%d,%d,%d,%d]", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
-// NSLog(@"[%d,%d,%d,%d]", rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
-
- if (colorByte > 0)
- {
- unsigned int xCoordinate = currentByte % imageWidth;
- unsigned int yCoordinate = currentByte / imageWidth;
-
- lineStrengthCounter += colorByte;
-// NSLog(@"(%d,%d): [%d,%d,%d,%d]", xCoordinate, yCoordinate, rawImagePixels[currentByte], rawImagePixels[currentByte+1], rawImagePixels[currentByte+2], rawImagePixels[currentByte+3]);
-
- CGFloat normalizedXCoordinate = -1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / imageSize.width;
- CGFloat normalizedYCoordinate = -1.0 + 2.0 * (CGFloat)(yCoordinate) / imageSize.height;
-
- if (normalizedXCoordinate < 0.0)
- {
- // T space
- // m = -1 - d/u
- // b = d * v/u
- if (normalizedXCoordinate > -0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y
- {
- linesArray[cornerStorageIndex++] = 100000.0;
- linesArray[cornerStorageIndex++] = normalizedYCoordinate;
- }
- else
- {
- linesArray[cornerStorageIndex++] = -1.0 - 1.0 / normalizedXCoordinate;
- linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;
- }
- }
- else
- {
- // S space
- // m = 1 - d/u
- // b = d * v/u
- if (normalizedXCoordinate < 0.05) // Test for the case right near the X axis, stamp the X intercept instead of the Y
- {
- linesArray[cornerStorageIndex++] = 100000.0;
- linesArray[cornerStorageIndex++] = normalizedYCoordinate;
- }
- else
- {
- linesArray[cornerStorageIndex++] = 1.0 - 1.0 / normalizedXCoordinate;
- linesArray[cornerStorageIndex++] = 1.0 * normalizedYCoordinate / normalizedXCoordinate;
- }
- }
-
- numberOfLines++;
-
- numberOfLines = MIN(numberOfLines, 1023);
- cornerStorageIndex = MIN(cornerStorageIndex, 2040);
- }
- currentByte +=4;
- }
-
-// CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
-// NSLog(@"Processing time : %f ms", 1000.0 * currentFrameTime);
-
- if (linesDetectedBlock != NULL)
- {
- linesDetectedBlock(linesArray, numberOfLines, frameTime);
- }
-}
-
-- (BOOL)wantsMonochromeInput;
-{
-// return YES;
- return NO;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-//- (void)setEdgeThreshold:(CGFloat)newValue;
-//{
-// [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter setThreshold:newValue];
-//}
-//
-//- (CGFloat)edgeThreshold;
-//{
-// return [(GPUImageCannyEdgeDetectionFilter *)thresholdEdgeDetectionFilter threshold];
-//}
-
-- (void)setLineDetectionThreshold:(CGFloat)newValue;
-{
- nonMaximumSuppressionFilter.threshold = newValue;
-}
-
-- (CGFloat)lineDetectionThreshold;
-{
- return nonMaximumSuppressionFilter.threshold;
-}
-
-#ifdef DEBUGLINEDETECTION
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
-// [thresholdEdgeDetectionFilter useNextFrameForImageCapture];
-// [parallelCoordinateLineTransformFilter useNextFrameForImageCapture];
- [nonMaximumSuppressionFilter useNextFrameForImageCapture];
-
- [super newFrameReadyAtTime:frameTime atIndex:textureIndex];
-}
-#endif
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.h
deleted file mode 100644
index 4399ffc..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.m
deleted file mode 100644
index f9dfbbb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHueBlendFilter.m
+++ /dev/null
@@ -1,212 +0,0 @@
-#import "GPUImageHueBlendFilter.h"
-
-/**
- * Hue blend mode based upon pseudo code from the PDF specification.
- */
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- highp float lum(lowp vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- lowp vec3 clipcolor(lowp vec3 c) {
- highp float l = lum(c);
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- lowp vec3 setlum(lowp vec3 c, highp float l) {
- highp float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- highp float sat(lowp vec3 c) {
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
- return x - n;
- }
-
- lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {
- return ((cmid - cmin) * s) / (cmax - cmin);
- }
-
- lowp vec3 setsat(lowp vec3 c, highp float s) {
- if (c.r > c.g) {
- if (c.r > c.b) {
- if (c.g > c.b) {
- /* g is mid, b is min */
- c.g = mid(c.b, c.g, c.r, s);
- c.b = 0.0;
- } else {
- /* b is mid, g is min */
- c.b = mid(c.g, c.b, c.r, s);
- c.g = 0.0;
- }
- c.r = s;
- } else {
- /* b is max, r is mid, g is min */
- c.r = mid(c.g, c.r, c.b, s);
- c.b = s;
- c.r = 0.0;
- }
- } else if (c.r > c.b) {
- /* g is max, r is mid, b is min */
- c.r = mid(c.b, c.r, c.g, s);
- c.g = s;
- c.b = 0.0;
- } else if (c.g > c.b) {
- /* g is max, b is mid, r is min */
- c.b = mid(c.r, c.b, c.g, s);
- c.g = s;
- c.r = 0.0;
- } else if (c.b > c.g) {
- /* b is max, g is mid, r is min */
- c.g = mid(c.r, c.g, c.b, s);
- c.b = s;
- c.r = 0.0;
- } else {
- c = vec3(0.0);
- }
- return c;
- }
-
- void main()
- {
- highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#else
-NSString *const kGPUImageHueBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- float lum(vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- vec3 clipcolor(vec3 c) {
- float l = lum(c);
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- vec3 setlum(vec3 c, float l) {
- float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- float sat(vec3 c) {
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
- return x - n;
- }
-
- float mid(float cmin, float cmid, float cmax, float s) {
- return ((cmid - cmin) * s) / (cmax - cmin);
- }
-
- vec3 setsat(vec3 c, float s) {
- if (c.r > c.g) {
- if (c.r > c.b) {
- if (c.g > c.b) {
- /* g is mid, b is min */
- c.g = mid(c.b, c.g, c.r, s);
- c.b = 0.0;
- } else {
- /* b is mid, g is min */
- c.b = mid(c.g, c.b, c.r, s);
- c.g = 0.0;
- }
- c.r = s;
- } else {
- /* b is max, r is mid, g is min */
- c.r = mid(c.g, c.r, c.b, s);
- c.b = s;
- c.r = 0.0;
- }
- } else if (c.r > c.b) {
- /* g is max, r is mid, b is min */
- c.r = mid(c.b, c.r, c.g, s);
- c.g = s;
- c.b = 0.0;
- } else if (c.g > c.b) {
- /* g is max, b is mid, r is min */
- c.b = mid(c.r, c.b, c.g, s);
- c.g = s;
- c.r = 0.0;
- } else if (c.b > c.g) {
- /* b is max, g is mid, r is min */
- c.g = mid(c.r, c.g, c.b, s);
- c.b = s;
- c.r = 0.0;
- } else {
- c = vec3(0.0);
- }
- return c;
- }
-
- void main()
- {
- vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(overlayColor.rgb, sat(baseColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#endif
-
-@implementation GPUImageHueBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageHueBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.h
deleted file mode 100644
index eef2465..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-
-#import "GPUImageFilter.h"
-
-@interface GPUImageHueFilter : GPUImageFilter
-{
- GLint hueAdjustUniform;
-
-}
-@property (nonatomic, readwrite) CGFloat hue;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.m
deleted file mode 100644
index 5b42c86..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageHueFilter.m
+++ /dev/null
@@ -1,123 +0,0 @@
-
-#import "GPUImageHueFilter.h"
-
-// Adapted from http://stackoverflow.com/questions/9234724/how-to-change-hue-of-a-texture-with-glsl - see for code and discussion
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING
-(
- precision highp float;
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform mediump float hueAdjust;
- const highp vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);
- const highp vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);
- const highp vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);
-
- const highp vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);
- const highp vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);
- const highp vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);
-
- void main ()
- {
- // Sample the input pixel
- highp vec4 color = texture2D(inputImageTexture, textureCoordinate);
-
- // Convert to YIQ
- highp float YPrime = dot (color, kRGBToYPrime);
- highp float I = dot (color, kRGBToI);
- highp float Q = dot (color, kRGBToQ);
-
- // Calculate the hue and chroma
- highp float hue = atan (Q, I);
- highp float chroma = sqrt (I * I + Q * Q);
-
- // Make the user's adjustments
- hue += (-hueAdjust); //why negative rotation?
-
- // Convert back to YIQ
- Q = chroma * sin (hue);
- I = chroma * cos (hue);
-
- // Convert back to RGB
- highp vec4 yIQ = vec4 (YPrime, I, Q, 0.0);
- color.r = dot (yIQ, kYIQToR);
- color.g = dot (yIQ, kYIQToG);
- color.b = dot (yIQ, kYIQToB);
-
- // Save the result
- gl_FragColor = color;
- }
-);
-#else
-NSString *const kGPUImageHueFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float hueAdjust;
- const vec4 kRGBToYPrime = vec4 (0.299, 0.587, 0.114, 0.0);
- const vec4 kRGBToI = vec4 (0.595716, -0.274453, -0.321263, 0.0);
- const vec4 kRGBToQ = vec4 (0.211456, -0.522591, 0.31135, 0.0);
-
- const vec4 kYIQToR = vec4 (1.0, 0.9563, 0.6210, 0.0);
- const vec4 kYIQToG = vec4 (1.0, -0.2721, -0.6474, 0.0);
- const vec4 kYIQToB = vec4 (1.0, -1.1070, 1.7046, 0.0);
-
- void main ()
- {
- // Sample the input pixel
- vec4 color = texture2D(inputImageTexture, textureCoordinate);
-
- // Convert to YIQ
- float YPrime = dot (color, kRGBToYPrime);
- float I = dot (color, kRGBToI);
- float Q = dot (color, kRGBToQ);
-
- // Calculate the hue and chroma
- float hue = atan (Q, I);
- float chroma = sqrt (I * I + Q * Q);
-
- // Make the user's adjustments
- hue += (-hueAdjust); //why negative rotation?
-
- // Convert back to YIQ
- Q = chroma * sin (hue);
- I = chroma * cos (hue);
-
- // Convert back to RGB
- vec4 yIQ = vec4 (YPrime, I, Q, 0.0);
- color.r = dot (yIQ, kYIQToR);
- color.g = dot (yIQ, kYIQToG);
- color.b = dot (yIQ, kYIQToB);
-
- // Save the result
- gl_FragColor = color;
- }
-);
-#endif
-
-@implementation GPUImageHueFilter
-@synthesize hue;
-
-- (id)init
-{
- if(! (self = [super initWithFragmentShaderFromString:kGPUImageHueFragmentShaderString]) )
- {
- return nil;
- }
-
- hueAdjustUniform = [filterProgram uniformIndex:@"hueAdjust"];
- self.hue = 90;
-
- return self;
-}
-
-- (void)setHue:(CGFloat)newHue
-{
- // Convert degrees to radians for hue rotation
- hue = fmodf(newHue, 360.0) * M_PI/180;
- [self setFloat:hue forUniform:hueAdjustUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.h
deleted file mode 100644
index 4c50cc3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageJFAVoronoiFilter : GPUImageFilter
-{
- GLuint secondFilterOutputTexture;
- GLuint secondFilterFramebuffer;
-
-
- GLint sampleStepUniform;
- GLint sizeUniform;
- NSUInteger numPasses;
-
-}
-
-@property (nonatomic, readwrite) CGSize sizeInPixels;
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.m
deleted file mode 100644
index 1583591..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageJFAVoronoiFilter.m
+++ /dev/null
@@ -1,446 +0,0 @@
-// adapted from unitzeroone - http://unitzeroone.com/labs/jfavoronoi/
-
-#import "GPUImageJFAVoronoiFilter.h"
-
-// The shaders are mostly taken from UnitZeroOne's WebGL example here:
-// http://unitzeroone.com/blog/2011/03/22/jump-flood-voronoi-for-webgl/
-
-NSString *const kGPUImageJFAVoronoiVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform float sampleStep;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 widthStep = vec2(sampleStep, 0.0);
- vec2 heightStep = vec2(0.0, sampleStep);
- vec2 widthHeightStep = vec2(sampleStep);
- vec2 widthNegativeHeightStep = vec2(sampleStep, -sampleStep);
-
- textureCoordinate = inputTextureCoordinate.xy;
- leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
- rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
-
- topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
- topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
- topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;
-
- bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
- bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
- bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
- }
- );
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING
-(
-
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform vec2 size;
- //varying vec2 textureCoordinate;
- //uniform float sampleStep;
-
- vec2 getCoordFromColor(vec4 color)
-{
- float z = color.z * 256.0;
- float yoff = floor(z / 8.0);
- float xoff = mod(z, 8.0);
- float x = color.x*256.0 + xoff*256.0;
- float y = color.y*256.0 + yoff*256.0;
- return vec2(x,y) / size;
-}
-
- void main(void) {
-
- vec2 sub;
- vec4 dst;
- vec4 local = texture2D(inputImageTexture, textureCoordinate);
- vec4 sam;
- float l;
- float smallestDist;
- if(local.a == 0.0){
-
- smallestDist = dot(1.0,1.0);
- }else{
- sub = getCoordFromColor(local)-textureCoordinate;
- smallestDist = dot(sub,sub);
- }
- dst = local;
-
-
- sam = texture2D(inputImageTexture, topRightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, topTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, topLeftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, leftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, rightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
- gl_FragColor = dst;
- }
-);
-#else
-NSString *const kGPUImageJFAVoronoiFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform vec2 size;
- //varying vec2 textureCoordinate;
- //uniform float sampleStep;
-
- vec2 getCoordFromColor(vec4 color)
-{
- float z = color.z * 256.0;
- float yoff = floor(z / 8.0);
- float xoff = mod(z, 8.0);
- float x = color.x*256.0 + xoff*256.0;
- float y = color.y*256.0 + yoff*256.0;
- return vec2(x,y) / size;
-}
-
- void main(void) {
-
- vec2 sub;
- vec4 dst;
- vec4 local = texture2D(inputImageTexture, textureCoordinate);
- vec4 sam;
- float l;
- float smallestDist;
- if(local.a == 0.0){
-
- smallestDist = dot(1.0,1.0);
- }else{
- sub = getCoordFromColor(local)-textureCoordinate;
- smallestDist = dot(sub,sub);
- }
- dst = local;
-
-
- sam = texture2D(inputImageTexture, topRightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, topTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, topLeftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomRightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, bottomLeftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, leftTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
-
- sam = texture2D(inputImageTexture, rightTextureCoordinate);
- if(sam.a == 1.0){
- sub = (getCoordFromColor(sam)-textureCoordinate);
- l = dot(sub,sub);
- if(l < smallestDist){
- smallestDist = l;
- dst = sam;
- }
- }
- gl_FragColor = dst;
- }
-);
-#endif
-
-@interface GPUImageJFAVoronoiFilter() {
- int currentPass;
-}
-
-
-@end
-
-@implementation GPUImageJFAVoronoiFilter
-
-@synthesize sizeInPixels = _sizeInPixels;
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageJFAVoronoiVertexShaderString fragmentShaderFromString:kGPUImageJFAVoronoiFragmentShaderString]))
- {
-
- NSLog(@"nil returned");
- return nil;
-
- }
-
- sampleStepUniform = [filterProgram uniformIndex:@"sampleStep"];
- sizeUniform = [filterProgram uniformIndex:@"size"];
- //[self disableSecondFrameCheck];
-
- return self;
-}
-
--(void)setSizeInPixels:(CGSize)sizeInPixels {
- _sizeInPixels = sizeInPixels;
-
- //validate that it's a power of 2
-
- float width = log2(sizeInPixels.width);
- float height = log2(sizeInPixels.height);
-
- if (width != height) {
- NSLog(@"Voronoi point texture must be square");
- return;
- }
- if (width != floor(width) || height != floor(height)) {
- NSLog(@"Voronoi point texture must be a power of 2. Texture size: %f, %f", sizeInPixels.width, sizeInPixels.height);
- return;
- }
- glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
-}
-
-#pragma mark -
-#pragma mark Managing the display FBOs
-
--(NSUInteger)nextPowerOfTwo:(CGPoint)input {
- NSUInteger val;
- if (input.x > input.y) {
- val = (NSUInteger)input.x;
- } else {
- val = (NSUInteger)input.y;
- }
-
- val--;
- val = (val >> 1) | val;
- val = (val >> 2) | val;
- val = (val >> 4) | val;
- val = (val >> 8) | val;
- val = (val >> 16) | val;
- val++;
- return val;
-}
-
-//- (void)setOutputFBO;
-//{
-// if (currentPass % 2 == 1) {
-// [self setSecondFilterFBO];
-// } else {
-// [self setFilterFBO];
-// }
-//
-//}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // Run the first stage of the two-pass filter
- [GPUImageContext setActiveShaderProgram:filterProgram];
- currentPass = 0;
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glActiveTexture(GL_TEXTURE2);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glUniform1f(sampleStepUniform, 0.5);
-
- glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
-
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- for (int pass = 1; pass <= numPasses + 1; pass++) {
- currentPass = pass;
-// [self setOutputFBO];
-
- //glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- if (pass % 2 == 0) {
- glBindTexture(GL_TEXTURE_2D, secondFilterOutputTexture);
- } else {
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- }
- glUniform1i(filterInputTextureUniform, 2);
-
- float step = pow(2.0, numPasses - pass) / pow(2.0, numPasses);
- glUniform1f(sampleStepUniform, step);
- glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.h
deleted file mode 100755
index 4fb0bce..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Kuwahara image abstraction, drawn from the work of Kyprianidis, et. al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
- */
-@interface GPUImageKuwaharaFilter : GPUImageFilter
-{
- GLint radiusUniform;
-}
-
-/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
-@property(readwrite, nonatomic) NSUInteger radius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.m
deleted file mode 100755
index 9061dbf..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaFilter.m
+++ /dev/null
@@ -1,223 +0,0 @@
-#import "GPUImageKuwaharaFilter.h"
-
-// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010).
-//
-// Original header:
-//
-// Anisotropic Kuwahara Filtering on the GPU
-// by Jan Eric Kyprianidis
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform int radius;
-
- precision highp float;
-
- const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
-
- void main (void)
- {
- vec2 uv = textureCoordinate;
- float n = float((radius + 1) * (radius + 1));
- int i; int j;
- vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
- vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
- vec3 c;
-
- for (j = -radius; j <= 0; ++j) {
- for (i = -radius; i <= 0; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- }
- }
-
- for (j = -radius; j <= 0; ++j) {
- for (i = 0; i <= radius; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- }
- }
-
- for (j = 0; j <= radius; ++j) {
- for (i = 0; i <= radius; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- }
- }
-
- for (j = 0; j <= radius; ++j) {
- for (i = -radius; i <= 0; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- }
- }
-
-
- float min_sigma2 = 1e+2;
- m0 /= n;
- s0 = abs(s0 / n - m0 * m0);
-
- float sigma2 = s0.r + s0.g + s0.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m0, 1.0);
- }
-
- m1 /= n;
- s1 = abs(s1 / n - m1 * m1);
-
- sigma2 = s1.r + s1.g + s1.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m1, 1.0);
- }
-
- m2 /= n;
- s2 = abs(s2 / n - m2 * m2);
-
- sigma2 = s2.r + s2.g + s2.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m2, 1.0);
- }
-
- m3 /= n;
- s3 = abs(s3 / n - m3 * m3);
-
- sigma2 = s3.r + s3.g + s3.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m3, 1.0);
- }
- }
-);
-#else
-NSString *const kGPUImageKuwaharaFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform int radius;
-
- const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
-
- void main (void)
- {
- vec2 uv = textureCoordinate;
- float n = float((radius + 1) * (radius + 1));
- int i; int j;
- vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
- vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
- vec3 c;
-
- for (j = -radius; j <= 0; ++j) {
- for (i = -radius; i <= 0; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- }
- }
-
- for (j = -radius; j <= 0; ++j) {
- for (i = 0; i <= radius; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- }
- }
-
- for (j = 0; j <= radius; ++j) {
- for (i = 0; i <= radius; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- }
- }
-
- for (j = 0; j <= radius; ++j) {
- for (i = -radius; i <= 0; ++i) {
- c = texture2D(inputImageTexture, uv + vec2(i,j) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- }
- }
-
-
- float min_sigma2 = 1e+2;
- m0 /= n;
- s0 = abs(s0 / n - m0 * m0);
-
- float sigma2 = s0.r + s0.g + s0.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m0, 1.0);
- }
-
- m1 /= n;
- s1 = abs(s1 / n - m1 * m1);
-
- sigma2 = s1.r + s1.g + s1.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m1, 1.0);
- }
-
- m2 /= n;
- s2 = abs(s2 / n - m2 * m2);
-
- sigma2 = s2.r + s2.g + s2.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m2, 1.0);
- }
-
- m3 /= n;
- s3 = abs(s3 / n - m3 * m3);
-
- sigma2 = s3.r + s3.g + s3.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m3, 1.0);
- }
- }
-);
-#endif
-
-@implementation GPUImageKuwaharaFilter
-
-@synthesize radius = _radius;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaFragmentShaderString]))
- {
- return nil;
- }
-
- radiusUniform = [filterProgram uniformIndex:@"radius"];
-
- self.radius = 3;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setRadius:(NSUInteger)newValue;
-{
- _radius = newValue;
-
- [self setInteger:(GLint)_radius forUniform:radiusUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.h b/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.h
deleted file mode 100644
index c4591b8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.h
+++ /dev/null
@@ -1,8 +0,0 @@
-//
-// GPUImageKuwaharaRadius3Filter.h
-
-#import "GPUImageFilter.h"
-
-@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.m b/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.m
deleted file mode 100644
index 98b092c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageKuwaharaRadius3Filter.m
+++ /dev/null
@@ -1,547 +0,0 @@
-#import "GPUImageKuwaharaRadius3Filter.h"
-
-// Sourced from Kyprianidis, J. E., Kang, H., and Doellner, J. "Anisotropic Kuwahara Filtering on the GPU," GPU Pro p.247 (2010).
-//
-// Original header:
-//
-// Anisotropic Kuwahara Filtering on the GPU
-// by Jan Eric Kyprianidis
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
-
- precision highp float;
-
- const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
-
- void main (void)
- {
- vec2 uv = textureCoordinate;
- float n = float(16); // radius is assumed to be 3
- vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
- vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
- vec3 c;
- vec3 cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- float min_sigma2 = 1e+2;
- m0 /= n;
- s0 = abs(s0 / n - m0 * m0);
-
- float sigma2 = s0.r + s0.g + s0.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m0, 1.0);
- }
-
- m1 /= n;
- s1 = abs(s1 / n - m1 * m1);
-
- sigma2 = s1.r + s1.g + s1.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m1, 1.0);
- }
-
- m2 /= n;
- s2 = abs(s2 / n - m2 * m2);
-
- sigma2 = s2.r + s2.g + s2.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m2, 1.0);
- }
-
- m3 /= n;
- s3 = abs(s3 / n - m3 * m3);
-
- sigma2 = s3.r + s3.g + s3.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m3, 1.0);
- }
- }
-);
-#else
-NSString *const kGPUImageKuwaharaRadius3FragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
-
- const vec2 src_size = vec2 (1.0 / 768.0, 1.0 / 1024.0);
-
- void main (void)
- {
- vec2 uv = textureCoordinate;
- float n = float(16); // radius is assumed to be 3
- vec3 m0 = vec3(0.0); vec3 m1 = vec3(0.0); vec3 m2 = vec3(0.0); vec3 m3 = vec3(0.0);
- vec3 s0 = vec3(0.0); vec3 s1 = vec3(0.0); vec3 s2 = vec3(0.0); vec3 s3 = vec3(0.0);
- vec3 c;
- vec3 cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-3,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-2,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-1,-3) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,-2) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,-1) * src_size).rgb;
- m0 += c;
- s0 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(0,-3) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,-2) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,-1) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m3 += c;
- s3 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,0) * src_size).rgb;
- cSq = c * c;
- m0 += c;
- s0 += cSq;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(-3,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-3,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(-2,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-2,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(-1,3) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,2) * src_size).rgb;
- m1 += c;
- s1 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(-1,1) * src_size).rgb;
- m1 += c;
- s1 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(0,3) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,2) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
- c = texture2D(inputImageTexture, uv + vec2(0,1) * src_size).rgb;
- cSq = c * c;
- m1 += c;
- s1 += cSq;
- m2 += c;
- s2 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(3,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(2,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(1,3) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,2) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,1) * src_size).rgb;
- m2 += c;
- s2 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,0) * src_size).rgb;
- cSq = c * c;
- m2 += c;
- s2 += cSq;
- m3 += c;
- s3 += cSq;
-
- c = texture2D(inputImageTexture, uv + vec2(3,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(3,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(2,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(2,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- c = texture2D(inputImageTexture, uv + vec2(1,-3) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,-2) * src_size).rgb;
- m3 += c;
- s3 += c * c;
- c = texture2D(inputImageTexture, uv + vec2(1,-1) * src_size).rgb;
- m3 += c;
- s3 += c * c;
-
- float min_sigma2 = 1e+2;
- m0 /= n;
- s0 = abs(s0 / n - m0 * m0);
-
- float sigma2 = s0.r + s0.g + s0.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m0, 1.0);
- }
-
- m1 /= n;
- s1 = abs(s1 / n - m1 * m1);
-
- sigma2 = s1.r + s1.g + s1.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m1, 1.0);
- }
-
- m2 /= n;
- s2 = abs(s2 / n - m2 * m2);
-
- sigma2 = s2.r + s2.g + s2.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m2, 1.0);
- }
-
- m3 /= n;
- s3 = abs(s3 / n - m3 * m3);
-
- sigma2 = s3.r + s3.g + s3.b;
- if (sigma2 < min_sigma2) {
- min_sigma2 = sigma2;
- gl_FragColor = vec4(m3, 1.0);
- }
- }
-);
-#endif
-
-@implementation GPUImageKuwaharaRadius3Filter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageKuwaharaRadius3FragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.h
deleted file mode 100644
index 5d7409f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter
-
-@property(readwrite, nonatomic) CGSize originalImageSize;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.m
deleted file mode 100644
index a655f48..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLanczosResamplingFilter.m
+++ /dev/null
@@ -1,239 +0,0 @@
-#import "GPUImageLanczosResamplingFilter.h"
-
-NSString *const kGPUImageLanczosVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec2 inputTextureCoordinate;
-
- uniform float texelWidthOffset;
- uniform float texelHeightOffset;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepLeftTextureCoordinate;
- varying vec2 twoStepsLeftTextureCoordinate;
- varying vec2 threeStepsLeftTextureCoordinate;
- varying vec2 fourStepsLeftTextureCoordinate;
- varying vec2 oneStepRightTextureCoordinate;
- varying vec2 twoStepsRightTextureCoordinate;
- varying vec2 threeStepsRightTextureCoordinate;
- varying vec2 fourStepsRightTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 firstOffset = vec2(texelWidthOffset, texelHeightOffset);
- vec2 secondOffset = vec2(2.0 * texelWidthOffset, 2.0 * texelHeightOffset);
- vec2 thirdOffset = vec2(3.0 * texelWidthOffset, 3.0 * texelHeightOffset);
- vec2 fourthOffset = vec2(4.0 * texelWidthOffset, 4.0 * texelHeightOffset);
-
- centerTextureCoordinate = inputTextureCoordinate;
- oneStepLeftTextureCoordinate = inputTextureCoordinate - firstOffset;
- twoStepsLeftTextureCoordinate = inputTextureCoordinate - secondOffset;
- threeStepsLeftTextureCoordinate = inputTextureCoordinate - thirdOffset;
- fourStepsLeftTextureCoordinate = inputTextureCoordinate - fourthOffset;
- oneStepRightTextureCoordinate = inputTextureCoordinate + firstOffset;
- twoStepsRightTextureCoordinate = inputTextureCoordinate + secondOffset;
- threeStepsRightTextureCoordinate = inputTextureCoordinate + thirdOffset;
- fourStepsRightTextureCoordinate = inputTextureCoordinate + fourthOffset;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepLeftTextureCoordinate;
- varying vec2 twoStepsLeftTextureCoordinate;
- varying vec2 threeStepsLeftTextureCoordinate;
- varying vec2 fourStepsLeftTextureCoordinate;
- varying vec2 oneStepRightTextureCoordinate;
- varying vec2 twoStepsRightTextureCoordinate;
- varying vec2 threeStepsRightTextureCoordinate;
- varying vec2 fourStepsRightTextureCoordinate;
-
- // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)
- // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5
-
- void main()
- {
- lowp vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;
-
- fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;
- fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;
-
- fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;
- fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;
-
- fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;
- fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;
-
- fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;
- fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;
-
- gl_FragColor = fragmentColor;
- }
-);
-#else
-NSString *const kGPUImageLanczosFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepLeftTextureCoordinate;
- varying vec2 twoStepsLeftTextureCoordinate;
- varying vec2 threeStepsLeftTextureCoordinate;
- varying vec2 fourStepsLeftTextureCoordinate;
- varying vec2 oneStepRightTextureCoordinate;
- varying vec2 twoStepsRightTextureCoordinate;
- varying vec2 threeStepsRightTextureCoordinate;
- varying vec2 fourStepsRightTextureCoordinate;
-
- // sinc(x) * sinc(x/a) = (a * sin(pi * x) * sin(pi * x / a)) / (pi^2 * x^2)
- // Assuming a Lanczos constant of 2.0, and scaling values to max out at x = +/- 1.5
-
- void main()
- {
- vec4 fragmentColor = texture2D(inputImageTexture, centerTextureCoordinate) * 0.38026;
-
- fragmentColor += texture2D(inputImageTexture, oneStepLeftTextureCoordinate) * 0.27667;
- fragmentColor += texture2D(inputImageTexture, oneStepRightTextureCoordinate) * 0.27667;
-
- fragmentColor += texture2D(inputImageTexture, twoStepsLeftTextureCoordinate) * 0.08074;
- fragmentColor += texture2D(inputImageTexture, twoStepsRightTextureCoordinate) * 0.08074;
-
- fragmentColor += texture2D(inputImageTexture, threeStepsLeftTextureCoordinate) * -0.02612;
- fragmentColor += texture2D(inputImageTexture, threeStepsRightTextureCoordinate) * -0.02612;
-
- fragmentColor += texture2D(inputImageTexture, fourStepsLeftTextureCoordinate) * -0.02143;
- fragmentColor += texture2D(inputImageTexture, fourStepsRightTextureCoordinate) * -0.02143;
-
- gl_FragColor = fragmentColor;
- }
-);
-#endif
-
-@implementation GPUImageLanczosResamplingFilter
-
-@synthesize originalImageSize = _originalImageSize;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageLanczosVertexShaderString firstStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString secondStageVertexShaderFromString:kGPUImageLanczosVertexShaderString secondStageFragmentShaderFromString:kGPUImageLanczosFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-// Base texture sampling offset on the input image, not the final size
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- self.originalImageSize = newSize;
- [super setInputSize:newSize atIndex:textureIndex];
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- verticalPassTexelWidthOffset = 1.0 / _originalImageSize.height;
- verticalPassTexelHeightOffset = 0.0;
- }
- else
- {
- verticalPassTexelWidthOffset = 0.0;
- verticalPassTexelHeightOffset = 1.0 / _originalImageSize.height;
- }
-
- horizontalPassTexelWidthOffset = 1.0 / _originalImageSize.width;
- horizontalPassTexelHeightOffset = 0.0;
- });
-}
-
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- CGSize currentFBOSize = [self sizeOfFBO];
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- currentFBOSize.height = self.originalImageSize.height;
- }
- else
- {
- currentFBOSize.width = self.originalImageSize.width;
- }
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:currentFBOSize textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- [firstInputFramebuffer unlock];
-
- // Run the second stage of the two-pass filter
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, 0);
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, 0);
- secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [secondOutputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [secondOutputFramebuffer lock];
- }
-
- [self setUniformsForProgramAtIndex:1];
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
-
- glUniform1i(secondFilterInputTextureUniform, 3);
-
- glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- [outputFramebuffer unlock];
- outputFramebuffer = nil;
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.h
deleted file mode 100644
index 267c1ba..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3ConvolutionFilter.h"
-
-@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.m
deleted file mode 100644
index 98b7850..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLaplacianFilter.m
+++ /dev/null
@@ -1,115 +0,0 @@
-#import "GPUImageLaplacianFilter.h"
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- uniform mediump mat3 convolutionMatrix;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- mediump vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- mediump vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- mediump vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- mediump vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- mediump vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- mediump vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- mediump vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- mediump vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
-
- mediump vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
- resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
- resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
-
- // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace
- resultColor = resultColor + 0.5;
-
- gl_FragColor = vec4(resultColor, centerColor.a);
- }
-);
-#else
-NSString *const kGPUImageLaplacianFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- uniform mat3 convolutionMatrix;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- vec3 bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- vec3 bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- vec3 topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- vec3 topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
-
- vec3 resultColor = topLeftColor * convolutionMatrix[0][0] + topColor * convolutionMatrix[0][1] + topRightColor * convolutionMatrix[0][2];
- resultColor += leftColor * convolutionMatrix[1][0] + centerColor.rgb * convolutionMatrix[1][1] + rightColor * convolutionMatrix[1][2];
- resultColor += bottomLeftColor * convolutionMatrix[2][0] + bottomColor * convolutionMatrix[2][1] + bottomRightColor * convolutionMatrix[2][2];
-
- // Normalize the results to allow for negative gradients in the 0.0-1.0 colorspace
- resultColor = resultColor + 0.5;
-
- gl_FragColor = vec4(resultColor, centerColor.a);
- }
-);
-#endif
-
-@implementation GPUImageLaplacianFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLaplacianFragmentShaderString]))
- {
- return nil;
- }
-
- GPUMatrix3x3 newConvolutionMatrix;
- newConvolutionMatrix.one.one = 0.5;
- newConvolutionMatrix.one.two = 1.0;
- newConvolutionMatrix.one.three = 0.5;
-
- newConvolutionMatrix.two.one = 1.0;
- newConvolutionMatrix.two.two = -6.0;
- newConvolutionMatrix.two.three = 1.0;
-
- newConvolutionMatrix.three.one = 0.5;
- newConvolutionMatrix.three.two = 1.0;
- newConvolutionMatrix.three.three = 0.5;
-
- self.convolutionKernel = newConvolutionMatrix;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.h
deleted file mode 100644
index d0948fb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.h
+++ /dev/null
@@ -1,45 +0,0 @@
-#import "GPUImageFilter.h"
-
-/**
- * Levels like Photoshop.
- *
- * The min, max, minOut and maxOut parameters are floats in the range [0, 1].
- * If you have parameters from Photoshop in the range [0, 255] you must first
- * convert them to be [0, 1].
- * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.
- *
- * If you want to apply levels to RGB as well as individual channels you need to use
- * this filter twice - first for the individual channels and then for all channels.
- */
-@interface GPUImageLevelsFilter : GPUImageFilter
-{
- GLint minUniform;
- GLint midUniform;
- GLint maxUniform;
- GLint minOutputUniform;
- GLint maxOutputUniform;
-
- GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;
-}
-
-/** Set levels for the red channel */
-- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
-
-- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
-
-/** Set levels for the green channel */
-- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
-
-- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
-
-/** Set levels for the blue channel */
-- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
-
-- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
-
-/** Set levels for all channels at once */
-- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
-- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.m
deleted file mode 100644
index 158815f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLevelsFilter.m
+++ /dev/null
@@ -1,152 +0,0 @@
-#import "GPUImageLevelsFilter.h"
-
-/*
- ** Gamma correction
- ** Details: http://blog.mouaif.org/2009/01/22/photoshop-gamma-correction-shader/
- */
-
-#define GammaCorrection(color, gamma) pow(color, 1.0 / gamma)
-
-/*
- ** Levels control (input (+gamma), output)
- ** Details: http://blog.mouaif.org/2009/01/28/levels-control-shader/
- */
-
-#define LevelsControlInputRange(color, minInput, maxInput) min(max(color - minInput, vec3(0.0)) / (maxInput - minInput), vec3(1.0))
-#define LevelsControlInput(color, minInput, gamma, maxInput) GammaCorrection(LevelsControlInputRange(color, minInput, maxInput), gamma)
-#define LevelsControlOutputRange(color, minOutput, maxOutput) mix(minOutput, maxOutput, color)
-#define LevelsControl(color, minInput, gamma, maxInput, minOutput, maxOutput) LevelsControlOutputRange(LevelsControlInput(color, minInput, gamma, maxInput), minOutput, maxOutput)
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform mediump vec3 levelMinimum;
- uniform mediump vec3 levelMiddle;
- uniform mediump vec3 levelMaximum;
- uniform mediump vec3 minOutput;
- uniform mediump vec3 maxOutput;
-
- void main()
- {
- mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageLevelsFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform vec3 levelMinimum;
- uniform vec3 levelMiddle;
- uniform vec3 levelMaximum;
- uniform vec3 minOutput;
- uniform vec3 maxOutput;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(LevelsControl(textureColor.rgb, levelMinimum, levelMiddle, levelMaximum, minOutput, maxOutput), textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageLevelsFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLevelsFragmentShaderString]))
- {
- return nil;
- }
-
- minUniform = [filterProgram uniformIndex:@"levelMinimum"];
- midUniform = [filterProgram uniformIndex:@"levelMiddle"];
- maxUniform = [filterProgram uniformIndex:@"levelMaximum"];
- minOutputUniform = [filterProgram uniformIndex:@"minOutput"];
- maxOutputUniform = [filterProgram uniformIndex:@"maxOutput"];
-
- [self setRedMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];
- [self setGreenMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];
- [self setBlueMin:0.0 gamma:1.0 max:1.0 minOut:0.0 maxOut:1.0];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Helpers
-
-- (void)updateUniforms {
- [self setVec3:minVector forUniform:minUniform program:filterProgram];
- [self setVec3:midVector forUniform:midUniform program:filterProgram];
- [self setVec3:maxVector forUniform:maxUniform program:filterProgram];
- [self setVec3:minOutputVector forUniform:minOutputUniform program:filterProgram];
- [self setVec3:maxOutputVector forUniform:maxOutputUniform program:filterProgram];
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {
- [self setRedMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];
- [self setGreenMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];
- [self setBlueMin:min gamma:mid max:max minOut:minOut maxOut:maxOut];
-}
-
-- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {
- [self setMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];
-}
-
-- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {
- minVector.one = min;
- midVector.one = mid;
- maxVector.one = max;
- minOutputVector.one = minOut;
- maxOutputVector.one = maxOut;
-
- [self updateUniforms];
-}
-
-- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {
- [self setRedMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];
-}
-
-- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {
- minVector.two = min;
- midVector.two = mid;
- maxVector.two = max;
- minOutputVector.two = minOut;
- maxOutputVector.two = maxOut;
-
- [self updateUniforms];
-}
-
-- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {
- [self setGreenMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];
-}
-
-- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut {
- minVector.three = min;
- midVector.three = mid;
- maxVector.three = max;
- minOutputVector.three = minOut;
- maxOutputVector.three = maxOut;
-
- [self updateUniforms];
-}
-
-- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max {
- [self setBlueMin:min gamma:mid max:max minOut:0.0 maxOut:1.0];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.h
deleted file mode 100755
index b0287c1..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.h
+++ /dev/null
@@ -1,8 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-/// Blends two images by taking the maximum value of each color component between the images
-@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.m
deleted file mode 100755
index 2bbd4b2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLightenBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageLightenBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = max(textureColor, textureColor2);
- }
-);
-#else
-NSString *const kGPUImageLightenBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = max(textureColor, textureColor2);
- }
- );
-#endif
-
-@implementation GPUImageLightenBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLightenBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.h b/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.h
deleted file mode 100644
index 4c46736..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageLineGenerator : GPUImageFilter
-{
- GLint lineWidthUniform, lineColorUniform;
- GLfloat *lineCoordinates;
-}
-
-// The width of the displayed lines, in pixels. The default is 1.
-@property(readwrite, nonatomic) CGFloat lineWidth;
-
-// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
-- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-// Rendering
-- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.m b/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.m
deleted file mode 100644
index 85d93be..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLineGenerator.m
+++ /dev/null
@@ -1,164 +0,0 @@
-#import "GPUImageLineGenerator.h"
-
-NSString *const kGPUImageLineGeneratorVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- void main()
- {
- gl_Position = position;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING
-(
- uniform lowp vec3 lineColor;
-
- void main()
- {
- gl_FragColor = vec4(lineColor, 1.0);
- }
-);
-#else
-NSString *const kGPUImageLineGeneratorFragmentShaderString = SHADER_STRING
-(
- uniform vec3 lineColor;
-
- void main()
- {
- gl_FragColor = vec4(lineColor, 1.0);
- }
-);
-#endif
-
-@interface GPUImageLineGenerator()
-
-- (void)generateLineCoordinates;
-
-@end
-
-@implementation GPUImageLineGenerator
-
-@synthesize lineWidth = _lineWidth;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageLineGeneratorVertexShaderString fragmentShaderFromString:kGPUImageLineGeneratorFragmentShaderString]))
- {
- return nil;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- lineWidthUniform = [filterProgram uniformIndex:@"lineWidth"];
- lineColorUniform = [filterProgram uniformIndex:@"lineColor"];
-
- self.lineWidth = 1.0;
- [self setLineColorRed:0.0 green:1.0 blue:0.0];
- });
-
- return self;
-}
-
-- (void)dealloc
-{
- if (lineCoordinates)
- {
- free(lineCoordinates);
- }
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)generateLineCoordinates;
-{
- lineCoordinates = calloc(1024 * 4, sizeof(GLfloat));
-}
-
-- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
-{
- if (self.preventRendering)
- {
- return;
- }
-
- if (lineCoordinates == NULL)
- {
- [self generateLineCoordinates];
- }
-
- // Iterate through and generate vertices from the slopes and intercepts
- NSUInteger currentVertexIndex = 0;
- NSUInteger currentLineIndex = 0;
- NSUInteger maxLineIndex = numberOfLines *2;
- while(currentLineIndex < maxLineIndex)
- {
- GLfloat slope = lineSlopeAndIntercepts[currentLineIndex++];
- GLfloat intercept = lineSlopeAndIntercepts[currentLineIndex++];
-
- if (slope > 9000.0) // Vertical line
- {
- lineCoordinates[currentVertexIndex++] = intercept;
- lineCoordinates[currentVertexIndex++] = -1.0;
- lineCoordinates[currentVertexIndex++] = intercept;
- lineCoordinates[currentVertexIndex++] = 1.0;
- }
- else
- {
- lineCoordinates[currentVertexIndex++] = -1.0;
- lineCoordinates[currentVertexIndex++] = slope * -1.0 + intercept;
- lineCoordinates[currentVertexIndex++] = 1.0;
- lineCoordinates[currentVertexIndex++] = slope * 1.0 + intercept;
- }
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(0.0, 0.0, 0.0, 0.0);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glBlendEquation(GL_FUNC_ADD);
- glBlendFunc(GL_ONE, GL_ONE);
- glEnable(GL_BLEND);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);
- glDrawArrays(GL_LINES, 0, ((unsigned int)numberOfLines * 2));
-
- glDisable(GL_BLEND);
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
- });
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // Prevent rendering of the frame by normal means
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setLineWidth:(CGFloat)newValue;
-{
- _lineWidth = newValue;
- [GPUImageContext setActiveShaderProgram:filterProgram];
- glLineWidth(newValue);
-}
-
-- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 lineColor = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:lineColor forUniform:lineColorUniform program:filterProgram];
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.h
deleted file mode 100644
index 7e5e415..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.m
deleted file mode 100644
index 4703196..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLinearBurnBlendFilter.m
+++ /dev/null
@@ -1,51 +0,0 @@
-#import "GPUImageLinearBurnBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageLinearBurnBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(clamp(textureColor.rgb + textureColor2.rgb - vec3(1.0), vec3(0.0), vec3(1.0)), textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageLinearBurnBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLinearBurnBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.h
deleted file mode 100644
index 431dbbd..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.m
deleted file mode 100644
index 1ee1f8d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLocalBinaryPatternFilter.m
+++ /dev/null
@@ -1,123 +0,0 @@
-#import "GPUImageLocalBinaryPatternFilter.h"
-
-// This is based on "Accelerating image recognition on mobile devices using GPGPU" by Miguel Bordallo Lopez, Henri Nykanen, Jari Hannuksela, Olli Silven and Markku Vehvilainen
-// http://www.ee.oulu.fi/~jhannuks/publications/SPIE2011a.pdf
-
-// Right pixel is the most significant bit, traveling clockwise to get to the upper right, which is the least significant
-// If the external pixel is greater than or equal to the center, set to 1, otherwise 0
-//
-// 2 1 0
-// 3 7
-// 4 5 6
-
-// 01101101
-// 76543210
-
-@implementation GPUImageLocalBinaryPatternFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
- lowp float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- lowp float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- lowp float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- lowp float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- lowp float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- lowp float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- lowp float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- lowp float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
-
- lowp float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
- byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
- byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
- byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
- byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
- byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
- byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
- byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);
-
- // TODO: Replace the above with a dot product and two vec4s
- // TODO: Apply step to a matrix, rather than individually
-
- gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
- }
-);
-#else
-NSString *const kGPUImageLocalBinaryPatternFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
-
- float byteTally = 1.0 / 255.0 * step(centerIntensity, topRightIntensity);
- byteTally += 2.0 / 255.0 * step(centerIntensity, topIntensity);
- byteTally += 4.0 / 255.0 * step(centerIntensity, topLeftIntensity);
- byteTally += 8.0 / 255.0 * step(centerIntensity, leftIntensity);
- byteTally += 16.0 / 255.0 * step(centerIntensity, bottomLeftIntensity);
- byteTally += 32.0 / 255.0 * step(centerIntensity, bottomIntensity);
- byteTally += 64.0 / 255.0 * step(centerIntensity, bottomRightIntensity);
- byteTally += 128.0 / 255.0 * step(centerIntensity, rightIntensity);
-
- // TODO: Replace the above with a dot product and two vec4s
- // TODO: Apply step to a matrix, rather than individually
-
- gl_FragColor = vec4(byteTally, byteTally, byteTally, 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLocalBinaryPatternFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.h
deleted file mode 100644
index 23ebde2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.h
+++ /dev/null
@@ -1,34 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageLookupFilter : GPUImageTwoInputFilter
-{
- GLint intensityUniform;
-}
-
-// How To Use:
-// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
-// For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).
-// If you need more complex filter you can create as many lookup tables as required.
-// E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
-// 2) Use you new lookup.png file as a second input for GPUImageLookupFilter.
-
-// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.
-
-// Additional Info:
-// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
-//for (int by = 0; by < 8; by++) {
-// for (int bx = 0; bx < 8; bx++) {
-// for (int g = 0; g < 64; g++) {
-// for (int r = 0; r < 64; r++) {
-// image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
-// (int)(g * 255.0 / 63.0 + 0.5),
-// (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
-// }
-// }
-// }
-//}
-
-// Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting
-@property(readwrite, nonatomic) CGFloat intensity;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.m
deleted file mode 100644
index c098392..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLookupFilter.m
+++ /dev/null
@@ -1,115 +0,0 @@
-#import "GPUImageLookupFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2; // TODO: This is not used
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2; // lookup texture
-
- uniform lowp float intensity;
-
- void main()
- {
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- highp float blueColor = textureColor.b * 63.0;
-
- highp vec2 quad1;
- quad1.y = floor(floor(blueColor) / 8.0);
- quad1.x = floor(blueColor) - (quad1.y * 8.0);
-
- highp vec2 quad2;
- quad2.y = floor(ceil(blueColor) / 8.0);
- quad2.x = ceil(blueColor) - (quad2.y * 8.0);
-
- highp vec2 texPos1;
- texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
- texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
-
- highp vec2 texPos2;
- texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
- texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
-
- lowp vec4 newColor1 = texture2D(inputImageTexture2, texPos1);
- lowp vec4 newColor2 = texture2D(inputImageTexture2, texPos2);
-
- lowp vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
- gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);
- }
-);
-#else
-NSString *const kGPUImageLookupFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2; // TODO: This is not used
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2; // lookup texture
-
- uniform float intensity;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- float blueColor = textureColor.b * 63.0;
-
- vec2 quad1;
- quad1.y = floor(floor(blueColor) / 8.0);
- quad1.x = floor(blueColor) - (quad1.y * 8.0);
-
- vec2 quad2;
- quad2.y = floor(ceil(blueColor) / 8.0);
- quad2.x = ceil(blueColor) - (quad2.y * 8.0);
-
- vec2 texPos1;
- texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
- texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
-
- vec2 texPos2;
- texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.r);
- texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * textureColor.g);
-
- vec4 newColor1 = texture2D(inputImageTexture2, texPos1);
- vec4 newColor2 = texture2D(inputImageTexture2, texPos2);
-
- vec4 newColor = mix(newColor1, newColor2, fract(blueColor));
- gl_FragColor = mix(textureColor, vec4(newColor.rgb, textureColor.w), intensity);
- }
-);
-#endif
-
-@implementation GPUImageLookupFilter
-
-@synthesize intensity = _intensity;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- intensityUniform = [filterProgram uniformIndex:@"intensity"];
- self.intensity = 1.0f;
-
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLookupFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setIntensity:(CGFloat)intensity
-{
- _intensity = intensity;
-
- [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.h
deleted file mode 100644
index be5c397..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilterGroup.h"
-#import "GPUImageBuffer.h"
-#import "GPUImageDissolveBlendFilter.h"
-
-@interface GPUImageLowPassFilter : GPUImageFilterGroup
-{
- GPUImageBuffer *bufferFilter;
- GPUImageDissolveBlendFilter *dissolveBlendFilter;
-}
-
-// This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5.
-@property(readwrite, nonatomic) CGFloat filterStrength;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.m
deleted file mode 100644
index 39ca08f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLowPassFilter.m
+++ /dev/null
@@ -1,61 +0,0 @@
-#import "GPUImageLowPassFilter.h"
-
-@implementation GPUImageLowPassFilter
-
-@synthesize filterStrength;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // Take in the frame and blend it with the previous one
- dissolveBlendFilter = [[GPUImageDissolveBlendFilter alloc] init];
- [self addFilter:dissolveBlendFilter];
-
- // Buffer the result to be fed back into the blend
- bufferFilter = [[GPUImageBuffer alloc] init];
- [self addFilter:bufferFilter];
-
- // Texture location 0 needs to be the original image for the dissolve blend
- [bufferFilter addTarget:dissolveBlendFilter atTextureLocation:1];
- [dissolveBlendFilter addTarget:bufferFilter];
-
- [dissolveBlendFilter disableSecondFrameCheck];
-
- // To prevent double updating of this filter, disable updates from the sharp image side
- // self.inputFilterToIgnoreForUpdates = unsharpMaskFilter;
-
- self.initialFilters = [NSArray arrayWithObject:dissolveBlendFilter];
- self.terminalFilter = dissolveBlendFilter;
-
- self.filterStrength = 0.5;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setFilterStrength:(CGFloat)newValue;
-{
- dissolveBlendFilter.mix = newValue;
-}
-
-- (CGFloat)filterStrength;
-{
- return dissolveBlendFilter.mix;
-}
-
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-{
- [self.terminalFilter addTarget:newTarget atTextureLocation:textureLocation];
- //if use GPUImagePipline,will cause self.termainlFilter removeAllTargets,so need add bufferFilter back
- if (self.terminalFilter == dissolveBlendFilter && ![self.terminalFilter.targets containsObject:bufferFilter]) {
- [self.terminalFilter addTarget:bufferFilter atTextureLocation:1];
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.h
deleted file mode 100644
index 5a31037..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageLuminanceRangeFilter : GPUImageFilter
-{
- GLint rangeReductionUniform;
-}
-
-/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
- */
-@property(readwrite, nonatomic) CGFloat rangeReductionFactor;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.m
deleted file mode 100644
index 5122c95..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceRangeFilter.m
+++ /dev/null
@@ -1,76 +0,0 @@
-#import "GPUImageLuminanceRangeFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float rangeReduction;
-
- // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
- const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump float luminance = dot(textureColor.rgb, luminanceWeighting);
- mediump float luminanceRatio = ((0.5 - luminance) * rangeReduction);
-
- gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageLuminanceRangeFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float rangeReduction;
-
- // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
- const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
- float luminanceRatio = ((0.5 - luminance) * rangeReduction);
-
- gl_FragColor = vec4((textureColor.rgb) + (luminanceRatio), textureColor.w);
- }
-);
-#endif
-
-@implementation GPUImageLuminanceRangeFilter
-
-@synthesize rangeReductionFactor = _rangeReductionFactor;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceRangeFragmentShaderString]))
- {
- return nil;
- }
-
- rangeReductionUniform = [filterProgram uniformIndex:@"rangeReduction"];
- self.rangeReductionFactor = 0.6;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setRangeReductionFactor:(CGFloat)newValue;
-{
- _rangeReductionFactor = newValue;
-
- [self setFloat:_rangeReductionFactor forUniform:rangeReductionUniform program:filterProgram];
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.h
deleted file mode 100755
index 0abb9a1..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Pixels with a luminance above the threshold will appear white, and those below will be black
- */
-@interface GPUImageLuminanceThresholdFilter : GPUImageFilter
-{
- GLint thresholdUniform;
-}
-
-/** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default
- */
-@property(readwrite, nonatomic) CGFloat threshold;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.m
deleted file mode 100755
index 368b8fb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminanceThresholdFilter.m
+++ /dev/null
@@ -1,74 +0,0 @@
-#import "GPUImageLuminanceThresholdFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform highp float threshold;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- highp float luminance = dot(textureColor.rgb, W);
- highp float thresholdResult = step(threshold, luminance);
-
- gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
- }
-);
-#else
-NSString *const kGPUImageLuminanceThresholdFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float threshold;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, W);
- float thresholdResult = step(threshold, luminance);
-
- gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
- }
-);
-#endif
-
-@implementation GPUImageLuminanceThresholdFilter
-
-@synthesize threshold = _threshold;
-
-#pragma mark -
-#pragma mark Initialization
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminanceThresholdFragmentShaderString]))
- {
- return nil;
- }
-
- thresholdUniform = [filterProgram uniformIndex:@"threshold"];
- self.threshold = 0.5;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- _threshold = newValue;
-
- [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.h b/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.h
deleted file mode 100644
index b2d2458..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageAverageColor.h"
-
-@interface GPUImageLuminosity : GPUImageAverageColor
-{
- GLProgram *secondFilterProgram;
- GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
- GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
- GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
-}
-
-// This block is called on the completion of color averaging for a frame
-@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);
-
-- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
-- (void)initializeSecondaryAttributes;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.m b/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.m
deleted file mode 100644
index 37f374a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosity.m
+++ /dev/null
@@ -1,329 +0,0 @@
-#import "GPUImageLuminosity.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 outputTextureCoordinate;
-
- varying highp vec2 upperLeftInputTextureCoordinate;
- varying highp vec2 upperRightInputTextureCoordinate;
- varying highp vec2 lowerLeftInputTextureCoordinate;
- varying highp vec2 lowerRightInputTextureCoordinate;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- highp float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);
- highp float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);
- highp float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);
- highp float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);
-
- highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
- gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
- }
-);
-
-NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 outputTextureCoordinate;
-
- varying highp vec2 upperLeftInputTextureCoordinate;
- varying highp vec2 upperRightInputTextureCoordinate;
- varying highp vec2 lowerLeftInputTextureCoordinate;
- varying highp vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- highp float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
- highp float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
- highp float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
- highp float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
-
- highp float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
- gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
- }
-);
-#else
-NSString *const kGPUImageInitialLuminosityFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 outputTextureCoordinate;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- float upperLeftLuminance = dot(texture2D(inputImageTexture, upperLeftInputTextureCoordinate).rgb, W);
- float upperRightLuminance = dot(texture2D(inputImageTexture, upperRightInputTextureCoordinate).rgb, W);
- float lowerLeftLuminance = dot(texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).rgb, W);
- float lowerRightLuminance = dot(texture2D(inputImageTexture, lowerRightInputTextureCoordinate).rgb, W);
-
- float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
- gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
- }
-);
-
-NSString *const kGPUImageLuminosityFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 outputTextureCoordinate;
-
- varying vec2 upperLeftInputTextureCoordinate;
- varying vec2 upperRightInputTextureCoordinate;
- varying vec2 lowerLeftInputTextureCoordinate;
- varying vec2 lowerRightInputTextureCoordinate;
-
- void main()
- {
- float upperLeftLuminance = texture2D(inputImageTexture, upperLeftInputTextureCoordinate).r;
- float upperRightLuminance = texture2D(inputImageTexture, upperRightInputTextureCoordinate).r;
- float lowerLeftLuminance = texture2D(inputImageTexture, lowerLeftInputTextureCoordinate).r;
- float lowerRightLuminance = texture2D(inputImageTexture, lowerRightInputTextureCoordinate).r;
-
- float luminosity = 0.25 * (upperLeftLuminance + upperRightLuminance + lowerLeftLuminance + lowerRightLuminance);
- gl_FragColor = vec4(luminosity, luminosity, luminosity, 1.0);
- }
-);
-#endif
-
-@implementation GPUImageLuminosity
-
-@synthesize luminosityProcessingFinishedBlock = _luminosityProcessingFinishedBlock;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageColorAveragingVertexShaderString fragmentShaderFromString:kGPUImageInitialLuminosityFragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
-
- __unsafe_unretained GPUImageLuminosity *weakSelf = self;
- [self setFrameProcessingCompletionBlock:^(GPUImageOutput *filter, CMTime frameTime) {
- [weakSelf extractLuminosityAtFrameTime:frameTime];
- }];
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageColorAveragingVertexShaderString fragmentShaderString:kGPUImageLuminosityFragmentShaderString];
-
- if (!secondFilterProgram.initialized)
- {
- [self initializeSecondaryAttributes];
-
- if (![secondFilterProgram link])
- {
- NSString *progLog = [secondFilterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [secondFilterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [secondFilterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- filterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
- secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
- secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
- secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
-
- secondFilterTexelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"];
- secondFilterTexelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"];
-
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
-
- glEnableVertexAttribArray(secondFilterPositionAttribute);
- glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
- });
-
- return self;
-}
-
-- (void)initializeSecondaryAttributes;
-{
- [secondFilterProgram addAttribute:@"position"];
- [secondFilterProgram addAttribute:@"inputTextureCoordinate"];
-}
-
-/*
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- // Do an initial render pass that both convert to luminance and reduces
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- GLuint currentFramebuffer = [[stageFramebuffers objectAtIndex:0] intValue];
- glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGSize currentStageSize = [[stageSizes objectAtIndex:0] CGSizeValue];
-#else
- NSSize currentStageSize = [[stageSizes objectAtIndex:0] sizeValue];
-#endif
- glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);
-
- GLuint currentTexture = [firstInputFramebuffer texture];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, currentTexture);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glUniform1f(texelWidthUniform, 0.5 / currentStageSize.width);
- glUniform1f(texelHeightUniform, 0.5 / currentStageSize.height);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- currentTexture = [[stageTextures objectAtIndex:0] intValue];
-
- // Just perform reductions from this point on
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
- glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- NSUInteger numberOfStageFramebuffers = [stageFramebuffers count];
- for (NSUInteger currentStage = 1; currentStage < numberOfStageFramebuffers; currentStage++)
- {
- currentFramebuffer = [[stageFramebuffers objectAtIndex:currentStage] intValue];
- glBindFramebuffer(GL_FRAMEBUFFER, currentFramebuffer);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- currentStageSize = [[stageSizes objectAtIndex:currentStage] CGSizeValue];
-#else
- currentStageSize = [[stageSizes objectAtIndex:currentStage] sizeValue];
-#endif
- glViewport(0, 0, (int)currentStageSize.width, (int)currentStageSize.height);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, currentTexture);
-
- glUniform1i(secondFilterInputTextureUniform, 2);
-
- glUniform1f(secondFilterTexelWidthUniform, 0.5 / currentStageSize.width);
- glUniform1f(secondFilterTexelHeightUniform, 0.5 / currentStageSize.height);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- currentTexture = [[stageTextures objectAtIndex:currentStage] intValue];
-
-// NSUInteger totalBytesForImage = (int)currentStageSize.width * (int)currentStageSize.height * 4;
-// GLubyte *rawImagePixels2 = (GLubyte *)malloc(totalBytesForImage);
-// glReadPixels(0, 0, (int)currentStageSize.width, (int)currentStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels2);
-// CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, rawImagePixels2, totalBytesForImage, NULL);
-// CGColorSpaceRef defaultRGBColorSpace = CGColorSpaceCreateDeviceRGB();
-//
-// CGFloat currentRedTotal = 0.0f, currentGreenTotal = 0.0f, currentBlueTotal = 0.0f, currentAlphaTotal = 0.0f;
-// NSUInteger totalNumberOfPixels = totalBytesForImage / 4;
-//
-// for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
-// {
-// currentRedTotal += (CGFloat)rawImagePixels2[(currentPixel * 4)] / 255.0f;
-// currentGreenTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 1] / 255.0f;
-// currentBlueTotal += (CGFloat)rawImagePixels2[(currentPixel * 4 + 2)] / 255.0f;
-// currentAlphaTotal += (CGFloat)rawImagePixels2[(currentPixel * 4) + 3] / 255.0f;
-// }
-//
-// NSLog(@"Stage %d average image red: %f, green: %f, blue: %f, alpha: %f", currentStage, currentRedTotal / (CGFloat)totalNumberOfPixels, currentGreenTotal / (CGFloat)totalNumberOfPixels, currentBlueTotal / (CGFloat)totalNumberOfPixels, currentAlphaTotal / (CGFloat)totalNumberOfPixels);
-//
-//
-// CGImageRef cgImageFromBytes = CGImageCreate((int)currentStageSize.width, (int)currentStageSize.height, 8, 32, 4 * (int)currentStageSize.width, defaultRGBColorSpace, kCGBitmapByteOrderDefault | kCGImageAlphaLast, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-//
-// UIImage *imageToSave = [UIImage imageWithCGImage:cgImageFromBytes];
-//
-// NSData *dataForPNGFile = UIImagePNGRepresentation(imageToSave);
-//
-// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
-// NSString *documentsDirectory = [paths objectAtIndex:0];
-//
-// NSString *imageName = [NSString stringWithFormat:@"AverageLevel%d.png", currentStage];
-// NSError *error = nil;
-// if (![dataForPNGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:imageName] options:NSAtomicWrite error:&error])
-// {
-// return;
-// }
- }
-
- [firstInputFramebuffer unlock];
-}
- */
-
-#pragma mark -
-#pragma mark Callbacks
-
-- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
-
- // we need a normal color texture for this filter
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- NSUInteger totalNumberOfPixels = round(finalStageSize.width * finalStageSize.height);
-
- if (rawImagePixels == NULL)
- {
- rawImagePixels = (GLubyte *)malloc(totalNumberOfPixels * 4);
- }
-
- [GPUImageContext useImageProcessingContext];
- [outputFramebuffer activateFramebuffer];
-
- glReadPixels(0, 0, (int)finalStageSize.width, (int)finalStageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-
- NSUInteger luminanceTotal = 0;
- NSUInteger byteIndex = 0;
- for (NSUInteger currentPixel = 0; currentPixel < totalNumberOfPixels; currentPixel++)
- {
- luminanceTotal += rawImagePixels[byteIndex];
- byteIndex += 4;
- }
-
- CGFloat normalizedLuminosityTotal = (CGFloat)luminanceTotal / (CGFloat)totalNumberOfPixels / 255.0;
-
- if (_luminosityProcessingFinishedBlock != NULL)
- {
- _luminosityProcessingFinishedBlock(normalizedLuminosityTotal, frameTime);
- }
- });
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.h
deleted file mode 100644
index 03b5e4c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.m
deleted file mode 100644
index 7e39974..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageLuminosityBlendFilter.m
+++ /dev/null
@@ -1,113 +0,0 @@
-#import "GPUImageLuminosityBlendFilter.h"
-
-/**
- * Luminosity blend mode based upon pseudo code from the PDF specification.
- */
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- highp float lum(lowp vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- lowp vec3 clipcolor(lowp vec3 c) {
- highp float l = lum(c);
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- lowp vec3 setlum(lowp vec3 c, highp float l) {
- highp float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- void main()
- {
- highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#else
-NSString *const kGPUImageLuminosityBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- float lum(vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- vec3 clipcolor(vec3 c) {
- float l = lum(c);
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- vec3 setlum(vec3 c, float l) {
- float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- void main()
- {
- vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(baseColor.rgb, lum(overlayColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#endif
-
-
-@implementation GPUImageLuminosityBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageLuminosityBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.h
deleted file mode 100755
index 94cf064..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageMaskFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.m
deleted file mode 100755
index 24503b3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMaskFilter.m
+++ /dev/null
@@ -1,76 +0,0 @@
-#import "GPUImageMaskFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMaskShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- //Averages mask's the RGB values, and scales that value by the mask's alpha
- //
- //The dot product should take fewer cycles than doing an average normally
- //
- //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
- lowp float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
-
- gl_FragColor = vec4(textureColor.xyz, newAlpha);
-// gl_FragColor = vec4(textureColor2);
- }
-);
-#else
-NSString *const kGPUImageMaskShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- //Averages mask's the RGB values, and scales that value by the mask's alpha
- //
- //The dot product should take fewer cycles than doing an average normally
- //
- //Typical/ideal case, R,G, and B will be the same, and Alpha will be 1.0
- float newAlpha = dot(textureColor2.rgb, vec3(.33333334, .33333334, .33333334)) * textureColor2.a;
-
- gl_FragColor = vec4(textureColor.xyz, newAlpha);
- // gl_FragColor = vec4(textureColor2);
- }
-);
-#endif
-
-@implementation GPUImageMaskFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageMaskShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- glEnable(GL_BLEND);
- glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
- [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
- glDisable(GL_BLEND);
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.h
deleted file mode 100644
index 8022578..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.m
deleted file mode 100644
index 78c1180..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMedianFilter.m
+++ /dev/null
@@ -1,178 +0,0 @@
-#import "GPUImageMedianFilter.h"
-
-/*
- 3x3 median filter, adapted from "A Fast, Small-Radius GPU Median Filter" by Morgan McGuire in ShaderX6
- http://graphics.cs.williams.edu/papers/MedianShaderX6/
-
- Morgan McGuire and Kyle Whitson
- Williams College
-
- Register allocation tips by Victor Huang Xiaohuang
- University of Illinois at Urbana-Champaign
-
- http://graphics.cs.williams.edu
-
-
- Copyright (c) Morgan McGuire and Williams College, 2006
- All rights reserved.
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
-
- Redistributions of source code must retain the above copyright notice,
- this list of conditions and the following disclaimer.
-
- Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
-#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b);
-#define mn3(a, b, c) s2(a, b); s2(a, c);
-#define mx3(a, b, c) s2(b, c); s2(a, c);
-
-#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges
-#define mnmx4(a, b, c, d) s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges
-#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges
-#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges
-
- void main()
- {
- vec3 v[6];
-
- v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
- v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
-// v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
-// v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- vec3 temp;
-
- mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
-
- mnmx5(v[1], v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
-
- mnmx4(v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- mnmx3(v[3], v[4], v[5]);
-
- gl_FragColor = vec4(v[4], 1.0);
- }
-);
-#else
-NSString *const kGPUImageMedianFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
-#define s2(a, b) temp = a; a = min(a, b); b = max(temp, b);
-#define mn3(a, b, c) s2(a, b); s2(a, c);
-#define mx3(a, b, c) s2(b, c); s2(a, c);
-
-#define mnmx3(a, b, c) mx3(a, b, c); s2(a, b); // 3 exchanges
-#define mnmx4(a, b, c, d) s2(a, b); s2(c, d); s2(a, c); s2(b, d); // 4 exchanges
-#define mnmx5(a, b, c, d, e) s2(a, b); s2(c, d); mn3(a, c, e); mx3(b, d, e); // 6 exchanges
-#define mnmx6(a, b, c, d, e, f) s2(a, d); s2(b, e); s2(c, f); mn3(a, b, c); mx3(d, e, f); // 7 exchanges
-
- void main()
- {
- vec3 v[6];
-
- v[0] = texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb;
- v[1] = texture2D(inputImageTexture, topRightTextureCoordinate).rgb;
- v[2] = texture2D(inputImageTexture, topLeftTextureCoordinate).rgb;
- v[3] = texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb;
- v[4] = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- v[5] = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- // v[6] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- // v[7] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- vec3 temp;
-
- mnmx6(v[0], v[1], v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
-
- mnmx5(v[1], v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, topTextureCoordinate).rgb;
-
- mnmx4(v[2], v[3], v[4], v[5]);
-
- v[5] = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- mnmx3(v[3], v[4], v[5]);
-
- gl_FragColor = vec4(v[4], 1.0);
- }
-);
-#endif
-
-@implementation GPUImageMedianFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageMedianFragmentShaderString]))
- {
- return nil;
- }
-
- hasOverriddenImageSizeFactor = NO;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.h
deleted file mode 100755
index de17064..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImagePicture;
-
-/** A photo filter based on Photoshop action by Miss Etikate:
- http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961
- */
-
-// Note: If you want to use this effect you have to add lookup_miss_etikate.png
-// from Resources folder to your application bundle.
-
-@interface GPUImageMissEtikateFilter : GPUImageFilterGroup
-{
- GPUImagePicture *lookupImageSource;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.m
deleted file mode 100755
index 1810b70..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMissEtikateFilter.m
+++ /dev/null
@@ -1,38 +0,0 @@
-#import "GPUImageMissEtikateFilter.h"
-#import "GPUImagePicture.h"
-#import "GPUImageLookupFilter.h"
-
-@implementation GPUImageMissEtikateFilter
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- UIImage *image = [UIImage imageNamed:@"lookup_miss_etikate.png"];
-#else
- NSImage *image = [NSImage imageNamed:@"lookup_miss_etikate.png"];
-#endif
-
- NSAssert(image, @"To use GPUImageMissEtikateFilter you need to add lookup_miss_etikate.png from GPUImage/framework/Resources to your application bundle.");
-
- lookupImageSource = [[GPUImagePicture alloc] initWithImage:image];
- GPUImageLookupFilter *lookupFilter = [[GPUImageLookupFilter alloc] init];
- [self addFilter:lookupFilter];
-
- [lookupImageSource addTarget:lookupFilter atTextureLocation:1];
- [lookupImageSource processImage];
-
- self.initialFilters = [NSArray arrayWithObjects:lookupFilter, nil];
- self.terminalFilter = lookupFilter;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.h
deleted file mode 100644
index 66a0e77..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageMonochromeFilter : GPUImageFilter
-{
- GLint intensityUniform, filterColorUniform;
-}
-
-@property(readwrite, nonatomic) CGFloat intensity;
-@property(readwrite, nonatomic) GPUVector4 color;
-
-- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.m
deleted file mode 100644
index 70cef87..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMonochromeFilter.m
+++ /dev/null
@@ -1,115 +0,0 @@
-#import "GPUImageMonochromeFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float intensity;
- uniform vec3 filterColor;
-
- const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- //desat, then apply overlay blend
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
-
- lowp vec4 desat = vec4(vec3(luminance), 1.0);
-
- //overlay
- lowp vec4 outputColor = vec4(
- (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),
- (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),
- (desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),
- 1.0
- );
-
- //which is better, or are they equal?
- gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);
- }
-);
-#else
-NSString *const kGPUMonochromeFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float intensity;
- uniform vec3 filterColor;
-
- const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- //desat, then apply overlay blend
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
-
- vec4 desat = vec4(vec3(luminance), 1.0);
-
- //overlay
- vec4 outputColor = vec4(
- (desat.r < 0.5 ? (2.0 * desat.r * filterColor.r) : (1.0 - 2.0 * (1.0 - desat.r) * (1.0 - filterColor.r))),
- (desat.g < 0.5 ? (2.0 * desat.g * filterColor.g) : (1.0 - 2.0 * (1.0 - desat.g) * (1.0 - filterColor.g))),
- (desat.b < 0.5 ? (2.0 * desat.b * filterColor.b) : (1.0 - 2.0 * (1.0 - desat.b) * (1.0 - filterColor.b))),
- 1.0
- );
-
- //which is better, or are they equal?
- gl_FragColor = vec4( mix(textureColor.rgb, outputColor.rgb, intensity), textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageMonochromeFilter
-
-@synthesize intensity = _intensity;
-@synthesize color = _color;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUMonochromeFragmentShaderString]))
- {
- return nil;
- }
-
- intensityUniform = [filterProgram uniformIndex:@"intensity"];
- filterColorUniform = [filterProgram uniformIndex:@"filterColor"];
-
- self.intensity = 1.0;
- self.color = (GPUVector4){0.6f, 0.45f, 0.3f, 1.f};
- //self.color = [CIColor colorWithRed:0.6 green:0.45 blue:0.3 alpha:1.];
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setColor:(GPUVector4)color;
-{
-
- _color = color;
-
- [self setColorRed:color.one green:color.two blue:color.three];
-}
-
-- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
-{
- GPUVector3 filterColor = {redComponent, greenComponent, blueComponent};
-
- [self setVec3:filterColor forUniform:filterColorUniform program:filterProgram];
-}
-
-- (void)setIntensity:(CGFloat)newValue;
-{
- _intensity = newValue;
-
- [self setFloat:_intensity forUniform:intensityUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.h
deleted file mode 100644
index 5f72d3c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.h
+++ /dev/null
@@ -1,22 +0,0 @@
-
-// This needs a little more work, it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working
-
-#import "GPUImageTwoInputFilter.h"
-#import "GPUImagePicture.h"
-
-@interface GPUImageMosaicFilter : GPUImageTwoInputFilter {
- GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;
- GPUImagePicture *pic;
-}
-
-// This filter takes an input tileset, the tiles must ascend in luminance
-// It looks at the input image and replaces each display tile with an input tile
-// according to the luminance of that tile. The idea was to replicate the ASCII
-// video filters seen in other apps, but the tileset can be anything.
-@property(readwrite, nonatomic) CGSize inputTileSize;
-@property(readwrite, nonatomic) float numTiles;
-@property(readwrite, nonatomic) CGSize displayTileSize;
-@property(readwrite, nonatomic) BOOL colorOn;
-@property(readwrite, nonatomic, copy) NSString *tileSet;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.m
deleted file mode 100644
index b017ca9..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMosaicFilter.m
+++ /dev/null
@@ -1,188 +0,0 @@
-//
-// GPUImageMosaicFilter.m
-
-
-#import "GPUImageMosaicFilter.h"
-#import "GPUImagePicture.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform vec2 inputTileSize;
- uniform vec2 displayTileSize;
- uniform float numTiles;
- uniform int colorOn;
-
- void main()
- {
- vec2 xy = textureCoordinate;
- xy = xy - mod(xy, displayTileSize);
-
- vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);
-
- vec4 inputColor = texture2D(inputImageTexture2, xy);
- float lum = dot(inputColor,lumcoeff);
- lum = 1.0 - lum;
-
- float stepsize = 1.0 / numTiles;
- float lumStep = (lum - mod(lum, stepsize)) / stepsize;
-
- float rowStep = 1.0 / inputTileSize.x;
- float x = mod(lumStep, rowStep);
- float y = floor(lumStep / rowStep);
-
- vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y);
- vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));
-
- vec4 color = texture2D(inputImageTexture, finalCoord);
- if (colorOn == 1) {
- color = color * inputColor;
- }
- gl_FragColor = color;
-
- }
-);
-#else
-NSString *const kGPUImageMosaicFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform vec2 inputTileSize;
- uniform vec2 displayTileSize;
- uniform float numTiles;
- uniform int colorOn;
-
- void main()
- {
- vec2 xy = textureCoordinate;
- xy = xy - mod(xy, displayTileSize);
-
- vec4 lumcoeff = vec4(0.299,0.587,0.114,0.0);
-
- vec4 inputColor = texture2D(inputImageTexture2, xy);
- float lum = dot(inputColor,lumcoeff);
- lum = 1.0 - lum;
-
- float stepsize = 1.0 / numTiles;
- float lumStep = (lum - mod(lum, stepsize)) / stepsize;
-
- float rowStep = 1.0 / inputTileSize.x;
- float x = mod(lumStep, rowStep);
- float y = floor(lumStep / rowStep);
-
- vec2 startCoord = vec2(float(x) * inputTileSize.x, float(y) * inputTileSize.y);
- vec2 finalCoord = startCoord + ((textureCoordinate - xy) * (inputTileSize / displayTileSize));
-
- vec4 color = texture2D(inputImageTexture, finalCoord);
- if (colorOn == 1) {
- color = color * inputColor;
- }
- gl_FragColor = color;
- }
-);
-#endif
-
-@implementation GPUImageMosaicFilter
-
-@synthesize inputTileSize = _inputTileSize, numTiles = _numTiles, displayTileSize = _displayTileSize, colorOn = _colorOn;
-@synthesize tileSet = _tileSet;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageMosaicFragmentShaderString]))
- {
- return nil;
- }
-
- inputTileSizeUniform = [filterProgram uniformIndex:@"inputTileSize"];
- displayTileSizeUniform = [filterProgram uniformIndex:@"displayTileSize"];
- numTilesUniform = [filterProgram uniformIndex:@"numTiles"];
- colorOnUniform = [filterProgram uniformIndex:@"colorOn"];
-
- CGSize its = CGSizeMake(0.125, 0.125);
- CGSize dts = CGSizeMake(0.025, 0.025);
- [self setDisplayTileSize:dts];
- [self setInputTileSize:its];
- [self setNumTiles:64.0];
- [self setColorOn:YES];
- //[self setTileSet:@"squares.png"];
- return self;
-}
-
-- (void)setColorOn:(BOOL)yes
-{
- glUniform1i(colorOnUniform, yes);
-}
-
-- (void)setNumTiles:(float)numTiles
-{
-
- _numTiles = numTiles;
- [self setFloat:_numTiles forUniformName:@"numTiles"];
-}
-
-- (void)setInputTileSize:(CGSize)inputTileSize
-{
- if (inputTileSize.width > 1.0) {
- _inputTileSize.width = 1.0;
- }
- if (inputTileSize.height > 1.0) {
- _inputTileSize.height = 1.0;
- }
- if (inputTileSize.width < 0.0) {
- _inputTileSize.width = 0.0;
- }
- if (inputTileSize.height < 0.0) {
- _inputTileSize.height = 0.0;
- }
-
-
- _inputTileSize = inputTileSize;
-
- [self setSize:_inputTileSize forUniform:inputTileSizeUniform program:filterProgram];
-}
-
--(void)setDisplayTileSize:(CGSize)displayTileSize
-{
- if (displayTileSize.width > 1.0) {
- _displayTileSize.width = 1.0;
- }
- if (displayTileSize.height > 1.0) {
- _displayTileSize.height = 1.0;
- }
- if (displayTileSize.width < 0.0) {
- _displayTileSize.width = 0.0;
- }
- if (displayTileSize.height < 0.0) {
- _displayTileSize.height = 0.0;
- }
-
-
- _displayTileSize = displayTileSize;
-
- [self setSize:_displayTileSize forUniform:displayTileSizeUniform program:filterProgram];
-}
-
--(void)setTileSet:(NSString *)tileSet
-{
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- UIImage *img = [UIImage imageNamed:tileSet];
-#else
- NSImage *img = [NSImage imageNamed:tileSet];
-#endif
- pic = [[GPUImagePicture alloc] initWithImage:img smoothlyScaleOutput:YES];
- [pic addTarget:self];
- [pic processImage];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.h
deleted file mode 100644
index dcca712..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageMotionBlurFilter : GPUImageFilter
-
-/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
- */
-@property (readwrite, nonatomic) CGFloat blurSize;
-
-/** The angular direction of the blur, in degrees. 0 degrees by default
- */
-@property (readwrite, nonatomic) CGFloat blurAngle;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.m
deleted file mode 100644
index 5a2c20b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMotionBlurFilter.m
+++ /dev/null
@@ -1,209 +0,0 @@
-#import "GPUImageMotionBlurFilter.h"
-
-// Override vertex shader to remove dependent texture reads
-NSString *const kGPUImageTiltedTexelSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform vec2 directionalTexelStep;
-
- varying vec2 textureCoordinate;
- varying vec2 oneStepBackTextureCoordinate;
- varying vec2 twoStepsBackTextureCoordinate;
- varying vec2 threeStepsBackTextureCoordinate;
- varying vec2 fourStepsBackTextureCoordinate;
- varying vec2 oneStepForwardTextureCoordinate;
- varying vec2 twoStepsForwardTextureCoordinate;
- varying vec2 threeStepsForwardTextureCoordinate;
- varying vec2 fourStepsForwardTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- textureCoordinate = inputTextureCoordinate.xy;
- oneStepBackTextureCoordinate = inputTextureCoordinate.xy - directionalTexelStep;
- twoStepsBackTextureCoordinate = inputTextureCoordinate.xy - 2.0 * directionalTexelStep;
- threeStepsBackTextureCoordinate = inputTextureCoordinate.xy - 3.0 * directionalTexelStep;
- fourStepsBackTextureCoordinate = inputTextureCoordinate.xy - 4.0 * directionalTexelStep;
- oneStepForwardTextureCoordinate = inputTextureCoordinate.xy + directionalTexelStep;
- twoStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 2.0 * directionalTexelStep;
- threeStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 3.0 * directionalTexelStep;
- fourStepsForwardTextureCoordinate = inputTextureCoordinate.xy + 4.0 * directionalTexelStep;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- uniform sampler2D inputImageTexture;
-
- varying vec2 textureCoordinate;
- varying vec2 oneStepBackTextureCoordinate;
- varying vec2 twoStepsBackTextureCoordinate;
- varying vec2 threeStepsBackTextureCoordinate;
- varying vec2 fourStepsBackTextureCoordinate;
- varying vec2 oneStepForwardTextureCoordinate;
- varying vec2 twoStepsForwardTextureCoordinate;
- varying vec2 threeStepsForwardTextureCoordinate;
- varying vec2 fourStepsForwardTextureCoordinate;
-
- void main()
- {
- // Box weights
-// lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;
-// fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;
-
- lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
- fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;
- fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12;
- fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;
- fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;
- fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;
- fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12;
- fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;
- fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;
-
- gl_FragColor = fragmentColor;
- }
-);
-#else
-NSString *const kGPUImageMotionBlurFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 textureCoordinate;
- varying vec2 oneStepBackTextureCoordinate;
- varying vec2 twoStepsBackTextureCoordinate;
- varying vec2 threeStepsBackTextureCoordinate;
- varying vec2 fourStepsBackTextureCoordinate;
- varying vec2 oneStepForwardTextureCoordinate;
- varying vec2 twoStepsForwardTextureCoordinate;
- varying vec2 threeStepsForwardTextureCoordinate;
- varying vec2 fourStepsForwardTextureCoordinate;
-
- void main()
- {
- // Box weights
- // vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.1111111;
- // fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.1111111;
-
- vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
- fragmentColor += texture2D(inputImageTexture, oneStepBackTextureCoordinate) * 0.15;
- fragmentColor += texture2D(inputImageTexture, twoStepsBackTextureCoordinate) * 0.12;
- fragmentColor += texture2D(inputImageTexture, threeStepsBackTextureCoordinate) * 0.09;
- fragmentColor += texture2D(inputImageTexture, fourStepsBackTextureCoordinate) * 0.05;
- fragmentColor += texture2D(inputImageTexture, oneStepForwardTextureCoordinate) * 0.15;
- fragmentColor += texture2D(inputImageTexture, twoStepsForwardTextureCoordinate) * 0.12;
- fragmentColor += texture2D(inputImageTexture, threeStepsForwardTextureCoordinate) * 0.09;
- fragmentColor += texture2D(inputImageTexture, fourStepsForwardTextureCoordinate) * 0.05;
-
- gl_FragColor = fragmentColor;
- }
-);
-#endif
-
-@interface GPUImageMotionBlurFilter()
-{
- GLint directionalTexelStepUniform;
-}
-
-- (void)recalculateTexelOffsets;
-
-@end
-
-@implementation GPUImageMotionBlurFilter
-
-@synthesize blurSize = _blurSize;
-@synthesize blurAngle = _blurAngle;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageTiltedTexelSamplingVertexShaderString fragmentShaderFromString:kGPUImageMotionBlurFragmentShaderString]))
- {
- return nil;
- }
-
- directionalTexelStepUniform = [filterProgram uniformIndex:@"directionalTexelStep"];
-
- self.blurSize = 2.5;
- self.blurAngle = 0.0;
-
- return self;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self recalculateTexelOffsets];
- }
-}
-
-- (void)recalculateTexelOffsets;
-{
- CGFloat aspectRatio = 1.0;
- CGPoint texelOffsets;
-
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- aspectRatio = (inputTextureSize.width / inputTextureSize.height);
- texelOffsets.x = _blurSize * sin(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.height;
- texelOffsets.y = _blurSize * cos(_blurAngle * M_PI / 180.0) / inputTextureSize.height;
- }
- else
- {
- aspectRatio = (inputTextureSize.height / inputTextureSize.width);
- texelOffsets.x = _blurSize * cos(_blurAngle * M_PI / 180.0) * aspectRatio / inputTextureSize.width;
- texelOffsets.y = _blurSize * sin(_blurAngle * M_PI / 180.0) / inputTextureSize.width;
- }
-
- [self setPoint:texelOffsets forUniform:directionalTexelStepUniform program:filterProgram];
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self recalculateTexelOffsets];
-}
-
-- (void)setBlurAngle:(CGFloat)newValue;
-{
- _blurAngle = newValue;
- [self recalculateTexelOffsets];
-}
-
-- (void)setBlurSize:(CGFloat)newValue;
-{
- _blurSize = newValue;
- [self recalculateTexelOffsets];
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.h b/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.h
deleted file mode 100644
index 0132914..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImageFilterGroup.h"
-#import "GPUImageLowPassFilter.h"
-#import "GPUImageAverageColor.h"
-
-@interface GPUImageMotionDetector : GPUImageFilterGroup
-{
- GPUImageLowPassFilter *lowPassFilter;
- GPUImageTwoInputFilter *frameComparisonFilter;
- GPUImageAverageColor *averageColor;
-}
-
-// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
-@property(readwrite, nonatomic) CGFloat lowPassFilterStrength;
-
-// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
-@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.m b/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.m
deleted file mode 100644
index 0e204ad..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMotionDetector.m
+++ /dev/null
@@ -1,112 +0,0 @@
-#import "GPUImageMotionDetector.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform highp float intensity;
-
- void main()
- {
- lowp vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;
- lowp vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
-
- mediump float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735
- lowp float movementThreshold = step(0.2, colorDistance);
-
- gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);
- }
-);
-#else
-NSString *const kGPUImageMotionComparisonFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float intensity;
-
- void main()
- {
- vec3 currentImageColor = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec3 lowPassImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
-
- float colorDistance = distance(currentImageColor, lowPassImageColor); // * 0.57735
- float movementThreshold = step(0.2, colorDistance);
-
- gl_FragColor = movementThreshold * vec4(textureCoordinate2.x, textureCoordinate2.y, 1.0, 1.0);
- }
-);
-#endif
-
-
-@implementation GPUImageMotionDetector
-
-@synthesize lowPassFilterStrength, motionDetectionBlock;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // Start with a low pass filter to define the component to be removed
- lowPassFilter = [[GPUImageLowPassFilter alloc] init];
- [self addFilter:lowPassFilter];
-
- // Take the difference of the current frame from the low pass filtered result to get the high pass
- frameComparisonFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageMotionComparisonFragmentShaderString];
- [self addFilter:frameComparisonFilter];
-
- // Texture location 0 needs to be the original image for the difference blend
- [lowPassFilter addTarget:frameComparisonFilter atTextureLocation:1];
-
- // End with the average color for the scene to determine the centroid
- averageColor = [[GPUImageAverageColor alloc] init];
-
- __unsafe_unretained GPUImageMotionDetector *weakSelf = self;
-
- [averageColor setColorAverageProcessingFinishedBlock:^(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime) {
- if (weakSelf.motionDetectionBlock != NULL)
- {
- weakSelf.motionDetectionBlock(CGPointMake(redComponent / alphaComponent, greenComponent / alphaComponent), alphaComponent, frameTime);
- }
-// NSLog(@"Average X: %f, Y: %f total: %f", redComponent / alphaComponent, greenComponent / alphaComponent, alphaComponent);
- }];
-
- [frameComparisonFilter addTarget:averageColor];
-
- self.initialFilters = [NSArray arrayWithObjects:lowPassFilter, frameComparisonFilter, nil];
- self.terminalFilter = frameComparisonFilter;
-
- self.lowPassFilterStrength = 0.5;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setLowPassFilterStrength:(CGFloat)newValue;
-{
- lowPassFilter.filterStrength = newValue;
-}
-
-- (CGFloat)lowPassFilterStrength;
-{
- return lowPassFilter.filterStrength;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMovie.h b/Example/Pods/GPUImage/framework/Source/GPUImageMovie.h
deleted file mode 100755
index f61e56e..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMovie.h
+++ /dev/null
@@ -1,61 +0,0 @@
-#import
-#import
-#import "GPUImageContext.h"
-#import "GPUImageOutput.h"
-
-/** Protocol for getting Movie played callback.
- */
-@protocol GPUImageMovieDelegate
-
-- (void)didCompletePlayingMovie;
-@end
-
-/** Source object for filtering movies
- */
-@interface GPUImageMovie : GPUImageOutput
-
-@property (readwrite, retain) AVAsset *asset;
-@property (readwrite, retain) AVPlayerItem *playerItem;
-@property(readwrite, retain) NSURL *url;
-
-/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
- */
-@property(readwrite, nonatomic) BOOL runBenchmark;
-
-/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
- */
-@property(readwrite, nonatomic) BOOL playAtActualSpeed;
-
-/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
- */
-@property(readwrite, nonatomic) BOOL shouldRepeat;
-
-/** This specifies the progress of the process on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, A value of 1.0 means the conversaion is complete.
- This property is not key-value observable.
- */
-@property(readonly, nonatomic) float progress;
-
-/** This is used to send the delete Movie did complete playing alert
- */
-@property (readwrite, nonatomic, assign) id delegate;
-
-@property (readonly, nonatomic) AVAssetReader *assetReader;
-@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
-@property (readonly, nonatomic) BOOL videoEncodingIsFinished;
-
-/// @name Initialization and teardown
-- (id)initWithAsset:(AVAsset *)asset;
-- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
-- (id)initWithURL:(NSURL *)url;
-- (void)yuvConversionSetup;
-
-/// @name Movie processing
-- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
-- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
-- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
-- (void)startProcessing;
-- (void)endProcessing;
-- (void)cancelProcessing;
-- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMovie.m b/Example/Pods/GPUImage/framework/Source/GPUImageMovie.m
deleted file mode 100755
index d096b03..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMovie.m
+++ /dev/null
@@ -1,761 +0,0 @@
-#import "GPUImageMovie.h"
-#import "GPUImageMovieWriter.h"
-#import "GPUImageFilter.h"
-#import "GPUImageVideoCamera.h"
-
-@interface GPUImageMovie ()
-{
- BOOL audioEncodingIsFinished, videoEncodingIsFinished;
- GPUImageMovieWriter *synchronizedMovieWriter;
- AVAssetReader *reader;
- AVPlayerItemVideoOutput *playerItemOutput;
- CADisplayLink *displayLink;
- CMTime previousFrameTime, processingFrameTime;
- CFAbsoluteTime previousActualFrameTime;
- BOOL keepLooping;
-
- GLuint luminanceTexture, chrominanceTexture;
-
- GLProgram *yuvConversionProgram;
- GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
- GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
- GLint yuvConversionMatrixUniform;
- const GLfloat *_preferredConversion;
-
- BOOL isFullYUVRange;
-
- int imageBufferWidth, imageBufferHeight;
-}
-
-- (void)processAsset;
-
-@end
-
-@implementation GPUImageMovie
-
-@synthesize url = _url;
-@synthesize asset = _asset;
-@synthesize runBenchmark = _runBenchmark;
-@synthesize playAtActualSpeed = _playAtActualSpeed;
-@synthesize delegate = _delegate;
-@synthesize shouldRepeat = _shouldRepeat;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithURL:(NSURL *)url;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- [self yuvConversionSetup];
-
- self.url = url;
- self.asset = nil;
-
- return self;
-}
-
-- (id)initWithAsset:(AVAsset *)asset;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- [self yuvConversionSetup];
-
- self.url = nil;
- self.asset = asset;
-
- return self;
-}
-
-- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- [self yuvConversionSetup];
-
- self.url = nil;
- self.asset = nil;
- self.playerItem = playerItem;
-
- return self;
-}
-
-- (void)yuvConversionSetup;
-{
- if ([GPUImageContext supportsFastTextureUpload])
- {
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- _preferredConversion = kColorConversion709;
- isFullYUVRange = YES;
- yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
-
- if (!yuvConversionProgram.initialized)
- {
- [yuvConversionProgram addAttribute:@"position"];
- [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
-
- if (![yuvConversionProgram link])
- {
- NSString *progLog = [yuvConversionProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [yuvConversionProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- yuvConversionProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
- yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
- yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
- yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
- yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
-
- [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
-
- glEnableVertexAttribArray(yuvConversionPositionAttribute);
- glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
- });
- }
-}
-
-- (void)dealloc
-{
- // Moved into endProcessing
- //if (self.playerItem && (displayLink != nil))
- //{
- // [displayLink invalidate]; // remove from all run loops
- // displayLink = nil;
- //}
-}
-
-#pragma mark -
-#pragma mark Movie processing
-
-- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
-{
- synchronizedMovieWriter = movieWriter;
- movieWriter.encodingLiveVideo = NO;
-}
-
-- (void)startProcessing
-{
- if( self.playerItem ) {
- [self processPlayerItem];
- return;
- }
- if(self.url == nil)
- {
- [self processAsset];
- return;
- }
-
- if (_shouldRepeat) keepLooping = YES;
-
- previousFrameTime = kCMTimeZero;
- previousActualFrameTime = CFAbsoluteTimeGetCurrent();
-
- NSDictionary *inputOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
- AVURLAsset *inputAsset = [[AVURLAsset alloc] initWithURL:self.url options:inputOptions];
-
- GPUImageMovie __block *blockSelf = self;
-
- [inputAsset loadValuesAsynchronouslyForKeys:[NSArray arrayWithObject:@"tracks"] completionHandler: ^{
- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
- NSError *error = nil;
- AVKeyValueStatus tracksStatus = [inputAsset statusOfValueForKey:@"tracks" error:&error];
- if (tracksStatus != AVKeyValueStatusLoaded)
- {
- return;
- }
- blockSelf.asset = inputAsset;
- [blockSelf processAsset];
- blockSelf = nil;
- });
- }];
-}
-
-- (AVAssetReader*)createAssetReader
-{
- NSError *error = nil;
- AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.asset error:&error];
-
- NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary];
- if ([GPUImageContext supportsFastTextureUpload]) {
- [outputSettings setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- isFullYUVRange = YES;
- }
- else {
- [outputSettings setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- isFullYUVRange = NO;
- }
-
- // Maybe set alwaysCopiesSampleData to NO on iOS 5.0 for faster video decoding
- AVAssetReaderTrackOutput *readerVideoTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:[[self.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] outputSettings:outputSettings];
- readerVideoTrackOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerVideoTrackOutput];
-
- NSArray *audioTracks = [self.asset tracksWithMediaType:AVMediaTypeAudio];
- BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
- AVAssetReaderTrackOutput *readerAudioTrackOutput = nil;
-
- if (shouldRecordAudioTrack)
- {
- [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
-
- // This might need to be extended to handle movies with more than one audio track
- AVAssetTrack* audioTrack = [audioTracks objectAtIndex:0];
- readerAudioTrackOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
- readerAudioTrackOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerAudioTrackOutput];
- }
-
- return assetReader;
-}
-
-- (void)processAsset
-{
- reader = [self createAssetReader];
-
- AVAssetReaderOutput *readerVideoTrackOutput = nil;
- AVAssetReaderOutput *readerAudioTrackOutput = nil;
-
- audioEncodingIsFinished = YES;
- for( AVAssetReaderOutput *output in reader.outputs ) {
- if( [output.mediaType isEqualToString:AVMediaTypeAudio] ) {
- audioEncodingIsFinished = NO;
- readerAudioTrackOutput = output;
- }
- else if( [output.mediaType isEqualToString:AVMediaTypeVideo] ) {
- readerVideoTrackOutput = output;
- }
- }
-
- if ([reader startReading] == NO)
- {
- NSLog(@"Error reading from file at URL: %@", self.url);
- return;
- }
-
- __unsafe_unretained GPUImageMovie *weakSelf = self;
-
- if (synchronizedMovieWriter != nil)
- {
- [synchronizedMovieWriter setVideoInputReadyCallback:^{
- return [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
- }];
-
- [synchronizedMovieWriter setAudioInputReadyCallback:^{
- return [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
- }];
-
- [synchronizedMovieWriter enableSynchronizationCallbacks];
- }
- else
- {
- while (reader.status == AVAssetReaderStatusReading && (!_shouldRepeat || keepLooping))
- {
- [weakSelf readNextVideoFrameFromOutput:readerVideoTrackOutput];
-
- if ( (readerAudioTrackOutput) && (!audioEncodingIsFinished) )
- {
- [weakSelf readNextAudioSampleFromOutput:readerAudioTrackOutput];
- }
-
- }
-
- if (reader.status == AVAssetReaderStatusCompleted) {
-
- [reader cancelReading];
-
- if (keepLooping) {
- reader = nil;
- dispatch_async(dispatch_get_main_queue(), ^{
- [self startProcessing];
- });
- } else {
- [weakSelf endProcessing];
- }
-
- }
- }
-}
-
-- (void)processPlayerItem
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(displayLinkCallback:)];
- [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
- [displayLink setPaused:YES];
-
- dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
- NSMutableDictionary *pixBuffAttributes = [NSMutableDictionary dictionary];
- if ([GPUImageContext supportsFastTextureUpload]) {
- [pixBuffAttributes setObject:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- }
- else {
- [pixBuffAttributes setObject:@(kCVPixelFormatType_32BGRA) forKey:(id)kCVPixelBufferPixelFormatTypeKey];
- }
- playerItemOutput = [[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:pixBuffAttributes];
- [playerItemOutput setDelegate:self queue:videoProcessingQueue];
-
- [_playerItem addOutput:playerItemOutput];
- [playerItemOutput requestNotificationOfMediaDataChangeWithAdvanceInterval:0.1];
- });
-}
-
-- (void)outputMediaDataWillChange:(AVPlayerItemOutput *)sender
-{
- // Restart display link.
- [displayLink setPaused:NO];
-}
-
-- (void)displayLinkCallback:(CADisplayLink *)sender
-{
- /*
- The callback gets called once every Vsync.
- Using the display link's timestamp and duration we can compute the next time the screen will be refreshed, and copy the pixel buffer for that time
- This pixel buffer can then be processed and later rendered on screen.
- */
- // Calculate the nextVsync time which is when the screen will be refreshed next.
- CFTimeInterval nextVSync = ([sender timestamp] + [sender duration]);
-
- CMTime outputItemTime = [playerItemOutput itemTimeForHostTime:nextVSync];
-
- if ([playerItemOutput hasNewPixelBufferForItemTime:outputItemTime]) {
- __unsafe_unretained GPUImageMovie *weakSelf = self;
- CVPixelBufferRef pixelBuffer = [playerItemOutput copyPixelBufferForItemTime:outputItemTime itemTimeForDisplay:NULL];
- if( pixelBuffer )
- runSynchronouslyOnVideoProcessingQueue(^{
- [weakSelf processMovieFrame:pixelBuffer withSampleTime:outputItemTime];
- CFRelease(pixelBuffer);
- });
- }
-}
-
-- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
-{
- if (reader.status == AVAssetReaderStatusReading && ! videoEncodingIsFinished)
- {
- CMSampleBufferRef sampleBufferRef = [readerVideoTrackOutput copyNextSampleBuffer];
- if (sampleBufferRef)
- {
- //NSLog(@"read a video frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef))));
- if (_playAtActualSpeed)
- {
- // Do this outside of the video processing queue to not slow that down while waiting
- CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferRef);
- CMTime differenceFromLastFrame = CMTimeSubtract(currentSampleTime, previousFrameTime);
- CFAbsoluteTime currentActualTime = CFAbsoluteTimeGetCurrent();
-
- CGFloat frameTimeDifference = CMTimeGetSeconds(differenceFromLastFrame);
- CGFloat actualTimeDifference = currentActualTime - previousActualFrameTime;
-
- if (frameTimeDifference > actualTimeDifference)
- {
- usleep(1000000.0 * (frameTimeDifference - actualTimeDifference));
- }
-
- previousFrameTime = currentSampleTime;
- previousActualFrameTime = CFAbsoluteTimeGetCurrent();
- }
-
- __unsafe_unretained GPUImageMovie *weakSelf = self;
- runSynchronouslyOnVideoProcessingQueue(^{
- [weakSelf processMovieFrame:sampleBufferRef];
- CMSampleBufferInvalidate(sampleBufferRef);
- CFRelease(sampleBufferRef);
- });
-
- return YES;
- }
- else
- {
- if (!keepLooping) {
- videoEncodingIsFinished = YES;
- if( videoEncodingIsFinished && audioEncodingIsFinished )
- [self endProcessing];
- }
- }
- }
- else if (synchronizedMovieWriter != nil)
- {
- if (reader.status == AVAssetReaderStatusCompleted)
- {
- [self endProcessing];
- }
- }
- return NO;
-}
-
-- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
-{
- if (reader.status == AVAssetReaderStatusReading && ! audioEncodingIsFinished)
- {
- CMSampleBufferRef audioSampleBufferRef = [readerAudioTrackOutput copyNextSampleBuffer];
- if (audioSampleBufferRef)
- {
- //NSLog(@"read an audio frame: %@", CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, CMSampleBufferGetOutputPresentationTimeStamp(audioSampleBufferRef))));
- [self.audioEncodingTarget processAudioBuffer:audioSampleBufferRef];
- CFRelease(audioSampleBufferRef);
- return YES;
- }
- else
- {
- if (!keepLooping) {
- audioEncodingIsFinished = YES;
- if( videoEncodingIsFinished && audioEncodingIsFinished )
- [self endProcessing];
- }
- }
- }
- else if (synchronizedMovieWriter != nil)
- {
- if (reader.status == AVAssetReaderStatusCompleted || reader.status == AVAssetReaderStatusFailed ||
- reader.status == AVAssetReaderStatusCancelled)
- {
- [self endProcessing];
- }
- }
- return NO;
-}
-
-- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
-{
-// CMTimeGetSeconds
-// CMTimeSubtract
-
- CMTime currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(movieSampleBuffer);
- CVImageBufferRef movieFrame = CMSampleBufferGetImageBuffer(movieSampleBuffer);
-
- processingFrameTime = currentSampleTime;
- [self processMovieFrame:movieFrame withSampleTime:currentSampleTime];
-}
-
-- (float)progress
-{
- if ( AVAssetReaderStatusReading == reader.status )
- {
- float current = processingFrameTime.value * 1.0f / processingFrameTime.timescale;
- float duration = self.asset.duration.value * 1.0f / self.asset.duration.timescale;
- return current / duration;
- }
- else if ( AVAssetReaderStatusCompleted == reader.status )
- {
- return 1.f;
- }
- else
- {
- return 0.f;
- }
-}
-
-- (void)processMovieFrame:(CVPixelBufferRef)movieFrame withSampleTime:(CMTime)currentSampleTime
-{
- int bufferHeight = (int) CVPixelBufferGetHeight(movieFrame);
- int bufferWidth = (int) CVPixelBufferGetWidth(movieFrame);
-
- CFTypeRef colorAttachments = CVBufferGetAttachment(movieFrame, kCVImageBufferYCbCrMatrixKey, NULL);
- if (colorAttachments != NULL)
- {
- if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
- {
- if (isFullYUVRange)
- {
- _preferredConversion = kColorConversion601FullRange;
- }
- else
- {
- _preferredConversion = kColorConversion601;
- }
- }
- else
- {
- _preferredConversion = kColorConversion709;
- }
- }
- else
- {
- if (isFullYUVRange)
- {
- _preferredConversion = kColorConversion601FullRange;
- }
- else
- {
- _preferredConversion = kColorConversion601;
- }
-
- }
-
- CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- // Fix issue 1580
- [GPUImageContext useImageProcessingContext];
-
- if ([GPUImageContext supportsFastTextureUpload])
- {
- CVOpenGLESTextureRef luminanceTextureRef = NULL;
- CVOpenGLESTextureRef chrominanceTextureRef = NULL;
-
- // if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
- if (CVPixelBufferGetPlaneCount(movieFrame) > 0) // Check for YUV planar inputs to do RGB conversion
- {
-
- if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
- {
- imageBufferWidth = bufferWidth;
- imageBufferHeight = bufferHeight;
- }
-
- CVReturn err;
- // Y-plane
- glActiveTexture(GL_TEXTURE4);
- if ([GPUImageContext deviceSupportsRedTextures])
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
- }
- else
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
- }
- if (err)
- {
- NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
- }
-
- luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
- glBindTexture(GL_TEXTURE_2D, luminanceTexture);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
- // UV-plane
- glActiveTexture(GL_TEXTURE5);
- if ([GPUImageContext deviceSupportsRedTextures])
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
- }
- else
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], movieFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
- }
- if (err)
- {
- NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
- }
-
- chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
- glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
-// if (!allTargetsWantMonochromeData)
-// {
- [self convertYUVToRGBOutput];
-// }
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
- [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
- }
-
- [outputFramebuffer unlock];
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
- [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
- }
-
- CVPixelBufferUnlockBaseAddress(movieFrame, 0);
- CFRelease(luminanceTextureRef);
- CFRelease(chrominanceTextureRef);
- }
- else
- {
- // TODO: Mesh this with the new framebuffer cache
-// CVPixelBufferLockBaseAddress(movieFrame, 0);
-//
-// CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, movieFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
-//
-// if (!texture || err) {
-// NSLog(@"Movie CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
-// NSAssert(NO, @"Camera failure");
-// return;
-// }
-//
-// outputTexture = CVOpenGLESTextureGetName(texture);
-// // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
-// glBindTexture(GL_TEXTURE_2D, outputTexture);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-//
-// for (id currentTarget in targets)
-// {
-// NSInteger indexOfObject = [targets indexOfObject:currentTarget];
-// NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-//
-// [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
-// [currentTarget setInputTexture:outputTexture atIndex:targetTextureIndex];
-//
-// [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
-// }
-//
-// CVPixelBufferUnlockBaseAddress(movieFrame, 0);
-// CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
-// CFRelease(texture);
-//
-// outputTexture = 0;
- }
- }
- else
- {
- // Upload to texture
- CVPixelBufferLockBaseAddress(movieFrame, 0);
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bufferWidth, bufferHeight) textureOptions:self.outputTextureOptions onlyTexture:YES];
-
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- // Using BGRA extension to pull in video frame data directly
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- self.outputTextureOptions.internalFormat,
- bufferWidth,
- bufferHeight,
- 0,
- self.outputTextureOptions.format,
- self.outputTextureOptions.type,
- CVPixelBufferGetBaseAddress(movieFrame));
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
- [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
- }
-
- [outputFramebuffer unlock];
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
- [currentTarget newFrameReadyAtTime:currentSampleTime atIndex:targetTextureIndex];
- }
- CVPixelBufferUnlockBaseAddress(movieFrame, 0);
- }
-
- if (_runBenchmark)
- {
- CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
- NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
- }
-}
-
-- (void)endProcessing;
-{
- keepLooping = NO;
- [displayLink setPaused:YES];
-
- for (id currentTarget in targets)
- {
- [currentTarget endProcessing];
- }
-
- if (synchronizedMovieWriter != nil)
- {
- [synchronizedMovieWriter setVideoInputReadyCallback:^{return NO;}];
- [synchronizedMovieWriter setAudioInputReadyCallback:^{return NO;}];
- }
-
- if (self.playerItem && (displayLink != nil))
- {
- [displayLink invalidate]; // remove from all run loops
- displayLink = nil;
- }
-
- if ([self.delegate respondsToSelector:@selector(didCompletePlayingMovie)]) {
- [self.delegate didCompletePlayingMovie];
- }
- self.delegate = nil;
-}
-
-- (void)cancelProcessing
-{
- if (reader) {
- [reader cancelReading];
- }
- [self endProcessing];
-}
-
-- (void)convertYUVToRGBOutput;
-{
- [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(imageBufferWidth, imageBufferHeight) onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
-
- static const GLfloat squareVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- static const GLfloat textureCoordinates[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- glActiveTexture(GL_TEXTURE4);
- glBindTexture(GL_TEXTURE_2D, luminanceTexture);
- glUniform1i(yuvConversionLuminanceTextureUniform, 4);
-
- glActiveTexture(GL_TEXTURE5);
- glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
- glUniform1i(yuvConversionChrominanceTextureUniform, 5);
-
- glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
-
- glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
- glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-}
-
-- (AVAssetReader*)assetReader {
- return reader;
-}
-
-- (BOOL)audioEncodingIsFinished {
- return audioEncodingIsFinished;
-}
-
-- (BOOL)videoEncodingIsFinished {
- return videoEncodingIsFinished;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.h b/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.h
deleted file mode 100644
index 00e4381..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.h
+++ /dev/null
@@ -1,21 +0,0 @@
-//
-// GPUImageMovieComposition.h
-// Givit
-//
-// Created by Sean Meiners on 2013/01/25.
-//
-//
-
-#import "GPUImageMovie.h"
-
-@interface GPUImageMovieComposition : GPUImageMovie
-
-@property (readwrite, retain) AVComposition *compositon;
-@property (readwrite, retain) AVVideoComposition *videoComposition;
-@property (readwrite, retain) AVAudioMix *audioMix;
-
-- (id)initWithComposition:(AVComposition*)compositon
- andVideoComposition:(AVVideoComposition*)videoComposition
- andAudioMix:(AVAudioMix*)audioMix;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.m b/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.m
deleted file mode 100644
index 6138fff..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMovieComposition.m
+++ /dev/null
@@ -1,70 +0,0 @@
-//
-// GPUImageMovieComposition.m
-// Givit
-//
-// Created by Sean Meiners on 2013/01/25.
-//
-//
-
-#import "GPUImageMovieComposition.h"
-#import "GPUImageMovieWriter.h"
-
-@implementation GPUImageMovieComposition
-
-@synthesize compositon = _compositon;
-@synthesize videoComposition = _videoComposition;
-@synthesize audioMix = _audioMix;
-
-- (id)initWithComposition:(AVComposition*)compositon
- andVideoComposition:(AVVideoComposition*)videoComposition
- andAudioMix:(AVAudioMix*)audioMix {
- if (!(self = [super init]))
- {
- return nil;
- }
-
- [self yuvConversionSetup];
-
- self.compositon = compositon;
- self.videoComposition = videoComposition;
- self.audioMix = audioMix;
-
- return self;
-}
-
-- (AVAssetReader*)createAssetReader
- {
- //NSLog(@"creating reader from composition: %@, video: %@, audio: %@ with duration: %@", _compositon, _videoComposition, _audioMix, CFBridgingRelease(CMTimeCopyDescription(kCFAllocatorDefault, _compositon.duration)));
-
- NSError *error = nil;
- AVAssetReader *assetReader = [AVAssetReader assetReaderWithAsset:self.compositon error:&error];
-
- NSDictionary *outputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)};
- AVAssetReaderVideoCompositionOutput *readerVideoOutput = [AVAssetReaderVideoCompositionOutput assetReaderVideoCompositionOutputWithVideoTracks:[_compositon tracksWithMediaType:AVMediaTypeVideo]
- videoSettings:outputSettings];
-#if ! TARGET_IPHONE_SIMULATOR
- if( [_videoComposition isKindOfClass:[AVMutableVideoComposition class]] )
- [(AVMutableVideoComposition*)_videoComposition setRenderScale:1.0];
-#endif
- readerVideoOutput.videoComposition = self.videoComposition;
- readerVideoOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerVideoOutput];
-
- NSArray *audioTracks = [_compositon tracksWithMediaType:AVMediaTypeAudio];
- BOOL shouldRecordAudioTrack = (([audioTracks count] > 0) && (self.audioEncodingTarget != nil) );
- AVAssetReaderAudioMixOutput *readerAudioOutput = nil;
-
- if (shouldRecordAudioTrack)
- {
- [self.audioEncodingTarget setShouldInvalidateAudioSampleWhenDone:YES];
-
- readerAudioOutput = [AVAssetReaderAudioMixOutput assetReaderAudioMixOutputWithAudioTracks:audioTracks audioSettings:nil];
- readerAudioOutput.audioMix = self.audioMix;
- readerAudioOutput.alwaysCopiesSampleData = NO;
- [assetReader addOutput:readerAudioOutput];
- }
-
- return assetReader;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.h
deleted file mode 100755
index 5ebc28b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.m
deleted file mode 100755
index ed64707..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageMultiplyBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageMultiplyBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 base = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
- }
-);
-#else
-NSString *const kGPUImageMultiplyBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayer = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = overlayer * base + overlayer * (1.0 - base.a) + base * (1.0 - overlayer.a);
- }
-);
-#endif
-
-@implementation GPUImageMultiplyBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageMultiplyBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.h
deleted file mode 100644
index 963fd66..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#import "GPUImageHarrisCornerDetectionFilter.h"
-
-/** Noble corner detector
-
- This is the Noble variant on the Harris detector, from
- Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
-*/
-
-
-@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.m
deleted file mode 100644
index aa6b304..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNobleCornerDetectionFilter.m
+++ /dev/null
@@ -1,74 +0,0 @@
-#import "GPUImageNobleCornerDetectionFilter.h"
-
-@implementation GPUImageNobleCornerDetectionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float sensitivity;
-
- void main()
- {
- mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- mediump float derivativeSum = derivativeElements.x + derivativeElements.y;
-
- // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)
- mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
- // mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);
- mediump float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);
-
- // Original Harris detector
- // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
- // highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;
-
- // gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#else
-NSString *const kGPUImageNobleCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float sensitivity;
-
- void main()
- {
- vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- float derivativeSum = derivativeElements.x + derivativeElements.y;
-
- // R = (Ix^2 * Iy^2 - Ixy * Ixy) / (Ix^2 + Iy^2)
- float zElement = (derivativeElements.z * 2.0) - 1.0;
- // mediump float harrisIntensity = (derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z)) / (derivativeSum);
- float cornerness = (derivativeElements.x * derivativeElements.y - (zElement * zElement)) / (derivativeSum);
-
- // Original Harris detector
- // R = Ix^2 * Iy^2 - Ixy * Ixy - k * (Ix^2 + Iy^2)^2
- // highp float harrisIntensity = derivativeElements.x * derivativeElements.y - (derivativeElements.z * derivativeElements.z) - harrisConstant * derivativeSum * derivativeSum;
-
- // gl_FragColor = vec4(vec3(harrisIntensity * 7.0), 1.0);
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageNobleCornerDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.h
deleted file mode 100644
index fd8fe6d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.m
deleted file mode 100644
index eaf7ce5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNonMaximumSuppressionFilter.m
+++ /dev/null
@@ -1,107 +0,0 @@
-#import "GPUImageNonMaximumSuppressionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 leftTextureCoordinate;
- varying highp vec2 rightTextureCoordinate;
-
- varying highp vec2 topTextureCoordinate;
- varying highp vec2 topLeftTextureCoordinate;
- varying highp vec2 topRightTextureCoordinate;
-
- varying highp vec2 bottomTextureCoordinate;
- varying highp vec2 bottomLeftTextureCoordinate;
- varying highp vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
- lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
- lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
- lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- lowp float multiplier = 1.0 - step(centerColor.r, topColor);
- multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
-
- lowp float maxValue = max(centerColor.r, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);
- }
-);
-#else
-NSString *const kGPUImageNonMaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
- float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- float multiplier = 1.0 - step(centerColor.r, topColor);
- multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
-
- float maxValue = max(centerColor.r, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- gl_FragColor = vec4((centerColor.rgb * step(maxValue, centerColor.r) * multiplier), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageNonMaximumSuppressionFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageNonMaximumSuppressionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.h
deleted file mode 100644
index ce5e22b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.h
+++ /dev/null
@@ -1,8 +0,0 @@
-// Created by Jorge Garcia on 9/5/12.
-//
-
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.m
deleted file mode 100644
index f5b5069..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageNormalBlendFilter.m
+++ /dev/null
@@ -1,96 +0,0 @@
-// Created by Jorge Garcia on 9/5/12.
-
-#import "GPUImageNormalBlendFilter.h"
-/*
- This equation is a simplification of the general blending equation. It assumes the destination color is opaque, and therefore drops the destination color's alpha term.
-
- D = C1 * C1a + C2 * C2a * (1 - C1a)
- where D is the resultant color, C1 is the color of the first element, C1a is the alpha of the first element, C2 is the second element color, C2a is the alpha of the second element. The destination alpha is calculated with:
-
- Da = C1a + C2a * (1 - C1a)
- The resultant color is premultiplied with the alpha. To restore the color to the unmultiplied values, just divide by Da, the resultant alpha.
-
- http://stackoverflow.com/questions/1724946/blend-mode-on-a-transparent-and-semi-transparent-background
-
- For some reason Photoshop behaves
- D = C1 + C2 * C2a * (1 - C1a)
- */
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 c2 = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);
-
- lowp vec4 outputColor;
-
-// outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);
-// outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);
-// outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);
-// outputColor.a = c1.a + c2.a * (1.0 - c1.a);
-
- lowp float a = c1.a + c2.a * (1.0 - c1.a);
- lowp float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output
-
- outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.a = a;
-
- gl_FragColor = outputColor;
- }
-);
-#else
-NSString *const kGPUImageNormalBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 c2 = texture2D(inputImageTexture, textureCoordinate);
- vec4 c1 = texture2D(inputImageTexture2, textureCoordinate2);
-
- vec4 outputColor;
-
- // outputColor.r = c1.r + c2.r * c2.a * (1.0 - c1.a);
- // outputColor.g = c1.g + c2.g * c2.a * (1.0 - c1.a);
- // outputColor.b = c1.b + c2.b * c2.a * (1.0 - c1.a);
- // outputColor.a = c1.a + c2.a * (1.0 - c1.a);
-
- float a = c1.a + c2.a * (1.0 - c1.a);
- float alphaDivisor = a + step(a, 0.0); // Protect against a divide-by-zero blacking out things in the output
-
- outputColor.r = (c1.r * c1.a + c2.r * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.g = (c1.g * c1.a + c2.g * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.b = (c1.b * c1.a + c2.b * c2.a * (1.0 - c1.a))/alphaDivisor;
- outputColor.a = a;
-
- gl_FragColor = outputColor;
- }
-);
-#endif
-
-@implementation GPUImageNormalBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageNormalBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.h
deleted file mode 100644
index 826749f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageOpacityFilter : GPUImageFilter
-{
- GLint opacityUniform;
-}
-
-// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
-@property(readwrite, nonatomic) CGFloat opacity;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.m
deleted file mode 100644
index b74acb6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOpacityFilter.m
+++ /dev/null
@@ -1,65 +0,0 @@
-#import "GPUImageOpacityFilter.h"
-
-@implementation GPUImageOpacityFilter
-
-@synthesize opacity = _opacity;
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float opacity;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
- }
-);
-#else
-NSString *const kGPUImageOpacityFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float opacity;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.rgb, textureColor.a * opacity);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageOpacityFragmentShaderString]))
- {
- return nil;
- }
-
- opacityUniform = [filterProgram uniformIndex:@"opacity"];
- self.opacity = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setOpacity:(CGFloat)newValue;
-{
- _opacity = newValue;
-
- [self setFloat:_opacity forUniform:opacityUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.h
deleted file mode 100644
index 3e4f754..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageErosionFilter;
-@class GPUImageDilationFilter;
-
-// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
-// This helps to filter out smaller bright elements.
-
-@interface GPUImageOpeningFilter : GPUImageFilterGroup
-{
- GPUImageErosionFilter *erosionFilter;
- GPUImageDilationFilter *dilationFilter;
-}
-
-@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
-
-- (id)initWithRadius:(NSUInteger)radius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.m
deleted file mode 100644
index 4e7a565..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOpeningFilter.m
+++ /dev/null
@@ -1,57 +0,0 @@
-#import "GPUImageOpeningFilter.h"
-#import "GPUImageErosionFilter.h"
-#import "GPUImageDilationFilter.h"
-
-@implementation GPUImageOpeningFilter
-
-@synthesize verticalTexelSpacing = _verticalTexelSpacing;
-@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: erosion
- erosionFilter = [[GPUImageErosionFilter alloc] initWithRadius:radius];
- [self addFilter:erosionFilter];
-
- // Second pass: dilation
- dilationFilter = [[GPUImageDilationFilter alloc] initWithRadius:radius];
- [self addFilter:dilationFilter];
-
- [erosionFilter addTarget:dilationFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];
- self.terminalFilter = dilationFilter;
-
- return self;
-}
-
-- (void)setVerticalTexelSpacing:(CGFloat)newValue;
-{
- _verticalTexelSpacing = newValue;
- erosionFilter.verticalTexelSpacing = newValue;
- dilationFilter.verticalTexelSpacing = newValue;
-}
-
-- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
-{
- _horizontalTexelSpacing = newValue;
- erosionFilter.horizontalTexelSpacing = newValue;
- dilationFilter.horizontalTexelSpacing = newValue;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOutput.h b/Example/Pods/GPUImage/framework/Source/GPUImageOutput.h
deleted file mode 100755
index a1af54d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOutput.h
+++ /dev/null
@@ -1,127 +0,0 @@
-#import "GPUImageContext.h"
-#import "GPUImageFramebuffer.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-#import
-#else
-// For now, just redefine this on the Mac
-typedef NS_ENUM(NSInteger, UIImageOrientation) {
- UIImageOrientationUp, // default orientation
- UIImageOrientationDown, // 180 deg rotation
- UIImageOrientationLeft, // 90 deg CCW
- UIImageOrientationRight, // 90 deg CW
- UIImageOrientationUpMirrored, // as above but image mirrored along other axis. horizontal flip
- UIImageOrientationDownMirrored, // horizontal flip
- UIImageOrientationLeftMirrored, // vertical flip
- UIImageOrientationRightMirrored, // vertical flip
-};
-#endif
-
-void runOnMainQueueWithoutDeadlocking(void (^block)(void));
-void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
-void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
-void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
-void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
-void reportAvailableMemoryForGPUImage(NSString *tag);
-
-@class GPUImageMovieWriter;
-
-/** GPUImage's base source object
-
- Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:
-
- - GPUImageVideoCamera (for live video from an iOS camera)
- - GPUImageStillCamera (for taking photos with the camera)
- - GPUImagePicture (for still images)
- - GPUImageMovie (for movies)
-
- Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
- */
-@interface GPUImageOutput : NSObject
-{
- GPUImageFramebuffer *outputFramebuffer;
-
- NSMutableArray *targets, *targetTextureIndices;
-
- CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;
-
- BOOL overrideInputSize;
-
- BOOL allTargetsWantMonochromeData;
- BOOL usingNextFrameForImageCapture;
-}
-
-@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
-@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
-@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
-@property(readwrite, nonatomic, unsafe_unretained) id targetToIgnoreForUpdates;
-@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
-@property(nonatomic) BOOL enabled;
-@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;
-
-/// @name Managing targets
-- (void)setInputFramebufferForTarget:(id)target atIndex:(NSInteger)inputTextureIndex;
-- (GPUImageFramebuffer *)framebufferForOutput;
-- (void)removeOutputFramebuffer;
-- (void)notifyTargetsAboutNewOutputTexture;
-
-/** Returns an array of the current targets.
- */
-- (NSArray*)targets;
-
-/** Adds a target to receive notifications when new frames are available.
-
- The target will be asked for its next available texture.
-
- See [GPUImageInput newFrameReadyAtTime:]
-
- @param newTarget Target to be added
- */
-- (void)addTarget:(id)newTarget;
-
-/** Adds a target to receive notifications when new frames are available.
-
- See [GPUImageInput newFrameReadyAtTime:]
-
- @param newTarget Target to be added
- */
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-
-/** Removes a target. The target will no longer receive notifications when new frames are available.
-
- @param targetToRemove Target to be removed
- */
-- (void)removeTarget:(id)targetToRemove;
-
-/** Removes all targets.
- */
-- (void)removeAllTargets;
-
-/// @name Manage the output texture
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
-
-/// @name Still image processing
-
-- (void)useNextFrameForImageCapture;
-- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
-- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
-
-// Platform-specific image output methods
-// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-- (UIImage *)imageFromCurrentFramebuffer;
-- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
-- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
-- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
-#else
-- (NSImage *)imageFromCurrentFramebuffer;
-- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
-- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
-- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
-#endif
-
-- (BOOL)providesMonochromeOutput;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOutput.m b/Example/Pods/GPUImage/framework/Source/GPUImageOutput.m
deleted file mode 100755
index 2817a44..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOutput.m
+++ /dev/null
@@ -1,428 +0,0 @@
-#import "GPUImageOutput.h"
-#import "GPUImageMovieWriter.h"
-#import "GPUImagePicture.h"
-#import
-
-void runOnMainQueueWithoutDeadlocking(void (^block)(void))
-{
- if ([NSThread isMainThread])
- {
- block();
- }
- else
- {
- dispatch_sync(dispatch_get_main_queue(), block);
- }
-}
-
-void runSynchronouslyOnVideoProcessingQueue(void (^block)(void))
-{
- dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
-#if !OS_OBJECT_USE_OBJC
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if (dispatch_get_current_queue() == videoProcessingQueue)
-#pragma clang diagnostic pop
-#else
- if (dispatch_get_specific([GPUImageContext contextKey]))
-#endif
- {
- block();
- }else
- {
- dispatch_sync(videoProcessingQueue, block);
- }
-}
-
-void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void))
-{
- dispatch_queue_t videoProcessingQueue = [GPUImageContext sharedContextQueue];
-
-#if !OS_OBJECT_USE_OBJC
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if (dispatch_get_current_queue() == videoProcessingQueue)
-#pragma clang diagnostic pop
-#else
- if (dispatch_get_specific([GPUImageContext contextKey]))
-#endif
- {
- block();
- }else
- {
- dispatch_async(videoProcessingQueue, block);
- }
-}
-
-void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
-{
- dispatch_queue_t videoProcessingQueue = [context contextQueue];
-#if !OS_OBJECT_USE_OBJC
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if (dispatch_get_current_queue() == videoProcessingQueue)
-#pragma clang diagnostic pop
-#else
- if (dispatch_get_specific([GPUImageContext contextKey]))
-#endif
- {
- block();
- }else
- {
- dispatch_sync(videoProcessingQueue, block);
- }
-}
-
-void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void))
-{
- dispatch_queue_t videoProcessingQueue = [context contextQueue];
-
-#if !OS_OBJECT_USE_OBJC
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if (dispatch_get_current_queue() == videoProcessingQueue)
-#pragma clang diagnostic pop
-#else
- if (dispatch_get_specific([GPUImageContext contextKey]))
-#endif
- {
- block();
- }else
- {
- dispatch_async(videoProcessingQueue, block);
- }
-}
-
-void reportAvailableMemoryForGPUImage(NSString *tag)
-{
- if (!tag)
- tag = @"Default";
-
- struct task_basic_info info;
-
- mach_msg_type_number_t size = sizeof(info);
-
- kern_return_t kerr = task_info(mach_task_self(),
-
- TASK_BASIC_INFO,
-
- (task_info_t)&info,
-
- &size);
- if( kerr == KERN_SUCCESS ) {
- NSLog(@"%@ - Memory used: %u", tag, (unsigned int)info.resident_size); //in bytes
- } else {
- NSLog(@"%@ - Error: %s", tag, mach_error_string(kerr));
- }
-}
-
-@implementation GPUImageOutput
-
-@synthesize shouldSmoothlyScaleOutput = _shouldSmoothlyScaleOutput;
-@synthesize shouldIgnoreUpdatesToThisTarget = _shouldIgnoreUpdatesToThisTarget;
-@synthesize audioEncodingTarget = _audioEncodingTarget;
-@synthesize targetToIgnoreForUpdates = _targetToIgnoreForUpdates;
-@synthesize frameProcessingCompletionBlock = _frameProcessingCompletionBlock;
-@synthesize enabled = _enabled;
-@synthesize outputTextureOptions = _outputTextureOptions;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- targets = [[NSMutableArray alloc] init];
- targetTextureIndices = [[NSMutableArray alloc] init];
- _enabled = YES;
- allTargetsWantMonochromeData = YES;
- usingNextFrameForImageCapture = NO;
-
- // set default texture options
- _outputTextureOptions.minFilter = GL_LINEAR;
- _outputTextureOptions.magFilter = GL_LINEAR;
- _outputTextureOptions.wrapS = GL_CLAMP_TO_EDGE;
- _outputTextureOptions.wrapT = GL_CLAMP_TO_EDGE;
- _outputTextureOptions.internalFormat = GL_RGBA;
- _outputTextureOptions.format = GL_BGRA;
- _outputTextureOptions.type = GL_UNSIGNED_BYTE;
-
- return self;
-}
-
-- (void)dealloc
-{
- [self removeAllTargets];
-}
-
-#pragma mark -
-#pragma mark Managing targets
-
-- (void)setInputFramebufferForTarget:(id)target atIndex:(NSInteger)inputTextureIndex;
-{
- [target setInputFramebuffer:[self framebufferForOutput] atIndex:inputTextureIndex];
-}
-
-- (GPUImageFramebuffer *)framebufferForOutput;
-{
- return outputFramebuffer;
-}
-
-- (void)removeOutputFramebuffer;
-{
- outputFramebuffer = nil;
-}
-
-- (void)notifyTargetsAboutNewOutputTexture;
-{
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [self setInputFramebufferForTarget:currentTarget atIndex:textureIndex];
- }
-}
-
-- (NSArray*)targets;
-{
- return [NSArray arrayWithArray:targets];
-}
-
-- (void)addTarget:(id)newTarget;
-{
- NSInteger nextAvailableTextureIndex = [newTarget nextAvailableTextureIndex];
- [self addTarget:newTarget atTextureLocation:nextAvailableTextureIndex];
-
- if ([newTarget shouldIgnoreUpdatesToThisTarget])
- {
- _targetToIgnoreForUpdates = newTarget;
- }
-}
-
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-{
- if([targets containsObject:newTarget])
- {
- return;
- }
-
- cachedMaximumOutputSize = CGSizeZero;
- runSynchronouslyOnVideoProcessingQueue(^{
- [self setInputFramebufferForTarget:newTarget atIndex:textureLocation];
- [targets addObject:newTarget];
- [targetTextureIndices addObject:[NSNumber numberWithInteger:textureLocation]];
-
- allTargetsWantMonochromeData = allTargetsWantMonochromeData && [newTarget wantsMonochromeInput];
- });
-}
-
-- (void)removeTarget:(id)targetToRemove;
-{
- if(![targets containsObject:targetToRemove])
- {
- return;
- }
-
- if (_targetToIgnoreForUpdates == targetToRemove)
- {
- _targetToIgnoreForUpdates = nil;
- }
-
- cachedMaximumOutputSize = CGSizeZero;
-
- NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
- [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
-
- [targetTextureIndices removeObjectAtIndex:indexOfObject];
- [targets removeObject:targetToRemove];
- [targetToRemove endProcessing];
- });
-}
-
-- (void)removeAllTargets;
-{
- cachedMaximumOutputSize = CGSizeZero;
- runSynchronouslyOnVideoProcessingQueue(^{
- for (id targetToRemove in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:targetToRemove];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [targetToRemove setInputSize:CGSizeZero atIndex:textureIndexOfTarget];
- [targetToRemove setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
- }
- [targets removeAllObjects];
- [targetTextureIndices removeAllObjects];
-
- allTargetsWantMonochromeData = YES;
- });
-}
-
-#pragma mark -
-#pragma mark Manage the output texture
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
-
-}
-
-- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;
-{
-}
-
-#pragma mark -
-#pragma mark Still image processing
-
-- (void)useNextFrameForImageCapture;
-{
-
-}
-
-- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
-{
- return nil;
-}
-
-- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;
-{
- GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithCGImage:imageToFilter];
-
- [self useNextFrameForImageCapture];
- [stillImageSource addTarget:(id)self];
- [stillImageSource processImage];
-
- CGImageRef processedImage = [self newCGImageFromCurrentlyProcessedOutput];
-
- [stillImageSource removeTarget:(id)self];
- return processedImage;
-}
-
-- (BOOL)providesMonochromeOutput;
-{
- return NO;
-}
-
-#pragma mark -
-#pragma mark Platform-specific image output methods
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-
-- (UIImage *)imageFromCurrentFramebuffer;
-{
- UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
- UIImageOrientation imageOrientation = UIImageOrientationLeft;
- switch (deviceOrientation)
- {
- case UIDeviceOrientationPortrait:
- imageOrientation = UIImageOrientationUp;
- break;
- case UIDeviceOrientationPortraitUpsideDown:
- imageOrientation = UIImageOrientationDown;
- break;
- case UIDeviceOrientationLandscapeLeft:
- imageOrientation = UIImageOrientationLeft;
- break;
- case UIDeviceOrientationLandscapeRight:
- imageOrientation = UIImageOrientationRight;
- break;
- default:
- imageOrientation = UIImageOrientationUp;
- break;
- }
-
- return [self imageFromCurrentFramebufferWithOrientation:imageOrientation];
-}
-
-- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
-{
- CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
- UIImage *finalImage = [UIImage imageWithCGImage:cgImageFromBytes scale:1.0 orientation:imageOrientation];
- CGImageRelease(cgImageFromBytes);
-
- return finalImage;
-}
-
-- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
-{
- CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
- UIImage *processedImage = [UIImage imageWithCGImage:image scale:[imageToFilter scale] orientation:[imageToFilter imageOrientation]];
- CGImageRelease(image);
- return processedImage;
-}
-
-- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter
-{
- return [self newCGImageByFilteringCGImage:[imageToFilter CGImage]];
-}
-
-#else
-
-- (NSImage *)imageFromCurrentFramebuffer;
-{
- return [self imageFromCurrentFramebufferWithOrientation:UIImageOrientationLeft];
-}
-
-- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
-{
- CGImageRef cgImageFromBytes = [self newCGImageFromCurrentlyProcessedOutput];
- NSImage *finalImage = [[NSImage alloc] initWithCGImage:cgImageFromBytes size:NSZeroSize];
- CGImageRelease(cgImageFromBytes);
-
- return finalImage;
-}
-
-- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
-{
- CGImageRef image = [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
- NSImage *processedImage = [[NSImage alloc] initWithCGImage:image size:NSZeroSize];
- CGImageRelease(image);
- return processedImage;
-}
-
-- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter
-{
- return [self newCGImageByFilteringCGImage:[imageToFilter CGImageForProposedRect:NULL context:[NSGraphicsContext currentContext] hints:nil]];
-}
-
-#endif
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
-{
- _audioEncodingTarget = newValue;
- if( ! _audioEncodingTarget.hasAudioTrack )
- {
- _audioEncodingTarget.hasAudioTrack = YES;
- }
-}
-
--(void)setOutputTextureOptions:(GPUTextureOptions)outputTextureOptions
-{
- _outputTextureOptions = outputTextureOptions;
-
- if( outputFramebuffer.texture )
- {
- glBindTexture(GL_TEXTURE_2D, outputFramebuffer.texture);
- //_outputTextureOptions.format
- //_outputTextureOptions.internalFormat
- //_outputTextureOptions.magFilter
- //_outputTextureOptions.minFilter
- //_outputTextureOptions.type
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, _outputTextureOptions.wrapS);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, _outputTextureOptions.wrapT);
- glBindTexture(GL_TEXTURE_2D, 0);
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.h
deleted file mode 100755
index 57eb840..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.m
deleted file mode 100755
index c8c5185..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageOverlayBlendFilter.m
+++ /dev/null
@@ -1,94 +0,0 @@
-#import "GPUImageOverlayBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- mediump float ra;
- if (2.0 * base.r < base.a) {
- ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- }
-
- mediump float ga;
- if (2.0 * base.g < base.a) {
- ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- }
-
- mediump float ba;
- if (2.0 * base.b < base.a) {
- ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- }
-
- gl_FragColor = vec4(ra, ga, ba, 1.0);
- }
-);
-#else
-NSString *const kGPUImageOverlayBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- float ra;
- if (2.0 * base.r < base.a) {
- ra = 2.0 * overlay.r * base.r + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- } else {
- ra = overlay.a * base.a - 2.0 * (base.a - base.r) * (overlay.a - overlay.r) + overlay.r * (1.0 - base.a) + base.r * (1.0 - overlay.a);
- }
-
- float ga;
- if (2.0 * base.g < base.a) {
- ga = 2.0 * overlay.g * base.g + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- } else {
- ga = overlay.a * base.a - 2.0 * (base.a - base.g) * (overlay.a - overlay.g) + overlay.g * (1.0 - base.a) + base.g * (1.0 - overlay.a);
- }
-
- float ba;
- if (2.0 * base.b < base.a) {
- ba = 2.0 * overlay.b * base.b + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- } else {
- ba = overlay.a * base.a - 2.0 * (base.a - base.b) * (overlay.a - overlay.b) + overlay.b * (1.0 - base.a) + base.b * (1.0 - overlay.a);
- }
-
- gl_FragColor = vec4(ra, ga, ba, 1.0);
- }
-);
-#endif
-
-@implementation GPUImageOverlayBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageOverlayBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.h
deleted file mode 100644
index aa8f3f4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GPUImageFilter.h"
-
-// This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene.
-//
-// It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications:
-// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
-// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494.
-
-@interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter
-{
- GLubyte *rawImagePixels;
- GLfloat *lineCoordinates;
- unsigned int maxLinePairsToRender, linePairsToRender;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.m
deleted file mode 100644
index 0a2f6e4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageParallelCoordinateLineTransformFilter.m
+++ /dev/null
@@ -1,266 +0,0 @@
-#import "GPUImageParallelCoordinateLineTransformFilter.h"
-
-NSString *const kGPUImageHoughAccumulationVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
-
- void main()
- {
- gl_Position = position;
- }
-);
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING
-(
- const lowp float scalingFactor = 1.0 / 256.0;
-
- void main()
- {
- gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
- }
-);
-
-// highp - 16-bit, floating point range: -2^62 to 2^62, integer range: -2^16 to 2^16
-// NOTE: See below for where I'm tacking on the required extension as a prefix
-NSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING
-(
- const lowp float scalingFactor = 0.004;
-// const lowp float scalingFactor = 0.1;
-
- void main()
- {
- mediump vec4 fragmentData = gl_LastFragData[0];
-
- fragmentData.r = fragmentData.r + scalingFactor;
- fragmentData.g = scalingFactor * floor(fragmentData.r) + fragmentData.g;
- fragmentData.b = scalingFactor * floor(fragmentData.g) + fragmentData.b;
- fragmentData.a = scalingFactor * floor(fragmentData.b) + fragmentData.a;
-
- fragmentData = fract(fragmentData);
-
- gl_FragColor = vec4(fragmentData.rgb, 1.0);
- }
-);
-
-#else
-NSString *const kGPUImageHoughAccumulationFragmentShaderString = SHADER_STRING
-(
- const float scalingFactor = 1.0 / 256.0;
-
- void main()
- {
- gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
- }
-);
-
-NSString *const kGPUImageHoughAccumulationFBOReadFragmentShaderString = SHADER_STRING
-(
- const float scalingFactor = 1.0 / 256.0;
-
- void main()
- {
- // gl_FragColor = vec4(scalingFactor, scalingFactor, scalingFactor, 1.0);
- gl_FragColor = vec4(0.004, 0.004, 0.004, 1.0);
- }
-);
-#endif
-
-@interface GPUImageParallelCoordinateLineTransformFilter()
-// Rendering
-- (void)generateLineCoordinates;
-
-@end
-
-@implementation GPUImageParallelCoordinateLineTransformFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- NSString *fragmentShaderToUse = nil;
-
- if ([GPUImageContext deviceSupportsFramebufferReads])
- {
- fragmentShaderToUse = [NSString stringWithFormat:@"#extension GL_EXT_shader_framebuffer_fetch : require\n %@",kGPUImageHoughAccumulationFBOReadFragmentShaderString];
- }
- else
- {
- fragmentShaderToUse = kGPUImageHoughAccumulationFragmentShaderString;
- }
-
- if (!(self = [super initWithVertexShaderFromString:kGPUImageHoughAccumulationVertexShaderString fragmentShaderFromString:fragmentShaderToUse]))
- {
- return nil;
- }
-
-
- return self;
-}
-
-// TODO: have this be regenerated on change of image size
-- (void)dealloc;
-{
- free(rawImagePixels);
- free(lineCoordinates);
-}
-
-- (void)initializeAttributes;
-{
- [filterProgram addAttribute:@"position"];
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-#define MAXLINESCALINGFACTOR 4
-
-- (void)generateLineCoordinates;
-{
- unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;
- rawImagePixels = (GLubyte *)malloc(imageByteSize);
-
- maxLinePairsToRender = (inputTextureSize.width * inputTextureSize.height) / MAXLINESCALINGFACTOR;
- lineCoordinates = calloc(maxLinePairsToRender * 8, sizeof(GLfloat));
-}
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- if (lineCoordinates == NULL)
- {
- [self generateLineCoordinates];
- }
-
- [self renderToTextureWithVertices:NULL textureCoordinates:NULL];
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // we need a normal color texture for this filter
- NSAssert(self.outputTextureOptions.internalFormat == GL_RGBA, @"The output texture format for this filter must be GL_RGBA.");
- NSAssert(self.outputTextureOptions.type == GL_UNSIGNED_BYTE, @"The type of the output texture of this filter must be GL_UNSIGNED_BYTE.");
-
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- // Grab the edge points from the previous frame and create the parallel coordinate lines for them
- // This would be a great place to have a working histogram pyramid implementation
-
- [GPUImageContext useImageProcessingContext];
- [firstInputFramebuffer activateFramebuffer];
-
- glFinish();
- glReadPixels(0, 0, inputTextureSize.width, inputTextureSize.height, GL_RGBA, GL_UNSIGNED_BYTE, rawImagePixels);
-
- CGFloat xAspectMultiplier = 1.0, yAspectMultiplier = 1.0;
-
-// if (inputTextureSize.width > inputTextureSize.height)
-// {
-// yAspectMultiplier = inputTextureSize.height / inputTextureSize.width;
-// }
-// else
-// {
-// xAspectMultiplier = inputTextureSize.width / inputTextureSize.height;
-// }
-
-// CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
-
- unsigned int imageByteSize = inputTextureSize.width * inputTextureSize.height * 4;
- unsigned int imageWidth = inputTextureSize.width * 4;
-
- linePairsToRender = 0;
- unsigned int currentByte = 0;
- unsigned int lineStorageIndex = 0;
- unsigned int maxLineStorageIndex = maxLinePairsToRender * 8 - 8;
-
- GLfloat minY = 100, maxY = -100, minX = 100, maxX = -100;
- while (currentByte < imageByteSize)
- {
- GLubyte colorByte = rawImagePixels[currentByte];
-
- if (colorByte > 0)
- {
- unsigned int xCoordinate = currentByte % imageWidth;
- unsigned int yCoordinate = currentByte / imageWidth;
-
- CGFloat normalizedXCoordinate = (-1.0 + 2.0 * (CGFloat)(xCoordinate / 4) / inputTextureSize.width) * xAspectMultiplier;
- CGFloat normalizedYCoordinate = (-1.0 + 2.0 * (CGFloat)(yCoordinate) / inputTextureSize.height) * yAspectMultiplier;
-
- minY = MIN(minY, normalizedYCoordinate);
- maxY = MAX(maxY, normalizedYCoordinate);
- minX = MIN(minX, normalizedXCoordinate);
- maxX = MAX(maxX, normalizedXCoordinate);
-
-// NSLog(@"Parallel line coordinates: (%f, %f) - (%f, %f) - (%f, %f)", -1.0, -normalizedYCoordinate, 0.0, normalizedXCoordinate, 1.0, normalizedYCoordinate);
- // T space coordinates, (-d, -y) to (0, x)
- lineCoordinates[lineStorageIndex++] = -1.0;
- lineCoordinates[lineStorageIndex++] = -normalizedYCoordinate;
- lineCoordinates[lineStorageIndex++] = 0.0;
- lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;
-
- // S space coordinates, (0, x) to (d, y)
- lineCoordinates[lineStorageIndex++] = 0.0;
- lineCoordinates[lineStorageIndex++] = normalizedXCoordinate;
- lineCoordinates[lineStorageIndex++] = 1.0;
- lineCoordinates[lineStorageIndex++] = normalizedYCoordinate;
-
- linePairsToRender++;
-
- linePairsToRender = MIN(linePairsToRender, maxLinePairsToRender);
- lineStorageIndex = MIN(lineStorageIndex, maxLineStorageIndex);
- }
- currentByte +=8;
- }
-
-// NSLog(@"Line pairs to render: %d out of max: %d", linePairsToRender, maxLinePairsToRender);
-
-// CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
-// NSLog(@"Line generation processing time : %f ms", 1000.0 * currentFrameTime);
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(0.0, 0.0, 0.0, 1.0);
- glClear(GL_COLOR_BUFFER_BIT);
-
- if (![GPUImageContext deviceSupportsFramebufferReads])
- {
- glBlendEquation(GL_FUNC_ADD);
- glBlendFunc(GL_ONE, GL_ONE);
- glEnable(GL_BLEND);
- }
- else
- {
- }
-
- glLineWidth(1);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, lineCoordinates);
- glDrawArrays(GL_LINES, 0, (linePairsToRender * 4));
-
- if (![GPUImageContext deviceSupportsFramebufferReads])
- {
- glDisable(GL_BLEND);
- }
- [firstInputFramebuffer unlock];
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.h
deleted file mode 100644
index 922f4d3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImagePerlinNoiseFilter : GPUImageFilter
-{
- GLint scaleUniform, colorStartUniform, colorFinishUniform;
-}
-
-@property (readwrite, nonatomic) GPUVector4 colorStart;
-@property (readwrite, nonatomic) GPUVector4 colorFinish;
-
-@property (readwrite, nonatomic) float scale;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.m
deleted file mode 100644
index 9ca7cba..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePerlinNoiseFilter.m
+++ /dev/null
@@ -1,239 +0,0 @@
-#import "GPUImagePerlinNoiseFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING
-(
- precision highp float;
- varying highp vec2 textureCoordinate;
- uniform float scale;
-
- uniform vec4 colorStart;
- uniform vec4 colorFinish;
-
- //
- // Description : Array and textureless GLSL 2D/3D/4D simplex
- // noise functions.
- // Author : Ian McEwan, Ashima Arts.
- // Maintainer : ijm
- // Lastmod : 20110822 (ijm)
- // License : Copyright (C) 2011 Ashima Arts. All rights reserved.
- // Distributed under the MIT License. See LICENSE file.
- // https://github.com/ashima/webgl-noise
- //
-
- vec4 mod289(vec4 x)
-{
- return x - floor(x * (1.0 / 289.0)) * 289.0;
-}
-
- vec4 permute(vec4 x)
-{
- return mod289(((x*34.0)+1.0)*x);
-}
-
- vec4 taylorInvSqrt(vec4 r)
-{
- return 1.79284291400159 - 0.85373472095314 * r;
-}
-
- vec2 fade(vec2 t) {
- return t*t*t*(t*(t*6.0-15.0)+10.0);
- }
-
- // Classic Perlin noise
- float cnoise(vec2 P)
-{
- vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);
- vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);
- Pi = mod289(Pi); // To avoid truncation effects in permutation
- vec4 ix = Pi.xzxz;
- vec4 iy = Pi.yyww;
- vec4 fx = Pf.xzxz;
- vec4 fy = Pf.yyww;
-
- vec4 i = permute(permute(ix) + iy);
-
- vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;
- vec4 gy = abs(gx) - 0.5 ;
- vec4 tx = floor(gx + 0.5);
- gx = gx - tx;
-
- vec2 g00 = vec2(gx.x,gy.x);
- vec2 g10 = vec2(gx.y,gy.y);
- vec2 g01 = vec2(gx.z,gy.z);
- vec2 g11 = vec2(gx.w,gy.w);
-
- vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));
- g00 *= norm.x;
- g01 *= norm.y;
- g10 *= norm.z;
- g11 *= norm.w;
-
- float n00 = dot(g00, vec2(fx.x, fy.x));
- float n10 = dot(g10, vec2(fx.y, fy.y));
- float n01 = dot(g01, vec2(fx.z, fy.z));
- float n11 = dot(g11, vec2(fx.w, fy.w));
-
- vec2 fade_xy = fade(Pf.xy);
- vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);
- float n_xy = mix(n_x.x, n_x.y, fade_xy.y);
- return 2.3 * n_xy;
-}
-
-
- void main()
- {
-
- float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;
-
- vec4 colorDiff = colorFinish - colorStart;
- vec4 color = colorStart + colorDiff * n1;
-
- gl_FragColor = color;
- }
-);
-#else
-NSString *const kGPUImagePerlinNoiseFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform float scale;
-
- uniform vec4 colorStart;
- uniform vec4 colorFinish;
-
- //
- // Description : Array and textureless GLSL 2D/3D/4D simplex
- // noise functions.
- // Author : Ian McEwan, Ashima Arts.
- // Maintainer : ijm
- // Lastmod : 20110822 (ijm)
- // License : Copyright (C) 2011 Ashima Arts. All rights reserved.
- // Distributed under the MIT License. See LICENSE file.
- // https://github.com/ashima/webgl-noise
- //
-
- vec4 mod289(vec4 x)
-{
- return x - floor(x * (1.0 / 289.0)) * 289.0;
-}
-
- vec4 permute(vec4 x)
-{
- return mod289(((x*34.0)+1.0)*x);
-}
-
- vec4 taylorInvSqrt(vec4 r)
-{
- return 1.79284291400159 - 0.85373472095314 * r;
-}
-
- vec2 fade(vec2 t) {
- return t*t*t*(t*(t*6.0-15.0)+10.0);
- }
-
- // Classic Perlin noise
- float cnoise(vec2 P)
-{
- vec4 Pi = floor(P.xyxy) + vec4(0.0, 0.0, 1.0, 1.0);
- vec4 Pf = fract(P.xyxy) - vec4(0.0, 0.0, 1.0, 1.0);
- Pi = mod289(Pi); // To avoid truncation effects in permutation
- vec4 ix = Pi.xzxz;
- vec4 iy = Pi.yyww;
- vec4 fx = Pf.xzxz;
- vec4 fy = Pf.yyww;
-
- vec4 i = permute(permute(ix) + iy);
-
- vec4 gx = fract(i * (1.0 / 41.0)) * 2.0 - 1.0 ;
- vec4 gy = abs(gx) - 0.5 ;
- vec4 tx = floor(gx + 0.5);
- gx = gx - tx;
-
- vec2 g00 = vec2(gx.x,gy.x);
- vec2 g10 = vec2(gx.y,gy.y);
- vec2 g01 = vec2(gx.z,gy.z);
- vec2 g11 = vec2(gx.w,gy.w);
-
- vec4 norm = taylorInvSqrt(vec4(dot(g00, g00), dot(g01, g01), dot(g10, g10), dot(g11, g11)));
- g00 *= norm.x;
- g01 *= norm.y;
- g10 *= norm.z;
- g11 *= norm.w;
-
- float n00 = dot(g00, vec2(fx.x, fy.x));
- float n10 = dot(g10, vec2(fx.y, fy.y));
- float n01 = dot(g01, vec2(fx.z, fy.z));
- float n11 = dot(g11, vec2(fx.w, fy.w));
-
- vec2 fade_xy = fade(Pf.xy);
- vec2 n_x = mix(vec2(n00, n01), vec2(n10, n11), fade_xy.x);
- float n_xy = mix(n_x.x, n_x.y, fade_xy.y);
- return 2.3 * n_xy;
- }
-
- void main()
- {
-
- float n1 = (cnoise(textureCoordinate * scale) + 1.0) / 2.0;
-
- vec4 colorDiff = colorFinish - colorStart;
- vec4 color = colorStart + colorDiff * n1;
-
- gl_FragColor = color;
- }
-);
-#endif
-
-
-@implementation GPUImagePerlinNoiseFilter
-
-@synthesize scale = _scale, colorStart = _colorStart, colorFinish = _colorFinish;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePerlinNoiseFragmentShaderString]))
- {
- return nil;
- }
-
- scaleUniform = [filterProgram uniformIndex:@"scale"];
-
- colorStartUniform = [filterProgram uniformIndex:@"colorStart"];
- colorFinishUniform = [filterProgram uniformIndex:@"colorFinish"];
-
- [self setScale:8.0];
-
- [self setColorStart:(GPUVector4){0.0, 0.0, 0.0, 1.0}];
- [self setColorFinish:(GPUVector4){1.0, 1.0, 1.0, 1.0}];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setScale:(float)scale
-{
- _scale = scale;
-
- [self setFloat:_scale forUniform:scaleUniform program:filterProgram];
-}
-
-- (void)setColorStart:(GPUVector4)colorStart
-{
- _colorStart = colorStart;
-
- [self setVec4:_colorStart forUniform:colorStartUniform program:filterProgram];
-}
-
-- (void)setColorFinish:(GPUVector4)colorFinish
-{
- _colorFinish = colorFinish;
-
- [self setVec4:_colorFinish forUniform:colorFinishUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.h
deleted file mode 100755
index 994774f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.h
+++ /dev/null
@@ -1,20 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Creates a pinch distortion of the image
- */
-@interface GPUImagePinchDistortionFilter : GPUImageFilter
-{
- GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
-}
-
-/** The center about which to apply the distortion, with a default of (0.5, 0.5)
- */
-@property(readwrite, nonatomic) CGPoint center;
-/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
- */
-@property(readwrite, nonatomic) CGFloat radius;
-/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
- */
-@property(readwrite, nonatomic) CGFloat scale;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.m
deleted file mode 100755
index 76d7909..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePinchDistortionFilter.m
+++ /dev/null
@@ -1,176 +0,0 @@
-#import "GPUImagePinchDistortionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float aspectRatio;
- uniform highp vec2 center;
- uniform highp float radius;
- uniform highp float scale;
-
- void main()
- {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float dist = distance(center, textureCoordinateToUse);
- textureCoordinateToUse = textureCoordinate;
-
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- highp float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;
- textureCoordinateToUse = textureCoordinateToUse * percent;
- textureCoordinateToUse += center;
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
- }
- }
-);
-#else
-NSString *const kGPUImagePinchDistortionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float aspectRatio;
- uniform vec2 center;
- uniform float radius;
- uniform float scale;
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float dist = distance(center, textureCoordinateToUse);
- textureCoordinateToUse = textureCoordinate;
-
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- float percent = 1.0 + ((0.5 - dist) / 0.5) * scale;
- textureCoordinateToUse = textureCoordinateToUse * percent;
- textureCoordinateToUse += center;
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
- }
- }
-);
-#endif
-
-@interface GPUImagePinchDistortionFilter ()
-
-- (void)adjustAspectRatio;
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
-
-@implementation GPUImagePinchDistortionFilter
-
-@synthesize aspectRatio = _aspectRatio;
-@synthesize center = _center;
-@synthesize radius = _radius;
-@synthesize scale = _scale;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePinchDistortionFragmentShaderString]))
- {
- return nil;
- }
-
- aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
- radiusUniform = [filterProgram uniformIndex:@"radius"];
- scaleUniform = [filterProgram uniformIndex:@"scale"];
- centerUniform = [filterProgram uniformIndex:@"center"];
-
- self.radius = 1.0;
- self.scale = 0.5;
- self.center = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
- [self adjustAspectRatio];
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
-}
-
-- (void)setRadius:(CGFloat)newValue;
-{
- _radius = newValue;
-
- [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
-}
-
-- (void)setScale:(CGFloat)newValue;
-{
- _scale = newValue;
-
- [self setFloat:_scale forUniform:scaleUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.h
deleted file mode 100755
index d0f6ae0..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImagePixellateFilter : GPUImageFilter
-{
- GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;
-}
-
-// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
-@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.m
deleted file mode 100755
index 88430d0..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePixellateFilter.m
+++ /dev/null
@@ -1,151 +0,0 @@
-#import "GPUImagePixellateFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float fractionalWidthOfPixel;
- uniform highp float aspectRatio;
-
- void main()
- {
- highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- gl_FragColor = texture2D(inputImageTexture, samplePos );
- }
-);
-#else
-NSString *const kGPUImagePixellationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float fractionalWidthOfPixel;
- uniform float aspectRatio;
-
- void main()
- {
- vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- gl_FragColor = texture2D(inputImageTexture, samplePos );
- }
-);
-#endif
-
-@interface GPUImagePixellateFilter ()
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-- (void)adjustAspectRatio;
-
-@end
-
-@implementation GPUImagePixellateFilter
-
-@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;
-@synthesize aspectRatio = _aspectRatio;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"];
- aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
-
- self.fractionalWidthOfAPixel = 0.05;
-
- return self;
-}
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self adjustAspectRatio];
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;
-{
- CGFloat singlePixelSpacing;
- if (inputTextureSize.width != 0.0)
- {
- singlePixelSpacing = 1.0 / inputTextureSize.width;
- }
- else
- {
- singlePixelSpacing = 1.0 / 2048.0;
- }
-
- if (newValue < singlePixelSpacing)
- {
- _fractionalWidthOfAPixel = singlePixelSpacing;
- }
- else
- {
- _fractionalWidthOfAPixel = newValue;
- }
-
- [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.h
deleted file mode 100755
index 9d304c9..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImagePixellatePositionFilter : GPUImageFilter
-{
- GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;
-}
-
-// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
-@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
-
-// the center point to start pixelation in texture coordinates, default 0.5, 0.5
-@property(readwrite, nonatomic) CGPoint center;
-
-// the radius (0.0 - 1.0) in which to pixelate, default 1.0
-@property(readwrite, nonatomic) CGFloat radius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.m
deleted file mode 100755
index f1bd09c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePixellatePositionFilter.m
+++ /dev/null
@@ -1,194 +0,0 @@
-#import "GPUImagePixellatePositionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float fractionalWidthOfPixel;
- uniform highp float aspectRatio;
- uniform lowp vec2 pixelateCenter;
- uniform highp float pixelateRadius;
-
- void main()
- {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float dist = distance(pixelateCenter, textureCoordinateToUse);
-
- if (dist < pixelateRadius)
- {
- highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
- highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- gl_FragColor = texture2D(inputImageTexture, samplePos );
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
- }
- }
-);
-#else
-NSString *const kGPUImagePixellationPositionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float fractionalWidthOfPixel;
- uniform float aspectRatio;
- uniform vec2 pixelateCenter;
- uniform float pixelateRadius;
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float dist = distance(pixelateCenter, textureCoordinateToUse);
-
- if (dist < pixelateRadius)
- {
- vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
- vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- gl_FragColor = texture2D(inputImageTexture, samplePos );
- }
- else
- {
- gl_FragColor = texture2D(inputImageTexture, textureCoordinate );
- }
- }
-);
-#endif
-
-@interface GPUImagePixellatePositionFilter ()
-
-- (void)adjustAspectRatio;
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
-
-@implementation GPUImagePixellatePositionFilter
-
-@synthesize fractionalWidthOfAPixel = _fractionalWidthOfAPixel;
-@synthesize aspectRatio = _aspectRatio;
-@synthesize center = _center;
-@synthesize radius = _radius;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImagePixellationPositionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- fractionalWidthOfAPixelUniform = [filterProgram uniformIndex:@"fractionalWidthOfPixel"];
- aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
- centerUniform = [filterProgram uniformIndex:@"pixelateCenter"];
- radiusUniform = [filterProgram uniformIndex:@"pixelateRadius"];
-
- self.fractionalWidthOfAPixel = 0.05;
- self.center = CGPointMake(0.5f, 0.5f);
- self.radius = 0.25f;
-
- return self;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if ( (!CGSizeEqualToSize(oldInputSize, inputTextureSize)) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
- [self adjustAspectRatio];
-}
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setFractionalWidthOfAPixel:(CGFloat)newValue;
-{
- CGFloat singlePixelSpacing;
- if (inputTextureSize.width != 0.0)
- {
- singlePixelSpacing = 1.0 / inputTextureSize.width;
- }
- else
- {
- singlePixelSpacing = 1.0 / 2048.0;
- }
-
- if (newValue < singlePixelSpacing)
- {
- _fractionalWidthOfAPixel = singlePixelSpacing;
- }
- else
- {
- _fractionalWidthOfAPixel = newValue;
- }
-
- [self setFloat:_fractionalWidthOfAPixel forUniform:fractionalWidthOfAPixelUniform program:filterProgram];
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)center
-{
- _center = center;
- CGPoint rotatedPoint = [self rotatedPoint:center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-- (void)setRadius:(CGFloat)radius
-{
- _radius = radius;
-
- [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.h
deleted file mode 100644
index 58eff22..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
-#import "GPUImageFilterGroup.h"
-
-@interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
-{
- GLint mixUniform;
-
- GPUImageFramebuffer *secondOutputFramebuffer;
-}
-
-// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
-@property(readwrite, nonatomic) CGFloat mix;
-
-// The number of times to propagate the gradients.
-// Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
-@property(readwrite, nonatomic) NSUInteger numIterations;
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.m
deleted file mode 100644
index 0167e02..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePoissonBlendFilter.m
+++ /dev/null
@@ -1,175 +0,0 @@
-#import "GPUImagePoissonBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
-(
- precision mediump float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
- varying vec2 topTextureCoordinate;
- varying vec2 bottomTextureCoordinate;
-
- varying vec2 textureCoordinate2;
- varying vec2 leftTextureCoordinate2;
- varying vec2 rightTextureCoordinate2;
- varying vec2 topTextureCoordinate2;
- varying vec2 bottomTextureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform lowp float mixturePercent;
-
- void main()
- {
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
-
- vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
- vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
- vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
- vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;
-
- vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
- vec3 diffColor = centerColor.rgb - meanColor;
-
- vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
- vec3 diffColor2 = centerColor2.rgb - meanColor2;
-
- vec3 gradColor = (meanColor + diffColor2);
-
- gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
- }
-);
-#else
-NSString *const kGPUImagePoissonBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
- varying vec2 topTextureCoordinate;
- varying vec2 bottomTextureCoordinate;
-
- varying vec2 textureCoordinate2;
- varying vec2 leftTextureCoordinate2;
- varying vec2 rightTextureCoordinate2;
- varying vec2 topTextureCoordinate2;
- varying vec2 bottomTextureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float mixturePercent;
-
- void main()
- {
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- vec3 bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
- vec3 leftColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- vec3 rightColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- vec3 topColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
-
- vec4 centerColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- vec3 bottomColor2 = texture2D(inputImageTexture2, bottomTextureCoordinate2).rgb;
- vec3 leftColor2 = texture2D(inputImageTexture2, leftTextureCoordinate2).rgb;
- vec3 rightColor2 = texture2D(inputImageTexture2, rightTextureCoordinate2).rgb;
- vec3 topColor2 = texture2D(inputImageTexture2, topTextureCoordinate2).rgb;
-
- vec3 meanColor = (bottomColor + leftColor + rightColor + topColor) / 4.0;
- vec3 diffColor = centerColor.rgb - meanColor;
-
- vec3 meanColor2 = (bottomColor2 + leftColor2 + rightColor2 + topColor2) / 4.0;
- vec3 diffColor2 = centerColor2.rgb - meanColor2;
-
- vec3 gradColor = (meanColor + diffColor2);
-
- gl_FragColor = vec4(mix(centerColor.rgb, gradColor, centerColor2.a * mixturePercent), centerColor.a);
- }
-);
-#endif
-
-@implementation GPUImagePoissonBlendFilter
-
-@synthesize mix = _mix;
-@synthesize numIterations = _numIterations;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePoissonBlendFragmentShaderString]))
- {
- return nil;
- }
-
- mixUniform = [filterProgram uniformIndex:@"mixturePercent"];
- self.mix = 0.5;
-
- self.numIterations = 10;
-
- return self;
-}
-
-- (void)setMix:(CGFloat)newValue;
-{
- _mix = newValue;
-
- [self setFloat:_mix forUniform:mixUniform program:filterProgram];
-}
-
-//- (void)setOutputFBO;
-//{
-// if (self.numIterations % 2 == 1) {
-// [self setSecondFilterFBO];
-// } else {
-// [self setFilterFBO];
-// }
-//}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- // Run the first stage of the two-pass filter
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
-
- for (int pass = 1; pass < self.numIterations; pass++) {
-
- if (pass % 2 == 0) {
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- // TODO: This will over-unlock the incoming framebuffer
- [super renderToTextureWithVertices:vertices textureCoordinates:[[self class] textureCoordinatesForRotation:kGPUImageNoRotation]];
- } else {
- // Run the second stage of the two-pass filter
- secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [secondOutputFramebuffer activateFramebuffer];
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform, 2);
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform2, 3);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
- glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- }
- }
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.h
deleted file mode 100755
index 3de6a4d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImagePolarPixellateFilter : GPUImageFilter {
- GLint centerUniform, pixelSizeUniform;
-}
-
-// The center about which to apply the distortion, with a default of (0.5, 0.5)
-@property(readwrite, nonatomic) CGPoint center;
-// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
-@property(readwrite, nonatomic) CGSize pixelSize;
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.m
deleted file mode 100755
index 5677db4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePolarPixellateFilter.m
+++ /dev/null
@@ -1,128 +0,0 @@
-#import "GPUImagePolarPixellateFilter.h"
-
-// @fattjake based on vid by toneburst
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 center;
- uniform highp vec2 pixelSize;
-
-
- void main()
- {
- highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;
- highp vec2 normCenter = 2.0 * center - 1.0;
-
- normCoord -= normCenter;
-
- highp float r = length(normCoord); // to polar coords
- highp float phi = atan(normCoord.y, normCoord.x); // to polar coords
-
- r = r - mod(r, pixelSize.x) + 0.03;
- phi = phi - mod(phi, pixelSize.y);
-
- normCoord.x = r * cos(phi);
- normCoord.y = r * sin(phi);
-
- normCoord += normCenter;
-
- mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
-
- }
-);
-#else
-NSString *const kGPUImagePolarPixellateFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 center;
- uniform vec2 pixelSize;
-
-
- void main()
- {
- vec2 normCoord = 2.0 * textureCoordinate - 1.0;
- vec2 normCenter = 2.0 * center - 1.0;
-
- normCoord -= normCenter;
-
- float r = length(normCoord); // to polar coords
- float phi = atan(normCoord.y, normCoord.x); // to polar coords
-
- r = r - mod(r, pixelSize.x) + 0.03;
- phi = phi - mod(phi, pixelSize.y);
-
- normCoord.x = r * cos(phi);
- normCoord.y = r * sin(phi);
-
- normCoord += normCenter;
-
- vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
-
- }
-);
-#endif
-
-
-@implementation GPUImagePolarPixellateFilter
-
-@synthesize center = _center;
-
-@synthesize pixelSize = _pixelSize;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolarPixellateFragmentShaderString]))
- {
- return nil;
- }
-
- pixelSizeUniform = [filterProgram uniformIndex:@"pixelSize"];
- centerUniform = [filterProgram uniformIndex:@"center"];
-
-
- self.pixelSize = CGSizeMake(0.05, 0.05);
- self.center = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
-}
-
-- (void)setPixelSize:(CGSize)pixelSize
-{
- _pixelSize = pixelSize;
-
- [self setSize:_pixelSize forUniform:pixelSizeUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.h
deleted file mode 100644
index 369b773..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import "GPUImagePixellateFilter.h"
-
-@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
-{
- GLint dotScalingUniform;
-}
-
-@property(readwrite, nonatomic) CGFloat dotScaling;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.m
deleted file mode 100644
index a439a04..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePolkaDotFilter.m
+++ /dev/null
@@ -1,85 +0,0 @@
-#import "GPUImagePolkaDotFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float fractionalWidthOfPixel;
- uniform highp float aspectRatio;
- uniform highp float dotScaling;
-
- void main()
- {
- highp vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- highp vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
- lowp float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
-
- lowp vec4 inputColor = texture2D(inputImageTexture, samplePos);
-
- gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);
- }
-);
-#else
-NSString *const kGPUImagePolkaDotFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float fractionalWidthOfPixel;
- uniform float aspectRatio;
- uniform float dotScaling;
-
- void main()
- {
- vec2 sampleDivisor = vec2(fractionalWidthOfPixel, fractionalWidthOfPixel / aspectRatio);
-
- vec2 samplePos = textureCoordinate - mod(textureCoordinate, sampleDivisor) + 0.5 * sampleDivisor;
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- vec2 adjustedSamplePos = vec2(samplePos.x, (samplePos.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float distanceFromSamplePoint = distance(adjustedSamplePos, textureCoordinateToUse);
- float checkForPresenceWithinDot = step(distanceFromSamplePoint, (fractionalWidthOfPixel * 0.5) * dotScaling);
-
- vec4 inputColor = texture2D(inputImageTexture, samplePos);
-
- gl_FragColor = vec4(inputColor.rgb * checkForPresenceWithinDot, inputColor.a);
- }
-);
-#endif
-
-@implementation GPUImagePolkaDotFilter
-
-@synthesize dotScaling = _dotScaling;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePolkaDotFragmentShaderString]))
- {
- return nil;
- }
-
- dotScalingUniform = [filterProgram uniformIndex:@"dotScaling"];
-
- self.dotScaling = 0.90;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setDotScaling:(CGFloat)newValue;
-{
- _dotScaling = newValue;
-
- [self setFloat:_dotScaling forUniform:dotScalingUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.h
deleted file mode 100755
index 6f655b3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
- */
-@interface GPUImagePosterizeFilter : GPUImageFilter
-{
- GLint colorLevelsUniform;
-}
-
-/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
- */
-@property(readwrite, nonatomic) NSUInteger colorLevels;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.m
deleted file mode 100755
index a438cea..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePosterizeFilter.m
+++ /dev/null
@@ -1,66 +0,0 @@
-#import "GPUImagePosterizeFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform highp float colorLevels;
-
- void main()
- {
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
- }
-);
-#else
-NSString *const kGPUImagePosterizeFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float colorLevels;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = floor((textureColor * colorLevels) + vec4(0.5)) / colorLevels;
- }
-);
-#endif
-
-@implementation GPUImagePosterizeFilter
-
-@synthesize colorLevels = _colorLevels;
-
-#pragma mark -
-#pragma mark Initialization
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImagePosterizeFragmentShaderString]))
- {
- return nil;
- }
-
- colorLevelsUniform = [filterProgram uniformIndex:@"colorLevels"];
- self.colorLevels = 10;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setColorLevels:(NSUInteger)newValue;
-{
- _colorLevels = newValue;
-
- [self setFloat:_colorLevels forUniform:colorLevelsUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.h
deleted file mode 100755
index 141f8c5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageSobelEdgeDetectionFilter.h"
-
-@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.m
deleted file mode 100755
index a990693..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImagePrewittEdgeDetectionFilter.m
+++ /dev/null
@@ -1,97 +0,0 @@
-#import "GPUImagePrewittEdgeDetectionFilter.h"
-
-@implementation GPUImagePrewittEdgeDetectionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float edgeStrength;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v)) * edgeStrength;
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#else
-NSString *const kGPUImagePrewittFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float edgeStrength;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v)) * edgeStrength;
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImagePrewittFragmentShaderString]))
- {
- return nil;
- }
-
- self.edgeStrength = 1.0;
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.h
deleted file mode 100644
index 08d13f8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageRGBErosionFilter;
-@class GPUImageRGBDilationFilter;
-
-// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
-// This helps to filter out smaller dark elements.
-
-@interface GPUImageRGBClosingFilter : GPUImageFilterGroup
-{
- GPUImageRGBErosionFilter *erosionFilter;
- GPUImageRGBDilationFilter *dilationFilter;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.m
deleted file mode 100644
index c5bb1c8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBClosingFilter.m
+++ /dev/null
@@ -1,41 +0,0 @@
-#import "GPUImageRGBClosingFilter.h"
-#import "GPUImageRGBErosionFilter.h"
-#import "GPUImageRGBDilationFilter.h"
-
-@implementation GPUImageRGBClosingFilter
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: dilation
- dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
- [self addFilter:dilationFilter];
-
- // Second pass: erosion
- erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
- [self addFilter:erosionFilter];
-
- [dilationFilter addTarget:erosionFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:dilationFilter, nil];
- self.terminalFilter = erosionFilter;
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.h
deleted file mode 100644
index 68276f8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
-// This extends out brighter colors, and can be used for abstraction of color images.
-
-@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter
-
-// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
-- (id)initWithRadius:(NSUInteger)dilationRadius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.m
deleted file mode 100644
index 9702c78..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBDilationFilter.m
+++ /dev/null
@@ -1,306 +0,0 @@
-#import "GPUImageRGBDilationFilter.h"
-#import "GPUImageDilationFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
-
- lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
-
- gl_FragColor = max(maxValue, oneStepNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
-
- lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
-
- gl_FragColor = max(maxValue, twoStepsNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
-
- lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
-
- gl_FragColor = max(maxValue, threeStepsNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
- lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
- lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
-
- lowp vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
- maxValue = max(maxValue, fourStepsPositiveIntensity);
-
- gl_FragColor = max(maxValue, fourStepsNegativeIntensity);
- }
-);
-#else
-NSString *const kGPUImageRGBDilationRadiusOneFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
-
- vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
-
- gl_FragColor = max(maxValue, oneStepNegativeIntensity);
- }
- );
-
-NSString *const kGPUImageRGBDilationRadiusTwoFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
-
- vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
-
- gl_FragColor = max(maxValue, twoStepsNegativeIntensity);
- }
- );
-
-NSString *const kGPUImageRGBDilationRadiusThreeFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
-
- vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
-
- gl_FragColor = max(maxValue, threeStepsNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBDilationRadiusFourFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
- vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
- vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
-
- vec4 maxValue = max(centerIntensity, oneStepPositiveIntensity);
- maxValue = max(maxValue, oneStepNegativeIntensity);
- maxValue = max(maxValue, twoStepsPositiveIntensity);
- maxValue = max(maxValue, twoStepsNegativeIntensity);
- maxValue = max(maxValue, threeStepsPositiveIntensity);
- maxValue = max(maxValue, threeStepsNegativeIntensity);
- maxValue = max(maxValue, fourStepsPositiveIntensity);
-
- gl_FragColor = max(maxValue, fourStepsNegativeIntensity);
- }
-);
-#endif
-
-@implementation GPUImageRGBDilationFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithRadius:(NSUInteger)dilationRadius;
-{
- NSString *fragmentShaderForThisRadius = nil;
- NSString *vertexShaderForThisRadius = nil;
-
- switch (dilationRadius)
- {
- case 0:
- case 1:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusOneFragmentShaderString;
- }; break;
- case 2:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusTwoFragmentShaderString;
- }; break;
- case 3:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusThreeFragmentShaderString;
- }; break;
- case 4:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString;
- }; break;
- default:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBDilationRadiusFourFragmentShaderString;
- }; break;
- }
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.h
deleted file mode 100644
index 5979cb7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
-// This extends out dark features, and can be used for abstraction of color images.
-
-@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter
-
-// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
-- (id)initWithRadius:(NSUInteger)erosionRadius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.m
deleted file mode 100644
index 91e5f33..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBErosionFilter.m
+++ /dev/null
@@ -1,304 +0,0 @@
-#import "GPUImageRGBErosionFilter.h"
-#import "GPUImageDilationFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
-
- lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
-
- gl_FragColor = min(minValue, oneStepNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
-
- lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, twoStepsNegativeIntensity);
- }
- );
-
-NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
-
- lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, threeStepsNegativeIntensity);
- }
- );
-
-NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- lowp vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- lowp vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- lowp vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- lowp vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- lowp vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- lowp vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- lowp vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
- lowp vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
- lowp vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
-
- lowp vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
- minValue = min(minValue, fourStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, fourStepsNegativeIntensity);
- }
-);
-#else
-NSString *const kGPUImageRGBErosionRadiusOneFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
-
- vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
-
- gl_FragColor = min(minValue, oneStepNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBErosionRadiusTwoFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
-
- vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, twoStepsNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBErosionRadiusThreeFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
-
- vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, threeStepsNegativeIntensity);
- }
-);
-
-NSString *const kGPUImageRGBErosionRadiusFourFragmentShaderString = SHADER_STRING
-(
- varying vec2 centerTextureCoordinate;
- varying vec2 oneStepPositiveTextureCoordinate;
- varying vec2 oneStepNegativeTextureCoordinate;
- varying vec2 twoStepsPositiveTextureCoordinate;
- varying vec2 twoStepsNegativeTextureCoordinate;
- varying vec2 threeStepsPositiveTextureCoordinate;
- varying vec2 threeStepsNegativeTextureCoordinate;
- varying vec2 fourStepsPositiveTextureCoordinate;
- varying vec2 fourStepsNegativeTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec4 centerIntensity = texture2D(inputImageTexture, centerTextureCoordinate);
- vec4 oneStepPositiveIntensity = texture2D(inputImageTexture, oneStepPositiveTextureCoordinate);
- vec4 oneStepNegativeIntensity = texture2D(inputImageTexture, oneStepNegativeTextureCoordinate);
- vec4 twoStepsPositiveIntensity = texture2D(inputImageTexture, twoStepsPositiveTextureCoordinate);
- vec4 twoStepsNegativeIntensity = texture2D(inputImageTexture, twoStepsNegativeTextureCoordinate);
- vec4 threeStepsPositiveIntensity = texture2D(inputImageTexture, threeStepsPositiveTextureCoordinate);
- vec4 threeStepsNegativeIntensity = texture2D(inputImageTexture, threeStepsNegativeTextureCoordinate);
- vec4 fourStepsPositiveIntensity = texture2D(inputImageTexture, fourStepsPositiveTextureCoordinate);
- vec4 fourStepsNegativeIntensity = texture2D(inputImageTexture, fourStepsNegativeTextureCoordinate);
-
- vec4 minValue = min(centerIntensity, oneStepPositiveIntensity);
- minValue = min(minValue, oneStepNegativeIntensity);
- minValue = min(minValue, twoStepsPositiveIntensity);
- minValue = min(minValue, twoStepsNegativeIntensity);
- minValue = min(minValue, threeStepsPositiveIntensity);
- minValue = min(minValue, threeStepsNegativeIntensity);
- minValue = min(minValue, fourStepsPositiveIntensity);
-
- gl_FragColor = min(minValue, fourStepsNegativeIntensity);
- }
-);
-#endif
-
-@implementation GPUImageRGBErosionFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithRadius:(NSUInteger)erosionRadius;
-{
- NSString *fragmentShaderForThisRadius = nil;
- NSString *vertexShaderForThisRadius = nil;
-
- switch (erosionRadius)
- {
- case 0:
- case 1:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusOneVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusOneFragmentShaderString;
- }; break;
- case 2:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusTwoVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusTwoFragmentShaderString;
- }; break;
- case 3:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusThreeVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusThreeFragmentShaderString;
- }; break;
- case 4:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString;
- }; break;
- default:
- {
- vertexShaderForThisRadius = kGPUImageDilationRadiusFourVertexShaderString;
- fragmentShaderForThisRadius = kGPUImageRGBErosionRadiusFourFragmentShaderString;
- }; break;
- }
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:vertexShaderForThisRadius firstStageFragmentShaderFromString:fragmentShaderForThisRadius secondStageVertexShaderFromString:vertexShaderForThisRadius secondStageFragmentShaderFromString:fragmentShaderForThisRadius]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.h
deleted file mode 100755
index 18966b1..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageRGBFilter : GPUImageFilter
-{
- GLint redUniform;
- GLint greenUniform;
- GLint blueUniform;
-}
-
-// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
-@property (readwrite, nonatomic) CGFloat red;
-@property (readwrite, nonatomic) CGFloat green;
-@property (readwrite, nonatomic) CGFloat blue;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.m
deleted file mode 100755
index 7a2e568..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBFilter.m
+++ /dev/null
@@ -1,89 +0,0 @@
-#import "GPUImageRGBFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform highp float redAdjustment;
- uniform highp float greenAdjustment;
- uniform highp float blueAdjustment;
-
- void main()
- {
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageRGBFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float redAdjustment;
- uniform float greenAdjustment;
- uniform float blueAdjustment;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- gl_FragColor = vec4(textureColor.r * redAdjustment, textureColor.g * greenAdjustment, textureColor.b * blueAdjustment, textureColor.a);
- }
- );
-#endif
-
-@implementation GPUImageRGBFilter
-
-@synthesize red = _red, blue = _blue, green = _green;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageRGBFragmentShaderString]))
- {
- return nil;
- }
-
- redUniform = [filterProgram uniformIndex:@"redAdjustment"];
- self.red = 1.0;
-
- greenUniform = [filterProgram uniformIndex:@"greenAdjustment"];
- self.green = 1.0;
-
- blueUniform = [filterProgram uniformIndex:@"blueAdjustment"];
- self.blue = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setRed:(CGFloat)newValue;
-{
- _red = newValue;
-
- [self setFloat:_red forUniform:redUniform program:filterProgram];
-}
-
-- (void)setGreen:(CGFloat)newValue;
-{
- _green = newValue;
-
- [self setFloat:_green forUniform:greenUniform program:filterProgram];
-}
-
-- (void)setBlue:(CGFloat)newValue;
-{
- _blue = newValue;
-
- [self setFloat:_blue forUniform:blueUniform program:filterProgram];
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.h
deleted file mode 100644
index dbec75f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageRGBErosionFilter;
-@class GPUImageRGBDilationFilter;
-
-// A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius.
-// This helps to filter out smaller bright elements.
-
-@interface GPUImageRGBOpeningFilter : GPUImageFilterGroup
-{
- GPUImageRGBErosionFilter *erosionFilter;
- GPUImageRGBDilationFilter *dilationFilter;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.m
deleted file mode 100644
index 9d53021..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRGBOpeningFilter.m
+++ /dev/null
@@ -1,41 +0,0 @@
-#import "GPUImageRGBOpeningFilter.h"
-#import "GPUImageRGBErosionFilter.h"
-#import "GPUImageRGBDilationFilter.h"
-
-@implementation GPUImageRGBOpeningFilter
-
-- (id)init;
-{
- if (!(self = [self initWithRadius:1]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithRadius:(NSUInteger)radius;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: erosion
- erosionFilter = [[GPUImageRGBErosionFilter alloc] initWithRadius:radius];
- [self addFilter:erosionFilter];
-
- // Second pass: dilation
- dilationFilter = [[GPUImageRGBDilationFilter alloc] initWithRadius:radius];
- [self addFilter:dilationFilter];
-
- [erosionFilter addTarget:dilationFilter];
-
- self.initialFilters = [NSArray arrayWithObjects:erosionFilter, nil];
- self.terminalFilter = dilationFilter;
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.h b/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.h
deleted file mode 100644
index 6ec4720..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.h
+++ /dev/null
@@ -1,43 +0,0 @@
-#import "GPUImageOutput.h"
-
-// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
-// The bytes are uploaded and stored within a texture, so nothing is kept locally.
-// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
-// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:
-
-typedef enum {
- GPUPixelFormatBGRA = GL_BGRA,
- GPUPixelFormatRGBA = GL_RGBA,
- GPUPixelFormatRGB = GL_RGB,
- GPUPixelFormatLuminance = GL_LUMINANCE
-} GPUPixelFormat;
-
-typedef enum {
- GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
- GPUPixelTypeFloat = GL_FLOAT
-} GPUPixelType;
-
-@interface GPUImageRawDataInput : GPUImageOutput
-{
- CGSize uploadedImageSize;
-
- dispatch_semaphore_t dataUpdateSemaphore;
-}
-
-// Initialization and teardown
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
-
-/** Input data pixel format
- */
-@property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
-@property (readwrite, nonatomic) GPUPixelType pixelType;
-
-// Image rendering
-- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
-- (void)processData;
-- (void)processDataForTimestamp:(CMTime)frameTime;
-- (CGSize)outputImageSize;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.m b/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.m
deleted file mode 100644
index 4b6bfa7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataInput.m
+++ /dev/null
@@ -1,139 +0,0 @@
-#import "GPUImageRawDataInput.h"
-
-@interface GPUImageRawDataInput()
-- (void)uploadBytes:(GLubyte *)bytesToUpload;
-@end
-
-@implementation GPUImageRawDataInput
-
-@synthesize pixelFormat = _pixelFormat;
-@synthesize pixelType = _pixelType;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
-{
- if (!(self = [self initWithBytes:bytesToUpload size:imageSize pixelFormat:GPUPixelFormatBGRA type:GPUPixelTypeUByte]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
-{
- if (!(self = [self initWithBytes:bytesToUpload size:imageSize pixelFormat:pixelFormat type:GPUPixelTypeUByte]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- dataUpdateSemaphore = dispatch_semaphore_create(1);
-
- uploadedImageSize = imageSize;
- self.pixelFormat = pixelFormat;
- self.pixelType = pixelType;
-
- [self uploadBytes:bytesToUpload];
-
- return self;
-}
-
-// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
-- (void)dealloc;
-{
-#if !OS_OBJECT_USE_OBJC
- if (dataUpdateSemaphore != NULL)
- {
- dispatch_release(dataUpdateSemaphore);
- }
-#endif
-}
-
-#pragma mark -
-#pragma mark Image rendering
-
-- (void)uploadBytes:(GLubyte *)bytesToUpload;
-{
- [GPUImageContext useImageProcessingContext];
-
- // TODO: This probably isn't right, and will need to be corrected
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:uploadedImageSize textureOptions:self.outputTextureOptions onlyTexture:YES];
-
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- glTexImage2D(GL_TEXTURE_2D, 0, _pixelFormat==GPUPixelFormatRGB ? GL_RGB : GL_RGBA, (int)uploadedImageSize.width, (int)uploadedImageSize.height, 0, (GLint)_pixelFormat, (GLenum)_pixelType, bytesToUpload);
-}
-
-- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
-{
- uploadedImageSize = imageSize;
-
- [self uploadBytes:bytesToUpload];
-}
-
-- (void)processData;
-{
- if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
- {
- return;
- }
-
- runAsynchronouslyOnVideoProcessingQueue(^{
-
- CGSize pixelSizeOfImage = [self outputImageSize];
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
- [currentTarget newFrameReadyAtTime:kCMTimeInvalid atIndex:textureIndexOfTarget];
- }
-
- dispatch_semaphore_signal(dataUpdateSemaphore);
- });
-}
-
-- (void)processDataForTimestamp:(CMTime)frameTime;
-{
- if (dispatch_semaphore_wait(dataUpdateSemaphore, DISPATCH_TIME_NOW) != 0)
- {
- return;
- }
-
- runAsynchronouslyOnVideoProcessingQueue(^{
-
- CGSize pixelSizeOfImage = [self outputImageSize];
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [currentTarget setInputSize:pixelSizeOfImage atIndex:textureIndexOfTarget];
- [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];
- }
-
- dispatch_semaphore_signal(dataUpdateSemaphore);
- });
-}
-
-- (CGSize)outputImageSize;
-{
- return uploadedImageSize;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.h b/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.h
deleted file mode 100755
index 5a4538c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.h
+++ /dev/null
@@ -1,44 +0,0 @@
-#import
-#import "GPUImageContext.h"
-
-struct GPUByteColorVector {
- GLubyte red;
- GLubyte green;
- GLubyte blue;
- GLubyte alpha;
-};
-typedef struct GPUByteColorVector GPUByteColorVector;
-
-@protocol GPUImageRawDataProcessor;
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-@interface GPUImageRawDataOutput : NSObject {
- CGSize imageSize;
- GPUImageRotationMode inputRotation;
- BOOL outputBGRA;
-}
-#else
-@interface GPUImageRawDataOutput : NSObject {
- CGSize imageSize;
- GPUImageRotationMode inputRotation;
- BOOL outputBGRA;
-}
-#endif
-
-@property(readonly) GLubyte *rawBytesForImage;
-@property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
-@property(nonatomic) BOOL enabled;
-
-// Initialization and teardown
-- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
-
-// Data access
-- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
-- (NSUInteger)bytesPerRowInOutput;
-
-- (void)setImageSize:(CGSize)newImageSize;
-
-- (void)lockFramebufferForReading;
-- (void)unlockFramebufferAfterReading;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.m b/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.m
deleted file mode 100755
index 18101e2..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageRawDataOutput.m
+++ /dev/null
@@ -1,307 +0,0 @@
-#import "GPUImageRawDataOutput.h"
-
-#import "GPUImageContext.h"
-#import "GLProgram.h"
-#import "GPUImageFilter.h"
-#import "GPUImageMovieWriter.h"
-
-@interface GPUImageRawDataOutput ()
-{
- GPUImageFramebuffer *firstInputFramebuffer, *outputFramebuffer, *retainedFramebuffer;
-
- BOOL hasReadFromTheCurrentFrame;
-
- GLProgram *dataProgram;
- GLint dataPositionAttribute, dataTextureCoordinateAttribute;
- GLint dataInputTextureUniform;
-
- GLubyte *_rawBytesForImage;
-
- BOOL lockNextFramebuffer;
-}
-
-// Frame rendering
-- (void)renderAtInternalSize;
-
-@end
-
-@implementation GPUImageRawDataOutput
-
-@synthesize rawBytesForImage = _rawBytesForImage;
-@synthesize newFrameAvailableBlock = _newFrameAvailableBlock;
-@synthesize enabled;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- self.enabled = YES;
- lockNextFramebuffer = NO;
- outputBGRA = resultsInBGRAFormat;
- imageSize = newImageSize;
- hasReadFromTheCurrentFrame = NO;
- _rawBytesForImage = NULL;
- inputRotation = kGPUImageNoRotation;
-
- [GPUImageContext useImageProcessingContext];
- if ( (outputBGRA && ![GPUImageContext supportsFastTextureUpload]) || (!outputBGRA && [GPUImageContext supportsFastTextureUpload]) )
- {
- dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageColorSwizzlingFragmentShaderString];
- }
- else
- {
- dataProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImagePassthroughFragmentShaderString];
- }
-
- if (!dataProgram.initialized)
- {
- [dataProgram addAttribute:@"position"];
- [dataProgram addAttribute:@"inputTextureCoordinate"];
-
- if (![dataProgram link])
- {
- NSString *progLog = [dataProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [dataProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [dataProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- dataProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- dataPositionAttribute = [dataProgram attributeIndex:@"position"];
- dataTextureCoordinateAttribute = [dataProgram attributeIndex:@"inputTextureCoordinate"];
- dataInputTextureUniform = [dataProgram uniformIndex:@"inputImageTexture"];
-
- return self;
-}
-
-- (void)dealloc
-{
- if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))
- {
- free(_rawBytesForImage);
- _rawBytesForImage = NULL;
- }
-}
-
-#pragma mark -
-#pragma mark Data access
-
-- (void)renderAtInternalSize;
-{
- [GPUImageContext setActiveShaderProgram:dataProgram];
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:imageSize onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- if(lockNextFramebuffer)
- {
- retainedFramebuffer = outputFramebuffer;
- [retainedFramebuffer lock];
- [retainedFramebuffer lockForReading];
- lockNextFramebuffer = NO;
- }
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
-
- static const GLfloat squareVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- static const GLfloat textureCoordinates[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- glActiveTexture(GL_TEXTURE4);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
- glUniform1i(dataInputTextureUniform, 4);
-
- glVertexAttribPointer(dataPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
- glVertexAttribPointer(dataTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glEnableVertexAttribArray(dataPositionAttribute);
- glEnableVertexAttribArray(dataTextureCoordinateAttribute);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- [firstInputFramebuffer unlock];
-}
-
-- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
-{
- GPUByteColorVector *imageColorBytes = (GPUByteColorVector *)self.rawBytesForImage;
-// NSLog(@"Row start");
-// for (unsigned int currentXPosition = 0; currentXPosition < (imageSize.width * 2.0); currentXPosition++)
-// {
-// GPUByteColorVector byteAtPosition = imageColorBytes[currentXPosition];
-// NSLog(@"%d - %d, %d, %d", currentXPosition, byteAtPosition.red, byteAtPosition.green, byteAtPosition.blue);
-// }
-// NSLog(@"Row end");
-
-// GPUByteColorVector byteAtOne = imageColorBytes[1];
-// GPUByteColorVector byteAtWidth = imageColorBytes[(int)imageSize.width - 3];
-// GPUByteColorVector byteAtHeight = imageColorBytes[(int)(imageSize.height - 1) * (int)imageSize.width];
-// NSLog(@"Byte 1: %d, %d, %d, byte 2: %d, %d, %d, byte 3: %d, %d, %d", byteAtOne.red, byteAtOne.green, byteAtOne.blue, byteAtWidth.red, byteAtWidth.green, byteAtWidth.blue, byteAtHeight.red, byteAtHeight.green, byteAtHeight.blue);
-
- CGPoint locationToPickFrom = CGPointZero;
- locationToPickFrom.x = MIN(MAX(locationInImage.x, 0.0), (imageSize.width - 1.0));
- locationToPickFrom.y = MIN(MAX((imageSize.height - locationInImage.y), 0.0), (imageSize.height - 1.0));
-
- if (outputBGRA)
- {
- GPUByteColorVector flippedColor = imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
- GLubyte temporaryRed = flippedColor.red;
-
- flippedColor.red = flippedColor.blue;
- flippedColor.blue = temporaryRed;
-
- return flippedColor;
- }
- else
- {
- return imageColorBytes[(int)(round((locationToPickFrom.y * imageSize.width) + locationToPickFrom.x))];
- }
-}
-
-#pragma mark -
-#pragma mark GPUImageInput protocol
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- hasReadFromTheCurrentFrame = NO;
-
- if (_newFrameAvailableBlock != NULL)
- {
- _newFrameAvailableBlock();
- }
-}
-
-- (NSInteger)nextAvailableTextureIndex;
-{
- return 0;
-}
-
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- firstInputFramebuffer = newInputFramebuffer;
- [firstInputFramebuffer lock];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- inputRotation = newInputRotation;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
-}
-
-- (CGSize)maximumOutputSize;
-{
- return imageSize;
-}
-
-- (void)endProcessing;
-{
-}
-
-- (BOOL)shouldIgnoreUpdatesToThisTarget;
-{
- return NO;
-}
-
-- (BOOL)wantsMonochromeInput;
-{
- return NO;
-}
-
-- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
-{
-
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (GLubyte *)rawBytesForImage;
-{
- if ( (_rawBytesForImage == NULL) && (![GPUImageContext supportsFastTextureUpload]) )
- {
- _rawBytesForImage = (GLubyte *) calloc(imageSize.width * imageSize.height * 4, sizeof(GLubyte));
- hasReadFromTheCurrentFrame = NO;
- }
-
- if (hasReadFromTheCurrentFrame)
- {
- return _rawBytesForImage;
- }
- else
- {
- runSynchronouslyOnVideoProcessingQueue(^{
- // Note: the fast texture caches speed up 640x480 frame reads from 9.6 ms to 3.1 ms on iPhone 4S
-
- [GPUImageContext useImageProcessingContext];
- [self renderAtInternalSize];
-
- if ([GPUImageContext supportsFastTextureUpload])
- {
- glFinish();
- _rawBytesForImage = [outputFramebuffer byteBuffer];
- }
- else
- {
- glReadPixels(0, 0, imageSize.width, imageSize.height, GL_RGBA, GL_UNSIGNED_BYTE, _rawBytesForImage);
- // GL_EXT_read_format_bgra
- // glReadPixels(0, 0, imageSize.width, imageSize.height, GL_BGRA_EXT, GL_UNSIGNED_BYTE, _rawBytesForImage);
- }
-
- hasReadFromTheCurrentFrame = YES;
-
- });
-
- return _rawBytesForImage;
- }
-}
-
-- (NSUInteger)bytesPerRowInOutput;
-{
- return [retainedFramebuffer bytesPerRow];
-}
-
-- (void)setImageSize:(CGSize)newImageSize {
- imageSize = newImageSize;
- if (_rawBytesForImage != NULL && (![GPUImageContext supportsFastTextureUpload]))
- {
- free(_rawBytesForImage);
- _rawBytesForImage = NULL;
- }
-}
-
-- (void)lockFramebufferForReading;
-{
- lockNextFramebuffer = YES;
-}
-
-- (void)unlockFramebufferAfterReading;
-{
- [retainedFramebuffer unlockAfterReading];
- [retainedFramebuffer unlock];
- retainedFramebuffer = nil;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.h
deleted file mode 100644
index 767892a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.m
deleted file mode 100644
index da37f6a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationBlendFilter.m
+++ /dev/null
@@ -1,213 +0,0 @@
-#import "GPUImageSaturationBlendFilter.h"
-
-/**
- * Saturation blend mode based upon pseudo code from the PDF specification.
- */
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- highp float lum(lowp vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- lowp vec3 clipcolor(lowp vec3 c) {
- highp float l = lum(c);
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- lowp vec3 setlum(lowp vec3 c, highp float l) {
- highp float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- highp float sat(lowp vec3 c) {
- lowp float n = min(min(c.r, c.g), c.b);
- lowp float x = max(max(c.r, c.g), c.b);
- return x - n;
- }
-
- lowp float mid(lowp float cmin, lowp float cmid, lowp float cmax, highp float s) {
- return ((cmid - cmin) * s) / (cmax - cmin);
- }
-
- lowp vec3 setsat(lowp vec3 c, highp float s) {
- if (c.r > c.g) {
- if (c.r > c.b) {
- if (c.g > c.b) {
- /* g is mid, b is min */
- c.g = mid(c.b, c.g, c.r, s);
- c.b = 0.0;
- } else {
- /* b is mid, g is min */
- c.b = mid(c.g, c.b, c.r, s);
- c.g = 0.0;
- }
- c.r = s;
- } else {
- /* b is max, r is mid, g is min */
- c.r = mid(c.g, c.r, c.b, s);
- c.b = s;
- c.r = 0.0;
- }
- } else if (c.r > c.b) {
- /* g is max, r is mid, b is min */
- c.r = mid(c.b, c.r, c.g, s);
- c.g = s;
- c.b = 0.0;
- } else if (c.g > c.b) {
- /* g is max, b is mid, r is min */
- c.b = mid(c.r, c.b, c.g, s);
- c.g = s;
- c.r = 0.0;
- } else if (c.b > c.g) {
- /* b is max, g is mid, r is min */
- c.g = mid(c.r, c.g, c.b, s);
- c.b = s;
- c.r = 0.0;
- } else {
- c = vec3(0.0);
- }
- return c;
- }
-
- void main()
- {
- highp vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- highp vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#else
-NSString *const kGPUImageSaturationBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- float lum(vec3 c) {
- return dot(c, vec3(0.3, 0.59, 0.11));
- }
-
- vec3 clipcolor(vec3 c) {
- float l = lum(c);
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
-
- if (n < 0.0) {
- c.r = l + ((c.r - l) * l) / (l - n);
- c.g = l + ((c.g - l) * l) / (l - n);
- c.b = l + ((c.b - l) * l) / (l - n);
- }
- if (x > 1.0) {
- c.r = l + ((c.r - l) * (1.0 - l)) / (x - l);
- c.g = l + ((c.g - l) * (1.0 - l)) / (x - l);
- c.b = l + ((c.b - l) * (1.0 - l)) / (x - l);
- }
-
- return c;
- }
-
- vec3 setlum(vec3 c, float l) {
- float d = l - lum(c);
- c = c + vec3(d);
- return clipcolor(c);
- }
-
- float sat(vec3 c) {
- float n = min(min(c.r, c.g), c.b);
- float x = max(max(c.r, c.g), c.b);
- return x - n;
- }
-
- float mid(float cmin, float cmid, float cmax, float s) {
- return ((cmid - cmin) * s) / (cmax - cmin);
- }
-
- vec3 setsat(vec3 c, float s) {
- if (c.r > c.g) {
- if (c.r > c.b) {
- if (c.g > c.b) {
- /* g is mid, b is min */
- c.g = mid(c.b, c.g, c.r, s);
- c.b = 0.0;
- } else {
- /* b is mid, g is min */
- c.b = mid(c.g, c.b, c.r, s);
- c.g = 0.0;
- }
- c.r = s;
- } else {
- /* b is max, r is mid, g is min */
- c.r = mid(c.g, c.r, c.b, s);
- c.b = s;
- c.r = 0.0;
- }
- } else if (c.r > c.b) {
- /* g is max, r is mid, b is min */
- c.r = mid(c.b, c.r, c.g, s);
- c.g = s;
- c.b = 0.0;
- } else if (c.g > c.b) {
- /* g is max, b is mid, r is min */
- c.b = mid(c.r, c.b, c.g, s);
- c.g = s;
- c.r = 0.0;
- } else if (c.b > c.g) {
- /* b is max, g is mid, r is min */
- c.g = mid(c.r, c.g, c.b, s);
- c.b = s;
- c.r = 0.0;
- } else {
- c = vec3(0.0);
- }
- return c;
- }
-
- void main()
- {
- vec4 baseColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlayColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(baseColor.rgb * (1.0 - overlayColor.a) + setlum(setsat(baseColor.rgb, sat(overlayColor.rgb)), lum(baseColor.rgb)) * overlayColor.a, baseColor.a);
- }
-);
-#endif
-
-
-@implementation GPUImageSaturationBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.h
deleted file mode 100755
index 1c6ff5b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Adjusts the saturation of an image
- */
-@interface GPUImageSaturationFilter : GPUImageFilter
-{
- GLint saturationUniform;
-}
-
-/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
- */
-@property(readwrite, nonatomic) CGFloat saturation;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.m
deleted file mode 100755
index fc373d4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSaturationFilter.m
+++ /dev/null
@@ -1,78 +0,0 @@
-#import "GPUImageSaturationFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float saturation;
-
- // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
- const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float luminance = dot(textureColor.rgb, luminanceWeighting);
- lowp vec3 greyScaleColor = vec3(luminance);
-
- gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
-
- }
-);
-#else
-NSString *const kGPUImageSaturationFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float saturation;
-
- // Values from "Graphics Shaders: Theory and Practice" by Bailey and Cunningham
- const vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- float luminance = dot(textureColor.rgb, luminanceWeighting);
- vec3 greyScaleColor = vec3(luminance);
-
- gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w);
-
- }
- );
-#endif
-
-@implementation GPUImageSaturationFilter
-
-@synthesize saturation = _saturation;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSaturationFragmentShaderString]))
- {
- return nil;
- }
-
- saturationUniform = [filterProgram uniformIndex:@"saturation"];
- self.saturation = 1.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setSaturation:(CGFloat)newValue;
-{
- _saturation = newValue;
-
- [self setFloat:_saturation forUniform:saturationUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.h
deleted file mode 100755
index 2df3abf..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.m
deleted file mode 100755
index d871e7d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageScreenBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageScreenBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- mediump vec4 whiteColor = vec4(1.0);
- gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
- }
-);
-#else
-NSString *const kGPUImageScreenBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
- vec4 whiteColor = vec4(1.0);
- gl_FragColor = whiteColor - ((whiteColor - textureColor2) * (whiteColor - textureColor));
- }
-);
-#endif
-
-@implementation GPUImageScreenBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageScreenBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.h
deleted file mode 100755
index a45164f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.h
+++ /dev/null
@@ -1,6 +0,0 @@
-#import "GPUImageColorMatrixFilter.h"
-
-/// Simple sepia tone filter
-@interface GPUImageSepiaFilter : GPUImageColorMatrixFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.m
deleted file mode 100755
index 71668d6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSepiaFilter.m
+++ /dev/null
@@ -1,24 +0,0 @@
-#import "GPUImageSepiaFilter.h"
-
-@implementation GPUImageSepiaFilter
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- self.intensity = 1.0;
- self.colorMatrix = (GPUMatrix4x4){
- {0.3588, 0.7044, 0.1368, 0.0},
- {0.2990, 0.5870, 0.1140, 0.0},
- {0.2392, 0.4696, 0.0912 ,0.0},
- {0,0,0,1.0},
- };
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.h
deleted file mode 100755
index 739df50..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageSharpenFilter : GPUImageFilter
-{
- GLint sharpnessUniform;
- GLint imageWidthFactorUniform, imageHeightFactorUniform;
-}
-
-// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
-@property(readwrite, nonatomic) CGFloat sharpness;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.m
deleted file mode 100755
index 6d7367a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSharpenFilter.m
+++ /dev/null
@@ -1,147 +0,0 @@
-#import "GPUImageSharpenFilter.h"
-
-NSString *const kGPUImageSharpenVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform float imageWidthFactor;
- uniform float imageHeightFactor;
- uniform float sharpness;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
- varying vec2 topTextureCoordinate;
- varying vec2 bottomTextureCoordinate;
-
- varying float centerMultiplier;
- varying float edgeMultiplier;
-
- void main()
- {
- gl_Position = position;
-
- vec2 widthStep = vec2(imageWidthFactor, 0.0);
- vec2 heightStep = vec2(0.0, imageHeightFactor);
-
- textureCoordinate = inputTextureCoordinate.xy;
- leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
- rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
- topTextureCoordinate = inputTextureCoordinate.xy + heightStep;
- bottomTextureCoordinate = inputTextureCoordinate.xy - heightStep;
-
- centerMultiplier = 1.0 + 4.0 * sharpness;
- edgeMultiplier = sharpness;
- }
-);
-
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 leftTextureCoordinate;
- varying highp vec2 rightTextureCoordinate;
- varying highp vec2 topTextureCoordinate;
- varying highp vec2 bottomTextureCoordinate;
-
- varying highp float centerMultiplier;
- varying highp float edgeMultiplier;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- mediump vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;
- mediump vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- mediump vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- mediump vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- mediump vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
-
- gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);
- }
-);
-#else
-NSString *const kGPUImageSharpenFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
- varying vec2 topTextureCoordinate;
- varying vec2 bottomTextureCoordinate;
-
- varying float centerMultiplier;
- varying float edgeMultiplier;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec3 leftTextureColor = texture2D(inputImageTexture, leftTextureCoordinate).rgb;
- vec3 rightTextureColor = texture2D(inputImageTexture, rightTextureCoordinate).rgb;
- vec3 topTextureColor = texture2D(inputImageTexture, topTextureCoordinate).rgb;
- vec3 bottomTextureColor = texture2D(inputImageTexture, bottomTextureCoordinate).rgb;
-
- gl_FragColor = vec4((textureColor * centerMultiplier - (leftTextureColor * edgeMultiplier + rightTextureColor * edgeMultiplier + topTextureColor * edgeMultiplier + bottomTextureColor * edgeMultiplier)), texture2D(inputImageTexture, bottomTextureCoordinate).w);
- }
-);
-#endif
-
-
-@implementation GPUImageSharpenFilter
-
-@synthesize sharpness = _sharpness;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageSharpenVertexShaderString fragmentShaderFromString:kGPUImageSharpenFragmentShaderString]))
- {
- return nil;
- }
-
- sharpnessUniform = [filterProgram uniformIndex:@"sharpness"];
- self.sharpness = 0.0;
-
- imageWidthFactorUniform = [filterProgram uniformIndex:@"imageWidthFactor"];
- imageHeightFactorUniform = [filterProgram uniformIndex:@"imageHeightFactor"];
-
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.height);
- glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.width);
- }
- else
- {
- glUniform1f(imageWidthFactorUniform, 1.0 / filterFrameSize.width);
- glUniform1f(imageHeightFactorUniform, 1.0 / filterFrameSize.height);
- }
- });
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setSharpness:(CGFloat)newValue;
-{
- _sharpness = newValue;
-
- [self setFloat:_sharpness forUniform:sharpnessUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.h
deleted file mode 100644
index b16ebc0..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageHarrisCornerDetectionFilter.h"
-
-/** Shi-Tomasi feature detector
-
- This is the Shi-Tomasi feature detector, as described in
- J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.
- */
-
-@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter
-
-// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.m
deleted file mode 100644
index e58cbb4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageShiTomasiFeatureDetectionFilter.m
+++ /dev/null
@@ -1,65 +0,0 @@
-#import "GPUImageShiTomasiFeatureDetectionFilter.h"
-
-@implementation GPUImageShiTomasiFeatureDetectionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float sensitivity;
-
- void main()
- {
- mediump vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- mediump float derivativeDifference = derivativeElements.x - derivativeElements.y;
- mediump float zElement = (derivativeElements.z * 2.0) - 1.0;
-
- // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
- mediump float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
-
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#else
-NSString *const kGPUImageShiTomasiCornerDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float sensitivity;
-
- void main()
- {
- vec3 derivativeElements = texture2D(inputImageTexture, textureCoordinate).rgb;
-
- float derivativeDifference = derivativeElements.x - derivativeElements.y;
- float zElement = (derivativeElements.z * 2.0) - 1.0;
-
- // R = Ix^2 + Iy^2 - sqrt( (Ix^2 - Iy^2)^2 + 4 * Ixy * Ixy)
- float cornerness = derivativeElements.x + derivativeElements.y - sqrt(derivativeDifference * derivativeDifference + 4.0 * zElement * zElement);
-
- gl_FragColor = vec4(vec3(cornerness * sensitivity), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithCornerDetectionFragmentShader:kGPUImageShiTomasiCornerDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- self.sensitivity = 1.5;
-
- return self;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.h
deleted file mode 100644
index 934b1e3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageGaussianBlurFilter.h"
-
-// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)
-
-@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.m
deleted file mode 100644
index 4ff0d91..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSingleComponentGaussianBlurFilter.m
+++ /dev/null
@@ -1,189 +0,0 @@
-#import "GPUImageSingleComponentGaussianBlurFilter.h"
-
-@implementation GPUImageSingleComponentGaussianBlurFilter
-
-+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImageVertexShaderString;
- }
-
- // First, generate the normal Gaussian weights for a given sigma
- GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
- GLfloat sumOfWeights = 0.0;
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
-
- if (currentGaussianWeightIndex == 0)
- {
- sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
- }
- else
- {
- sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
- }
- }
-
- // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
- }
-
- // From these weights we calculate the offsets to read interpolated values from
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
- GLfloat *optimizedGaussianOffsets = calloc(numberOfOptimizedOffsets, sizeof(GLfloat));
-
- for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentOptimizedOffset*2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentOptimizedOffset*2 + 2];
-
- GLfloat optimizedWeight = firstWeight + secondWeight;
-
- optimizedGaussianOffsets[currentOptimizedOffset] = (firstWeight * (currentOptimizedOffset*2 + 1) + secondWeight * (currentOptimizedOffset*2 + 2)) / optimizedWeight;
- }
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
- // Header
- [shaderString appendFormat:@"\
- attribute vec4 position;\n\
- attribute vec4 inputTextureCoordinate;\n\
- \n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- gl_Position = position;\n\
- \n\
- vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2))];
-
- // Inner offset loop
- [shaderString appendString:@"blurCoordinates[0] = inputTextureCoordinate.xy;\n"];
- for (NSUInteger currentOptimizedOffset = 0; currentOptimizedOffset < numberOfOptimizedOffsets; currentOptimizedOffset++)
- {
- [shaderString appendFormat:@"\
- blurCoordinates[%lu] = inputTextureCoordinate.xy + singleStepOffset * %f;\n\
- blurCoordinates[%lu] = inputTextureCoordinate.xy - singleStepOffset * %f;\n", (unsigned long)((currentOptimizedOffset * 2) + 1), optimizedGaussianOffsets[currentOptimizedOffset], (unsigned long)((currentOptimizedOffset * 2) + 2), optimizedGaussianOffsets[currentOptimizedOffset]];
- }
-
- // Footer
- [shaderString appendString:@"}\n"];
-
- free(optimizedGaussianOffsets);
- free(standardGaussianWeights);
- return shaderString;
-}
-
-+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
-{
- if (blurRadius < 1)
- {
- return kGPUImagePassthroughFragmentShaderString;
- }
-
- // First, generate the normal Gaussian weights for a given sigma
- GLfloat *standardGaussianWeights = calloc(blurRadius + 1, sizeof(GLfloat));
- GLfloat sumOfWeights = 0.0;
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = (1.0 / sqrt(2.0 * M_PI * pow(sigma, 2.0))) * exp(-pow(currentGaussianWeightIndex, 2.0) / (2.0 * pow(sigma, 2.0)));
-
- if (currentGaussianWeightIndex == 0)
- {
- sumOfWeights += standardGaussianWeights[currentGaussianWeightIndex];
- }
- else
- {
- sumOfWeights += 2.0 * standardGaussianWeights[currentGaussianWeightIndex];
- }
- }
-
- // Next, normalize these weights to prevent the clipping of the Gaussian curve at the end of the discrete samples from reducing luminance
- for (NSUInteger currentGaussianWeightIndex = 0; currentGaussianWeightIndex < blurRadius + 1; currentGaussianWeightIndex++)
- {
- standardGaussianWeights[currentGaussianWeightIndex] = standardGaussianWeights[currentGaussianWeightIndex] / sumOfWeights;
- }
-
- // From these weights we calculate the offsets to read interpolated values from
- NSUInteger numberOfOptimizedOffsets = MIN(blurRadius / 2 + (blurRadius % 2), 7);
- NSUInteger trueNumberOfOptimizedOffsets = blurRadius / 2 + (blurRadius % 2);
-
- NSMutableString *shaderString = [[NSMutableString alloc] init];
-
- // Header
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform highp float texelWidthOffset;\n\
- uniform highp float texelHeightOffset;\n\
- \n\
- varying highp vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- lowp float sum = 0.0;\n", (unsigned long)(1 + (numberOfOptimizedOffsets * 2)) ];
-#else
- [shaderString appendFormat:@"\
- uniform sampler2D inputImageTexture;\n\
- uniform float texelWidthOffset;\n\
- uniform float texelHeightOffset;\n\
- \n\
- varying vec2 blurCoordinates[%lu];\n\
- \n\
- void main()\n\
- {\n\
- float sum = 0.0;\n", 1 + (numberOfOptimizedOffsets * 2) ];
-#endif
-
- // Inner texture loop
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0]).r * %f;\n", standardGaussianWeights[0]];
-
- for (NSUInteger currentBlurCoordinateIndex = 0; currentBlurCoordinateIndex < numberOfOptimizedOffsets; currentBlurCoordinateIndex++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentBlurCoordinateIndex * 2 + 2];
- GLfloat optimizedWeight = firstWeight + secondWeight;
-
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 1), optimizedWeight];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[%lu]).r * %f;\n", (unsigned long)((currentBlurCoordinateIndex * 2) + 2), optimizedWeight];
- }
-
- // If the number of required samples exceeds the amount we can pass in via varyings, we have to do dependent texture reads in the fragment shader
- if (trueNumberOfOptimizedOffsets > numberOfOptimizedOffsets)
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#else
- [shaderString appendString:@"highp vec2 singleStepOffset = vec2(texelWidthOffset, texelHeightOffset);\n"];
-#endif
-
- for (NSUInteger currentOverlowTextureRead = numberOfOptimizedOffsets; currentOverlowTextureRead < trueNumberOfOptimizedOffsets; currentOverlowTextureRead++)
- {
- GLfloat firstWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 1];
- GLfloat secondWeight = standardGaussianWeights[currentOverlowTextureRead * 2 + 2];
-
- GLfloat optimizedWeight = firstWeight + secondWeight;
- GLfloat optimizedOffset = (firstWeight * (currentOverlowTextureRead * 2 + 1) + secondWeight * (currentOverlowTextureRead * 2 + 2)) / optimizedWeight;
-
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] + singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight];
- [shaderString appendFormat:@"sum += texture2D(inputImageTexture, blurCoordinates[0] - singleStepOffset * %f).r * %f;\n", optimizedOffset, optimizedWeight];
- }
- }
-
- // Footer
- [shaderString appendString:@"\
- gl_FragColor = vec4(sum, sum, sum, 1.0);\n\
- }\n"];
-
- free(standardGaussianWeights);
- return shaderString;
-}
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.h
deleted file mode 100755
index 598145a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.h
+++ /dev/null
@@ -1,11 +0,0 @@
-#import "GPUImageSobelEdgeDetectionFilter.h"
-
-/** Converts video to look like a sketch.
-
- This is just the Sobel edge detection filter with the colors inverted.
- */
-@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.m
deleted file mode 100755
index 3cda220..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSketchFilter.m
+++ /dev/null
@@ -1,98 +0,0 @@
-#import "GPUImageSketchFilter.h"
-
-@implementation GPUImageSketchFilter
-
-// Invert the colorspace for a sketch
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING
-(
- precision mediump float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform float edgeStrength;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#else
-NSString *const kGPUImageSketchFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform float edgeStrength;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = 1.0 - (length(vec2(h, v)) * edgeStrength);
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageSketchFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.h
deleted file mode 100755
index f89caac..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.h
+++ /dev/null
@@ -1,28 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGaussianBlurFilter;
-@class GPUImageToonFilter;
-
-/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
- */
-@interface GPUImageSmoothToonFilter : GPUImageFilterGroup
-{
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageToonFilter *toonFilter;
-}
-
-/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
-@property(readwrite, nonatomic) CGFloat texelWidth;
-/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-/// The radius of the underlying Gaussian blur. The default is 2.0.
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-/// The threshold at which to apply the edges, default of 0.2
-@property(readwrite, nonatomic) CGFloat threshold;
-
-/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
-@property(readwrite, nonatomic) CGFloat quantizationLevels;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.m
deleted file mode 100755
index 03828f4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSmoothToonFilter.m
+++ /dev/null
@@ -1,94 +0,0 @@
-#import "GPUImageSmoothToonFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageToonFilter.h"
-
-@implementation GPUImageSmoothToonFilter
-
-@synthesize threshold;
-@synthesize blurRadiusInPixels;
-@synthesize quantizationLevels;
-@synthesize texelWidth;
-@synthesize texelHeight;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: apply a variable Gaussian blur
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Second pass: run the Sobel edge detection on this blurred image, along with a posterization effect
- toonFilter = [[GPUImageToonFilter alloc] init];
- [self addFilter:toonFilter];
-
- // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
- [blurFilter addTarget:toonFilter];
-
- self.initialFilters = [NSArray arrayWithObject:blurFilter];
- self.terminalFilter = toonFilter;
-
- self.blurRadiusInPixels = 2.0;
- self.threshold = 0.2;
- self.quantizationLevels = 10.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- toonFilter.texelWidth = newValue;
-}
-
-- (CGFloat)texelWidth;
-{
- return toonFilter.texelWidth;
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- toonFilter.texelHeight = newValue;
-}
-
-- (CGFloat)texelHeight;
-{
- return toonFilter.texelHeight;
-}
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- toonFilter.threshold = newValue;
-}
-
-- (CGFloat)threshold;
-{
- return toonFilter.threshold;
-}
-
-- (void)setQuantizationLevels:(CGFloat)newValue;
-{
- toonFilter.quantizationLevels = newValue;
-}
-
-- (CGFloat)quantizationLevels;
-{
- return toonFilter.quantizationLevels;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.h
deleted file mode 100755
index d6b2c13..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GPUImageTwoPassFilter.h"
-
-@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
-{
- GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform;
- BOOL hasOverriddenImageSizeFactor;
-}
-
-// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
-@property(readwrite, nonatomic) CGFloat texelWidth;
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
-@property(readwrite, nonatomic) CGFloat edgeStrength;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.m
deleted file mode 100755
index e193f02..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSobelEdgeDetectionFilter.m
+++ /dev/null
@@ -1,188 +0,0 @@
-#import "GPUImageSobelEdgeDetectionFilter.h"
-#import "GPUImageGrayscaleFilter.h"
-#import "GPUImage3x3ConvolutionFilter.h"
-
-// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- precision mediump float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float edgeStrength;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v)) * edgeStrength;
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#else
-NSString *const kGPUImageSobelEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float edgeStrength;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v)) * edgeStrength;
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageSobelEdgeDetectionFilter
-
-@synthesize texelWidth = _texelWidth;
-@synthesize texelHeight = _texelHeight;
-@synthesize edgeStrength = _edgeStrength;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageSobelEdgeDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- // Do a luminance pass first to reduce the calculations performed at each fragment in the edge detection phase
-
- if (!(self = [super initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:kGPUImageLuminanceFragmentShaderString secondStageVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString secondStageFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- hasOverriddenImageSizeFactor = NO;
-
- texelWidthUniform = [secondFilterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [secondFilterProgram uniformIndex:@"texelHeight"];
- edgeStrengthUniform = [secondFilterProgram uniformIndex:@"edgeStrength"];
-
- self.edgeStrength = 1.0;
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- if (!hasOverriddenImageSizeFactor)
- {
- _texelWidth = 1.0 / filterFrameSize.width;
- _texelHeight = 1.0 / filterFrameSize.height;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- GLProgram *previousProgram = [GPUImageContext sharedImageProcessingContext].currentShaderProgram;
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
- glUniform1f(texelWidthUniform, _texelWidth);
- glUniform1f(texelHeightUniform, _texelHeight);
- [GPUImageContext setActiveShaderProgram:previousProgram];
- });
- }
-}
-
-- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
-{
- [super setUniformsForProgramAtIndex:programIndex];
-
- if (programIndex == 1)
- {
- glUniform1f(texelWidthUniform, _texelWidth);
- glUniform1f(texelHeightUniform, _texelHeight);
- }
-}
-
-- (BOOL)wantsMonochromeInput;
-{
-// return YES;
- return NO;
-}
-
-- (BOOL)providesMonochromeOutput;
-{
-// return YES;
- return NO;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelWidth = newValue;
-
- [self setFloat:_texelWidth forUniform:texelWidthUniform program:secondFilterProgram];
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelHeight = newValue;
-
- [self setFloat:_texelHeight forUniform:texelHeightUniform program:secondFilterProgram];
-}
-
-- (void)setEdgeStrength:(CGFloat)newValue;
-{
- _edgeStrength = newValue;
-
- [self setFloat:_edgeStrength forUniform:edgeStrengthUniform program:secondFilterProgram];
-}
-
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.h
deleted file mode 100755
index 596e156..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImagePicture;
-
-/** A photo filter based on Soft Elegance Photoshop action
- http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603
- */
-
-// Note: If you want to use this effect you have to add
-// lookup_soft_elegance_1.png and lookup_soft_elegance_2.png
-// from Resources folder to your application bundle.
-
-@interface GPUImageSoftEleganceFilter : GPUImageFilterGroup
-{
- GPUImagePicture *lookupImageSource1;
- GPUImagePicture *lookupImageSource2;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.m
deleted file mode 100755
index e1d4e02..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSoftEleganceFilter.m
+++ /dev/null
@@ -1,62 +0,0 @@
-#import "GPUImageSoftEleganceFilter.h"
-#import "GPUImagePicture.h"
-#import "GPUImageLookupFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageAlphaBlendFilter.h"
-
-@implementation GPUImageSoftEleganceFilter
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- UIImage *image1 = [UIImage imageNamed:@"lookup_soft_elegance_1.png"];
- UIImage *image2 = [UIImage imageNamed:@"lookup_soft_elegance_2.png"];
-#else
- NSImage *image1 = [NSImage imageNamed:@"lookup_soft_elegance_1.png"];
- NSImage *image2 = [NSImage imageNamed:@"lookup_soft_elegance_2.png"];
-#endif
-
- NSAssert(image1 && image2,
- @"To use GPUImageSoftEleganceFilter you need to add lookup_soft_elegance_1.png and lookup_soft_elegance_2.png from GPUImage/framework/Resources to your application bundle.");
-
- lookupImageSource1 = [[GPUImagePicture alloc] initWithImage:image1];
- GPUImageLookupFilter *lookupFilter1 = [[GPUImageLookupFilter alloc] init];
- [self addFilter:lookupFilter1];
-
- [lookupImageSource1 addTarget:lookupFilter1 atTextureLocation:1];
- [lookupImageSource1 processImage];
-
- GPUImageGaussianBlurFilter *gaussianBlur = [[GPUImageGaussianBlurFilter alloc] init];
- gaussianBlur.blurRadiusInPixels = 10.0;
- [lookupFilter1 addTarget:gaussianBlur];
- [self addFilter:gaussianBlur];
-
- GPUImageAlphaBlendFilter *alphaBlend = [[GPUImageAlphaBlendFilter alloc] init];
- alphaBlend.mix = 0.14;
- [lookupFilter1 addTarget:alphaBlend];
- [gaussianBlur addTarget:alphaBlend];
- [self addFilter:alphaBlend];
-
- lookupImageSource2 = [[GPUImagePicture alloc] initWithImage:image2];
-
- GPUImageLookupFilter *lookupFilter2 = [[GPUImageLookupFilter alloc] init];
- [alphaBlend addTarget:lookupFilter2];
- [lookupImageSource2 addTarget:lookupFilter2];
- [lookupImageSource2 processImage];
- [self addFilter:lookupFilter2];
-
- self.initialFilters = [NSArray arrayWithObjects:lookupFilter1, nil];
- self.terminalFilter = lookupFilter2;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.h
deleted file mode 100755
index 13fc877..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.h
+++ /dev/null
@@ -1,7 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
-{
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.m
deleted file mode 100755
index 368bce0..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSoftLightBlendFilter.m
+++ /dev/null
@@ -1,54 +0,0 @@
-#import "GPUImageSoftLightBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- mediump vec4 base = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- lowp float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
- gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);
- }
-);
-#else
-NSString *const kGPUImageSoftLightBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 base = texture2D(inputImageTexture, textureCoordinate);
- vec4 overlay = texture2D(inputImageTexture2, textureCoordinate2);
-
- float alphaDivisor = base.a + step(base.a, 0.0); // Protect against a divide-by-zero blacking out things in the output
- gl_FragColor = base * (overlay.a * (base / alphaDivisor) + (2.0 * overlay * (1.0 - (base / alphaDivisor)))) + overlay * (1.0 - base.a) + base * (1.0 - overlay.a);
- }
-);
-#endif
-
-@implementation GPUImageSoftLightBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSoftLightBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.h b/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.h
deleted file mode 100644
index 58b1383..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilter.h"
-
-// This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
-// dimensions, or this won't work correctly
-
-
-@interface GPUImageSolidColorGenerator : GPUImageFilter
-{
- GLint colorUniform;
- GLint useExistingAlphaUniform;
-}
-
-// This color dictates what the output image will be filled with
-@property(readwrite, nonatomic) GPUVector4 color;
-@property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
-
-- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.m b/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.m
deleted file mode 100644
index 9b555ce..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSolidColorGenerator.m
+++ /dev/null
@@ -1,123 +0,0 @@
-#import "GPUImageSolidColorGenerator.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying highp vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform vec4 color;
- uniform float useExistingAlpha;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));
- }
- );
-#else
-NSString *const kGPUSolidColorFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- uniform sampler2D inputImageTexture;
- uniform vec4 color;
- uniform float useExistingAlpha;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- gl_FragColor = vec4(color.rgb, max(textureColor.a, 1.0 - useExistingAlpha));
- }
- );
-#endif
-
-@implementation GPUImageSolidColorGenerator
-
-@synthesize color = _color;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUSolidColorFragmentShaderString]))
- {
- return nil;
- }
-
- colorUniform = [filterProgram uniformIndex:@"color"];
- useExistingAlphaUniform = [filterProgram uniformIndex:@"useExistingAlpha"];
-
- _color = (GPUVector4){0.0f, 0.0f, 0.5f, 1.0f};
- self.useExistingAlpha = NO;
-
- return self;
-}
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- return;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(_color.one, _color.two, _color.three, _color.four);
- glClear(GL_COLOR_BUFFER_BIT);
- });
-}
-
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
-
- if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))
- {
- [self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];
- }
-}
-
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-{
- [super addTarget:newTarget atTextureLocation:textureLocation];
-
- if (!CGSizeEqualToSize(inputTextureSize, CGSizeZero))
- {
- [newTarget setInputSize:inputTextureSize atIndex:textureLocation];
- [newTarget newFrameReadyAtTime:kCMTimeIndefinite atIndex:textureLocation];
- }
-}
-
-- (void)setColor:(GPUVector4)newValue;
-{
- [self setColorRed:newValue.one green:newValue.two blue:newValue.three alpha:newValue.four];
-}
-
-- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
-{
- _color.one = (GLfloat)redComponent;
- _color.two = (GLfloat)greenComponent;
- _color.three = (GLfloat)blueComponent;
- _color.four = (GLfloat)alphaComponent;
-
-// [self setVec4:_color forUniform:colorUniform program:filterProgram];
- runAsynchronouslyOnVideoProcessingQueue(^{
- [self newFrameReadyAtTime:kCMTimeIndefinite atIndex:0];
- });
-}
-
-- (void)setUseExistingAlpha:(BOOL)useExistingAlpha;
-{
- _useExistingAlpha = useExistingAlpha;
-
- [self setInteger:(useExistingAlpha ? 1 : 0) forUniform:useExistingAlphaUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.h
deleted file mode 100644
index 29e3063..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.m
deleted file mode 100644
index 432adc4..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSourceOverBlendFilter.m
+++ /dev/null
@@ -1,51 +0,0 @@
-#import "GPUImageSourceOverBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
-
- gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
- }
-);
-#else
-NSString *const kGPUImageSourceOverBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
-
- gl_FragColor = mix(textureColor, textureColor2, textureColor2.a);
- }
- );
-#endif
-
-@implementation GPUImageSourceOverBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSourceOverBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.h
deleted file mode 100644
index cbbd2af..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageSphereRefractionFilter : GPUImageFilter
-{
- GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
-}
-
-/// The center about which to apply the distortion, with a default of (0.5, 0.5)
-@property(readwrite, nonatomic) CGPoint center;
-/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
-@property(readwrite, nonatomic) CGFloat radius;
-/// The index of refraction for the sphere, with a default of 0.71
-@property(readwrite, nonatomic) CGFloat refractiveIndex;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.m
deleted file mode 100644
index b0f5404..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSphereRefractionFilter.m
+++ /dev/null
@@ -1,179 +0,0 @@
-#import "GPUImageSphereRefractionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 center;
- uniform highp float radius;
- uniform highp float aspectRatio;
- uniform highp float refractiveIndex;
-
- void main()
- {
- highp vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- highp float distanceFromCenter = distance(center, textureCoordinateToUse);
- lowp float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
-
- distanceFromCenter = distanceFromCenter / radius;
-
- highp float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
- highp vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
-
- highp vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
-
- gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;
- }
-);
-#else
-NSString *const kGPUImageSphereRefractionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 center;
- uniform float radius;
- uniform float aspectRatio;
- uniform float refractiveIndex;
-
- void main()
- {
- vec2 textureCoordinateToUse = vec2(textureCoordinate.x, (textureCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
- float distanceFromCenter = distance(center, textureCoordinateToUse);
- float checkForPresenceWithinSphere = step(distanceFromCenter, radius);
-
- distanceFromCenter = distanceFromCenter / radius;
-
- float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter);
- vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth));
-
- vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
-
- gl_FragColor = texture2D(inputImageTexture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere;
- }
-);
-#endif
-
-@interface GPUImageSphereRefractionFilter ()
-
-- (void)adjustAspectRatio;
-
-@property (readwrite, nonatomic) CGFloat aspectRatio;
-
-@end
-
-
-@implementation GPUImageSphereRefractionFilter
-
-@synthesize center = _center;
-@synthesize radius = _radius;
-@synthesize aspectRatio = _aspectRatio;
-@synthesize refractiveIndex = _refractiveIndex;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageSphereRefractionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- radiusUniform = [filterProgram uniformIndex:@"radius"];
- aspectRatioUniform = [filterProgram uniformIndex:@"aspectRatio"];
- centerUniform = [filterProgram uniformIndex:@"center"];
- refractiveIndexUniform = [filterProgram uniformIndex:@"refractiveIndex"];
-
- self.radius = 0.25;
- self.center = CGPointMake(0.5, 0.5);
- self.refractiveIndex = 0.71;
-
- [self setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:0.0];
-
- return self;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- CGSize oldInputSize = inputTextureSize;
- [super setInputSize:newSize atIndex:textureIndex];
-
- if (!CGSizeEqualToSize(oldInputSize, inputTextureSize) && (!CGSizeEqualToSize(newSize, CGSizeZero)) )
- {
- [self adjustAspectRatio];
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)adjustAspectRatio;
-{
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- [self setAspectRatio:(inputTextureSize.width / inputTextureSize.height)];
- }
- else
- {
- [self setAspectRatio:(inputTextureSize.height / inputTextureSize.width)];
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
- [self adjustAspectRatio];
-}
-
-- (void)forceProcessingAtSize:(CGSize)frameSize;
-{
- [super forceProcessingAtSize:frameSize];
- [self adjustAspectRatio];
-}
-
-- (void)setRadius:(CGFloat)newValue;
-{
- _radius = newValue;
-
- [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-- (void)setAspectRatio:(CGFloat)newValue;
-{
- _aspectRatio = newValue;
-
- [self setFloat:_aspectRatio forUniform:aspectRatioUniform program:filterProgram];
-}
-
-- (void)setRefractiveIndex:(CGFloat)newValue;
-{
- _refractiveIndex = newValue;
-
- [self setFloat:_refractiveIndex forUniform:refractiveIndexUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.h b/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.h
deleted file mode 100755
index e4db59b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.h
+++ /dev/null
@@ -1,24 +0,0 @@
-#import "GPUImageVideoCamera.h"
-
-void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);
-void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);
-
-@interface GPUImageStillCamera : GPUImageVideoCamera
-
-/** The JPEG compression quality to use when capturing a photo as a JPEG.
- */
-@property CGFloat jpegCompressionQuality;
-
-// Only reliably set inside the context of the completion handler of one of the capture methods
-@property (readonly) NSDictionary *currentCaptureMetadata;
-
-// Photography controls
-- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
-- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
-- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
-- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
-- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
-- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
-- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.m b/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.m
deleted file mode 100755
index 447f79f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageStillCamera.m
+++ /dev/null
@@ -1,338 +0,0 @@
-// 2448x3264 pixel image = 31,961,088 bytes for uncompressed RGBA
-
-#import "GPUImageStillCamera.h"
-
-void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress)
-{
- free((void *)baseAddress);
-}
-
-void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer)
-{
- // CVPixelBufferCreateWithPlanarBytes for YUV input
-
- CGSize originalSize = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
-
- CVPixelBufferLockBaseAddress(cameraFrame, 0);
- GLubyte *sourceImageBytes = CVPixelBufferGetBaseAddress(cameraFrame);
- CGDataProviderRef dataProvider = CGDataProviderCreateWithData(NULL, sourceImageBytes, CVPixelBufferGetBytesPerRow(cameraFrame) * originalSize.height, NULL);
- CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
- CGImageRef cgImageFromBytes = CGImageCreate((int)originalSize.width, (int)originalSize.height, 8, 32, CVPixelBufferGetBytesPerRow(cameraFrame), genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst, dataProvider, NULL, NO, kCGRenderingIntentDefault);
-
- GLubyte *imageData = (GLubyte *) calloc(1, (int)finalSize.width * (int)finalSize.height * 4);
-
- CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)finalSize.width, (int)finalSize.height, 8, (int)finalSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
- CGContextDrawImage(imageContext, CGRectMake(0.0, 0.0, finalSize.width, finalSize.height), cgImageFromBytes);
- CGImageRelease(cgImageFromBytes);
- CGContextRelease(imageContext);
- CGColorSpaceRelease(genericRGBColorspace);
- CGDataProviderRelease(dataProvider);
-
- CVPixelBufferRef pixel_buffer = NULL;
- CVPixelBufferCreateWithBytes(kCFAllocatorDefault, finalSize.width, finalSize.height, kCVPixelFormatType_32BGRA, imageData, finalSize.width * 4, stillImageDataReleaseCallback, NULL, NULL, &pixel_buffer);
- CMVideoFormatDescriptionRef videoInfo = NULL;
- CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixel_buffer, &videoInfo);
-
- CMTime frameTime = CMTimeMake(1, 30);
- CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
-
- CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixel_buffer, YES, NULL, NULL, videoInfo, &timing, sampleBuffer);
- CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
- CFRelease(videoInfo);
- CVPixelBufferRelease(pixel_buffer);
-}
-
-@interface GPUImageStillCamera ()
-{
- AVCaptureStillImageOutput *photoOutput;
-}
-
-// Methods calling this are responsible for calling dispatch_semaphore_signal(frameRenderingSemaphore) somewhere inside the block
-- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block;
-
-@end
-
-@implementation GPUImageStillCamera {
- BOOL requiresFrontCameraTextureCacheCorruptionWorkaround;
-}
-
-@synthesize currentCaptureMetadata = _currentCaptureMetadata;
-@synthesize jpegCompressionQuality = _jpegCompressionQuality;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
-{
- if (!(self = [super initWithSessionPreset:sessionPreset cameraPosition:cameraPosition]))
- {
- return nil;
- }
-
- /* Detect iOS version < 6 which require a texture cache corruption workaround */
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- requiresFrontCameraTextureCacheCorruptionWorkaround = [[[UIDevice currentDevice] systemVersion] compare:@"6.0" options:NSNumericSearch] == NSOrderedAscending;
-#pragma clang diagnostic pop
-
- [self.captureSession beginConfiguration];
-
- photoOutput = [[AVCaptureStillImageOutput alloc] init];
-
- // Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device
-// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
- if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
- {
- BOOL supportsFullYUVRange = NO;
- NSArray *supportedPixelFormats = photoOutput.availableImageDataCVPixelFormatTypes;
- for (NSNumber *currentPixelFormat in supportedPixelFormats)
- {
- if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
- {
- supportsFullYUVRange = YES;
- }
- }
-
- if (supportsFullYUVRange)
- {
- [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- }
- else
- {
- [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- }
- }
- else
- {
- captureAsYUV = NO;
- [photoOutput setOutputSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- }
-
- [self.captureSession addOutput:photoOutput];
-
- [self.captureSession commitConfiguration];
-
- self.jpegCompressionQuality = 0.8;
-
- return self;
-}
-
-- (id)init;
-{
- if (!(self = [self initWithSessionPreset:AVCaptureSessionPresetPhoto cameraPosition:AVCaptureDevicePositionBack]))
- {
- return nil;
- }
- return self;
-}
-
-- (void)removeInputsAndOutputs;
-{
- [self.captureSession removeOutput:photoOutput];
- [super removeInputsAndOutputs];
-}
-
-#pragma mark -
-#pragma mark Photography controls
-
-- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block
-{
- NSLog(@"If you want to use the method capturePhotoAsSampleBufferWithCompletionHandler:, you must comment out the line in GPUImageStillCamera.m in the method initWithSessionPreset:cameraPosition: which sets the CVPixelBufferPixelFormatTypeKey, as well as uncomment the rest of the method capturePhotoAsSampleBufferWithCompletionHandler:. However, if you do this you cannot use any of the photo capture methods to take a photo if you also supply a filter.");
-
- /*dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
-
- [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
- block(imageSampleBuffer, error);
- }];
-
- dispatch_semaphore_signal(frameRenderingSemaphore);
-
- */
-
- return;
-}
-
-- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
-{
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- UIImage *filteredPhoto = nil;
-
- if(!error){
- filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];
- }
- dispatch_semaphore_signal(frameRenderingSemaphore);
-
- block(filteredPhoto, error);
- }];
-}
-
-- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block {
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- UIImage *filteredPhoto = nil;
-
- if(!error) {
- filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
- }
- dispatch_semaphore_signal(frameRenderingSemaphore);
-
- block(filteredPhoto, error);
- }];
-}
-
-- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
-{
-// reportAvailableMemoryForGPUImage(@"Before Capture");
-
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- NSData *dataForJPEGFile = nil;
-
- if(!error){
- @autoreleasepool {
- UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];
- dispatch_semaphore_signal(frameRenderingSemaphore);
-// reportAvailableMemoryForGPUImage(@"After UIImage generation");
-
- dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto,self.jpegCompressionQuality);
-// reportAvailableMemoryForGPUImage(@"After JPEG generation");
- }
-
-// reportAvailableMemoryForGPUImage(@"After autorelease pool");
- }else{
- dispatch_semaphore_signal(frameRenderingSemaphore);
- }
-
- block(dataForJPEGFile, error);
- }];
-}
-
-- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedImage, NSError *error))block {
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- NSData *dataForJPEGFile = nil;
-
- if(!error) {
- @autoreleasepool {
- UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
- dispatch_semaphore_signal(frameRenderingSemaphore);
-
- dataForJPEGFile = UIImageJPEGRepresentation(filteredPhoto, self.jpegCompressionQuality);
- }
- } else {
- dispatch_semaphore_signal(frameRenderingSemaphore);
- }
-
- block(dataForJPEGFile, error);
- }];
-}
-
-- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
-{
-
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- NSData *dataForPNGFile = nil;
-
- if(!error){
- @autoreleasepool {
- UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebuffer];
- dispatch_semaphore_signal(frameRenderingSemaphore);
- dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
- }
- }else{
- dispatch_semaphore_signal(frameRenderingSemaphore);
- }
-
- block(dataForPNGFile, error);
- }];
-
- return;
-}
-
-- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
-{
-
- [self capturePhotoProcessedUpToFilter:finalFilterInChain withImageOnGPUHandler:^(NSError *error) {
- NSData *dataForPNGFile = nil;
-
- if(!error){
- @autoreleasepool {
- UIImage *filteredPhoto = [finalFilterInChain imageFromCurrentFramebufferWithOrientation:orientation];
- dispatch_semaphore_signal(frameRenderingSemaphore);
- dataForPNGFile = UIImagePNGRepresentation(filteredPhoto);
- }
- }else{
- dispatch_semaphore_signal(frameRenderingSemaphore);
- }
-
- block(dataForPNGFile, error);
- }];
-
- return;
-}
-
-#pragma mark - Private Methods
-
-- (void)capturePhotoProcessedUpToFilter:(GPUImageOutput *)finalFilterInChain withImageOnGPUHandler:(void (^)(NSError *error))block
-{
- dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
-
- if(photoOutput.isCapturingStillImage){
- block([NSError errorWithDomain:AVFoundationErrorDomain code:AVErrorMaximumStillImageCaptureRequestsExceeded userInfo:nil]);
- return;
- }
-
- [photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
- if(imageSampleBuffer == NULL){
- block(error);
- return;
- }
-
- // For now, resize photos to fix within the max texture size of the GPU
- CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);
-
- CGSize sizeOfPhoto = CGSizeMake(CVPixelBufferGetWidth(cameraFrame), CVPixelBufferGetHeight(cameraFrame));
- CGSize scaledImageSizeToFitOnGPU = [GPUImageContext sizeThatFitsWithinATextureForSize:sizeOfPhoto];
- if (!CGSizeEqualToSize(sizeOfPhoto, scaledImageSizeToFitOnGPU))
- {
- CMSampleBufferRef sampleBuffer = NULL;
-
- if (CVPixelBufferGetPlaneCount(cameraFrame) > 0)
- {
- NSAssert(NO, @"Error: no downsampling for YUV input in the framework yet");
- }
- else
- {
- GPUImageCreateResizedSampleBuffer(cameraFrame, scaledImageSizeToFitOnGPU, &sampleBuffer);
- }
-
- dispatch_semaphore_signal(frameRenderingSemaphore);
- [finalFilterInChain useNextFrameForImageCapture];
- [self captureOutput:photoOutput didOutputSampleBuffer:sampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
- dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
- if (sampleBuffer != NULL)
- CFRelease(sampleBuffer);
- }
- else
- {
- // This is a workaround for the corrupt images that are sometimes returned when taking a photo with the front camera and using the iOS 5.0 texture caches
- AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
- if ( (currentCameraPosition != AVCaptureDevicePositionFront) || (![GPUImageContext supportsFastTextureUpload]) || !requiresFrontCameraTextureCacheCorruptionWorkaround)
- {
- dispatch_semaphore_signal(frameRenderingSemaphore);
- [finalFilterInChain useNextFrameForImageCapture];
- [self captureOutput:photoOutput didOutputSampleBuffer:imageSampleBuffer fromConnection:[[photoOutput connections] objectAtIndex:0]];
- dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
- }
- }
-
- CFDictionaryRef metadata = CMCopyDictionaryOfAttachments(NULL, imageSampleBuffer, kCMAttachmentMode_ShouldPropagate);
- _currentCaptureMetadata = (__bridge_transfer NSDictionary *)metadata;
-
- block(nil);
-
- _currentCaptureMetadata = nil;
- }];
-}
-
-
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.h
deleted file mode 100755
index 0780309..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Creates a stretch distortion of the image
- */
-@interface GPUImageStretchDistortionFilter : GPUImageFilter {
- GLint centerUniform;
-}
-
-/** The center about which to apply the distortion, with a default of (0.5, 0.5)
- */
-@property(readwrite, nonatomic) CGPoint center;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.m
deleted file mode 100755
index d38cac3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageStretchDistortionFilter.m
+++ /dev/null
@@ -1,99 +0,0 @@
-#import "GPUImageStretchDistortionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 center;
-
- void main()
- {
- highp vec2 normCoord = 2.0 * textureCoordinate - 1.0;
- highp vec2 normCenter = 2.0 * center - 1.0;
-
- normCoord -= normCenter;
- mediump vec2 s = sign(normCoord);
- normCoord = abs(normCoord);
- normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;
- normCoord = s * normCoord;
-
- normCoord += normCenter;
-
- mediump vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
-
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
-
- }
-);
-#else
-NSString *const kGPUImageStretchDistortionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 center;
-
- void main()
- {
- vec2 normCoord = 2.0 * textureCoordinate - 1.0;
- vec2 normCenter = 2.0 * center - 1.0;
-
- normCoord -= normCenter;
- vec2 s = sign(normCoord);
- normCoord = abs(normCoord);
- normCoord = 0.5 * normCoord + 0.5 * smoothstep(0.25, 0.5, normCoord) * normCoord;
- normCoord = s * normCoord;
-
- normCoord += normCenter;
-
- vec2 textureCoordinateToUse = normCoord / 2.0 + 0.5;
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse);
- }
-);
-#endif
-
-@implementation GPUImageStretchDistortionFilter
-
-@synthesize center = _center;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageStretchDistortionFragmentShaderString]))
- {
- return nil;
- }
-
- centerUniform = [filterProgram uniformIndex:@"center"];
-
- self.center = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.h
deleted file mode 100755
index 8dee821..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.m
deleted file mode 100755
index 8938bae..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSubtractBlendFilter.m
+++ /dev/null
@@ -1,52 +0,0 @@
-#import "GPUImageSubtractBlendFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageSubtractBlendFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
-
- gl_FragColor = vec4(textureColor.rgb - textureColor2.rgb, textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageSubtractBlendFilter
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSubtractBlendFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.h
deleted file mode 100755
index ed7d012..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Creates a swirl distortion on the image
- */
-@interface GPUImageSwirlFilter : GPUImageFilter
-{
- GLint radiusUniform, centerUniform, angleUniform;
-}
-
-/// The center about which to apply the distortion, with a default of (0.5, 0.5)
-@property(readwrite, nonatomic) CGPoint center;
-/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5
-@property(readwrite, nonatomic) CGFloat radius;
-/// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0
-@property(readwrite, nonatomic) CGFloat angle;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.m
deleted file mode 100755
index 5462bc6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageSwirlFilter.m
+++ /dev/null
@@ -1,123 +0,0 @@
-#import "GPUImageSwirlFilter.h"
-
-// Adapted from the shader example here: http://www.geeks3d.com/20110428/shader-library-swirl-post-processing-filter-in-glsl/
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 center;
- uniform highp float radius;
- uniform highp float angle;
-
- void main()
- {
- highp vec2 textureCoordinateToUse = textureCoordinate;
- highp float dist = distance(center, textureCoordinate);
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- highp float percent = (radius - dist) / radius;
- highp float theta = percent * percent * angle * 8.0;
- highp float s = sin(theta);
- highp float c = cos(theta);
- textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));
- textureCoordinateToUse += center;
- }
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
-
- }
-);
-#else
-NSString *const kGPUImageSwirlFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 center;
- uniform float radius;
- uniform float angle;
-
- void main()
- {
- vec2 textureCoordinateToUse = textureCoordinate;
- float dist = distance(center, textureCoordinate);
- if (dist < radius)
- {
- textureCoordinateToUse -= center;
- float percent = (radius - dist) / radius;
- float theta = percent * percent * angle * 8.0;
- float s = sin(theta);
- float c = cos(theta);
- textureCoordinateToUse = vec2(dot(textureCoordinateToUse, vec2(c, -s)), dot(textureCoordinateToUse, vec2(s, c)));
- textureCoordinateToUse += center;
- }
-
- gl_FragColor = texture2D(inputImageTexture, textureCoordinateToUse );
- }
-);
-#endif
-
-@implementation GPUImageSwirlFilter
-
-@synthesize center = _center;
-@synthesize radius = _radius;
-@synthesize angle = _angle;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageSwirlFragmentShaderString]))
- {
- return nil;
- }
-
- radiusUniform = [filterProgram uniformIndex:@"radius"];
- angleUniform = [filterProgram uniformIndex:@"angle"];
- centerUniform = [filterProgram uniformIndex:@"center"];
-
- self.radius = 0.5;
- self.angle = 1.0;
- self.center = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setCenter:self.center];
-}
-
-- (void)setRadius:(CGFloat)newValue;
-{
- _radius = newValue;
-
- [self setFloat:_radius forUniform:radiusUniform program:filterProgram];
-}
-
-- (void)setAngle:(CGFloat)newValue;
-{
- _angle = newValue;
-
- [self setFloat:_angle forUniform:angleUniform program:filterProgram];
-}
-
-- (void)setCenter:(CGPoint)newValue;
-{
- _center = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:centerUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.h b/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.h
deleted file mode 100755
index 8190305..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImageOutput.h"
-
-@interface GPUImageTextureInput : GPUImageOutput
-{
- CGSize textureSize;
-}
-
-// Initialization and teardown
-- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
-
-// Image rendering
-- (void)processTextureWithFrameTime:(CMTime)frameTime;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.m b/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.m
deleted file mode 100755
index ad3ca1d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTextureInput.m
+++ /dev/null
@@ -1,46 +0,0 @@
-#import "GPUImageTextureInput.h"
-
-@implementation GPUImageTextureInput
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
- });
-
- textureSize = newTextureSize;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:newTextureSize overriddenTexture:newInputTexture];
- });
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Image rendering
-
-- (void)processTextureWithFrameTime:(CMTime)frameTime;
-{
- runAsynchronouslyOnVideoProcessingQueue(^{
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [currentTarget setInputSize:textureSize atIndex:targetTextureIndex];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
- [currentTarget newFrameReadyAtTime:frameTime atIndex:targetTextureIndex];
- }
- });
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.h b/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.h
deleted file mode 100755
index 05e1f36..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.h
+++ /dev/null
@@ -1,21 +0,0 @@
-#import
-#import "GPUImageContext.h"
-
-@protocol GPUImageTextureOutputDelegate;
-
-@interface GPUImageTextureOutput : NSObject
-{
- GPUImageFramebuffer *firstInputFramebuffer;
-}
-
-@property(readwrite, unsafe_unretained, nonatomic) id delegate;
-@property(readonly) GLuint texture;
-@property(nonatomic) BOOL enabled;
-
-- (void)doneWithTexture;
-
-@end
-
-@protocol GPUImageTextureOutputDelegate
-- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.m b/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.m
deleted file mode 100755
index 1e1f24f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTextureOutput.m
+++ /dev/null
@@ -1,83 +0,0 @@
-#import "GPUImageTextureOutput.h"
-
-@implementation GPUImageTextureOutput
-
-@synthesize delegate = _delegate;
-@synthesize texture = _texture;
-@synthesize enabled;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- self.enabled = YES;
-
- return self;
-}
-
-- (void)doneWithTexture;
-{
- [firstInputFramebuffer unlock];
-}
-
-#pragma mark -
-#pragma mark GPUImageInput protocol
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- [_delegate newFrameReadyFromTextureOutput:self];
-}
-
-- (NSInteger)nextAvailableTextureIndex;
-{
- return 0;
-}
-
-// TODO: Deal with the fact that the texture changes regularly as a result of the caching
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- firstInputFramebuffer = newInputFramebuffer;
- [firstInputFramebuffer lock];
-
- _texture = [firstInputFramebuffer texture];
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
-}
-
-- (CGSize)maximumOutputSize;
-{
- return CGSizeZero;
-}
-
-- (void)endProcessing
-{
-}
-
-- (BOOL)shouldIgnoreUpdatesToThisTarget;
-{
- return NO;
-}
-
-- (BOOL)wantsMonochromeInput;
-{
- return NO;
-}
-
-- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
-{
-
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.h
deleted file mode 100644
index 5ecd53e..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.h
+++ /dev/null
@@ -1,21 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-extern NSString *const kGPUImageThreeInputTextureVertexShaderString;
-
-@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
-{
- GPUImageFramebuffer *thirdInputFramebuffer;
-
- GLint filterThirdTextureCoordinateAttribute;
- GLint filterInputTextureUniform3;
- GPUImageRotationMode inputRotation3;
- GLuint filterSourceTexture3;
- CMTime thirdFrameTime;
-
- BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
- BOOL thirdFrameCheckDisabled;
-}
-
-- (void)disableThirdFrameCheck;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.m
deleted file mode 100644
index 2f4f113..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThreeInputFilter.m
+++ /dev/null
@@ -1,328 +0,0 @@
-#import "GPUImageThreeInputFilter.h"
-
-
-NSString *const kGPUImageThreeInputTextureVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
- attribute vec4 inputTextureCoordinate2;
- attribute vec4 inputTextureCoordinate3;
-
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
- varying vec2 textureCoordinate3;
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = inputTextureCoordinate.xy;
- textureCoordinate2 = inputTextureCoordinate2.xy;
- textureCoordinate3 = inputTextureCoordinate3.xy;
- }
-);
-
-@implementation GPUImageThreeInputFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [self initWithVertexShaderFromString:kGPUImageThreeInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- inputRotation3 = kGPUImageNoRotation;
-
- hasSetSecondTexture = NO;
-
- hasReceivedThirdFrame = NO;
- thirdFrameWasVideo = NO;
- thirdFrameCheckDisabled = NO;
-
- thirdFrameTime = kCMTimeInvalid;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
- filterThirdTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate3"];
-
- filterInputTextureUniform3 = [filterProgram uniformIndex:@"inputImageTexture3"]; // This does assume a name of "inputImageTexture3" for the third input texture in the fragment shader
- glEnableVertexAttribArray(filterThirdTextureCoordinateAttribute);
- });
-
- return self;
-}
-
-- (void)initializeAttributes;
-{
- [super initializeAttributes];
- [filterProgram addAttribute:@"inputTextureCoordinate3"];
-}
-
-- (void)disableThirdFrameCheck;
-{
- thirdFrameCheckDisabled = YES;
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- [secondInputFramebuffer unlock];
- [thirdInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform, 2);
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform2, 3);
-
- glActiveTexture(GL_TEXTURE4);
- glBindTexture(GL_TEXTURE_2D, [thirdInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform3, 4);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
- glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
- glVertexAttribPointer(filterThirdTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation3]);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- [firstInputFramebuffer unlock];
- [secondInputFramebuffer unlock];
- [thirdInputFramebuffer unlock];
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (NSInteger)nextAvailableTextureIndex;
-{
- if (hasSetSecondTexture)
- {
- return 2;
- }
- else if (hasSetFirstTexture)
- {
- return 1;
- }
- else
- {
- return 0;
- }
-}
-
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- firstInputFramebuffer = newInputFramebuffer;
- hasSetFirstTexture = YES;
- [firstInputFramebuffer lock];
- }
- else if (textureIndex == 1)
- {
- secondInputFramebuffer = newInputFramebuffer;
- hasSetSecondTexture = YES;
- [secondInputFramebuffer lock];
- }
- else
- {
- thirdInputFramebuffer = newInputFramebuffer;
- [thirdInputFramebuffer lock];
- }
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- [super setInputSize:newSize atIndex:textureIndex];
-
- if (CGSizeEqualToSize(newSize, CGSizeZero))
- {
- hasSetFirstTexture = NO;
- }
- }
- else if (textureIndex == 1)
- {
- if (CGSizeEqualToSize(newSize, CGSizeZero))
- {
- hasSetSecondTexture = NO;
- }
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- inputRotation = newInputRotation;
- }
- else if (textureIndex == 1)
- {
- inputRotation2 = newInputRotation;
- }
- else
- {
- inputRotation3 = newInputRotation;
- }
-}
-
-- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
-{
- CGSize rotatedSize = sizeToRotate;
-
- GPUImageRotationMode rotationToCheck;
- if (textureIndex == 0)
- {
- rotationToCheck = inputRotation;
- }
- else if (textureIndex == 1)
- {
- rotationToCheck = inputRotation2;
- }
- else
- {
- rotationToCheck = inputRotation3;
- }
-
- if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
- {
- rotatedSize.width = sizeToRotate.height;
- rotatedSize.height = sizeToRotate.width;
- }
-
- return rotatedSize;
-}
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- // You can set up infinite update loops, so this helps to short circuit them
- if (hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame)
- {
- return;
- }
-
- BOOL updatedMovieFrameOppositeStillImage = NO;
-
- if (textureIndex == 0)
- {
- hasReceivedFirstFrame = YES;
- firstFrameTime = frameTime;
- if (secondFrameCheckDisabled)
- {
- hasReceivedSecondFrame = YES;
- }
- if (thirdFrameCheckDisabled)
- {
- hasReceivedThirdFrame = YES;
- }
-
- if (!CMTIME_IS_INDEFINITE(frameTime))
- {
- if CMTIME_IS_INDEFINITE(secondFrameTime)
- {
- updatedMovieFrameOppositeStillImage = YES;
- }
- }
- }
- else if (textureIndex == 1)
- {
- hasReceivedSecondFrame = YES;
- secondFrameTime = frameTime;
- if (firstFrameCheckDisabled)
- {
- hasReceivedFirstFrame = YES;
- }
- if (thirdFrameCheckDisabled)
- {
- hasReceivedThirdFrame = YES;
- }
-
- if (!CMTIME_IS_INDEFINITE(frameTime))
- {
- if CMTIME_IS_INDEFINITE(firstFrameTime)
- {
- updatedMovieFrameOppositeStillImage = YES;
- }
- }
- }
- else
- {
- hasReceivedThirdFrame = YES;
- thirdFrameTime = frameTime;
- if (firstFrameCheckDisabled)
- {
- hasReceivedFirstFrame = YES;
- }
- if (secondFrameCheckDisabled)
- {
- hasReceivedSecondFrame = YES;
- }
-
- if (!CMTIME_IS_INDEFINITE(frameTime))
- {
- if CMTIME_IS_INDEFINITE(firstFrameTime)
- {
- updatedMovieFrameOppositeStillImage = YES;
- }
- }
- }
-
- // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
- if ((hasReceivedFirstFrame && hasReceivedSecondFrame && hasReceivedThirdFrame) || updatedMovieFrameOppositeStillImage)
- {
- static const GLfloat imageVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- [self renderToTextureWithVertices:imageVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-
- hasReceivedFirstFrame = NO;
- hasReceivedSecondFrame = NO;
- hasReceivedThirdFrame = NO;
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.h
deleted file mode 100755
index 2036030..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.h
+++ /dev/null
@@ -1,12 +0,0 @@
-#import "GPUImageSobelEdgeDetectionFilter.h"
-
-@interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
-{
- GLint thresholdUniform;
-}
-
-/** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default
- */
-@property(readwrite, nonatomic) CGFloat threshold;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.m
deleted file mode 100755
index 553c600..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdEdgeDetectionFilter.m
+++ /dev/null
@@ -1,145 +0,0 @@
-#import "GPUImageThresholdEdgeDetectionFilter.h"
-
-@implementation GPUImageThresholdEdgeDetectionFilter
-
-// Invert the colorspace for a sketch
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float threshold;
-
- uniform float edgeStrength;
-
- void main()
- {
-// float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
-// float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
-// float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-// float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
-// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
-// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-// float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + leftIntensity + 2.0 * centerIntensity + rightIntensity;
-// float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomIntensity + 2.0 * centerIntensity + topIntensity;
- float h = (centerIntensity - topIntensity) + (bottomIntensity - centerIntensity);
- float v = (centerIntensity - leftIntensity) + (rightIntensity - centerIntensity);
-// float h = (centerIntensity - topIntensity);
-// float j = (topIntensity - centerIntensity);
-// h = max(h,j);
-// j = abs(h);
-// float v = (centerIntensity - leftIntensity);
-
- float mag = length(vec2(h, v)) * edgeStrength;
- mag = step(threshold, mag);
-
-// float mag = abs(h);
-
-// gl_FragColor = vec4(h, h, h, 1.0);
-// gl_FragColor = vec4(texture2D(inputImageTexture, textureCoordinate));
-// gl_FragColor = vec4(h, centerIntensity, j, 1.0);
- gl_FragColor = vec4(mag, mag, mag, 1.0);
- }
-);
-#else
-NSString *const kGPUImageThresholdEdgeDetectionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float threshold;
-
- uniform float edgeStrength;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- h = max(0.0, h);
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
- v = max(0.0, v);
-
- float mag = length(vec2(h, v)) * edgeStrength;
- mag = step(threshold, mag);
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-@synthesize threshold = _threshold;
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithFragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- thresholdUniform = [secondFilterProgram uniformIndex:@"threshold"];
- self.threshold = 0.25;
- self.edgeStrength = 1.0;
-
- return self;
-}
-
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdEdgeDetectionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- _threshold = newValue;
-
- [self setFloat:_threshold forUniform:thresholdUniform program:secondFilterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.h
deleted file mode 100644
index fda5897..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageThresholdEdgeDetectionFilter.h"
-
-@interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.m
deleted file mode 100644
index d24e9de..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdSketchFilter.m
+++ /dev/null
@@ -1,103 +0,0 @@
-#import "GPUImageThresholdSketchFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform lowp float threshold;
- uniform float edgeStrength;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = (length(vec2(h, v)) * edgeStrength);
- mag = step(threshold, mag);
- mag = 1.0 - mag;
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#else
-NSString *const kGPUImageThresholdSketchFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
- uniform float threshold;
- uniform float edgeStrength;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = 1.0 - length(vec2(h, v) * edgeStrength);
- mag = step(threshold, mag);
-
- gl_FragColor = vec4(vec3(mag), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageThresholdSketchFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageThresholdSketchFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h
deleted file mode 100644
index 9c6e5d7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
-{
- GLint thresholdUniform;
-}
-
-/** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default
- */
-@property(readwrite, nonatomic) CGFloat threshold;
-
-- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m
deleted file mode 100644
index 439d311..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageThresholdedNonMaximumSuppressionFilter.m
+++ /dev/null
@@ -1,297 +0,0 @@
-#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 leftTextureCoordinate;
- varying highp vec2 rightTextureCoordinate;
-
- varying highp vec2 topTextureCoordinate;
- varying highp vec2 topLeftTextureCoordinate;
- varying highp vec2 topRightTextureCoordinate;
-
- varying highp vec2 bottomTextureCoordinate;
- varying highp vec2 bottomLeftTextureCoordinate;
- varying highp vec2 bottomRightTextureCoordinate;
-
- uniform lowp float threshold;
-
- void main()
- {
- lowp float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- lowp float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- lowp float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- lowp vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
- lowp float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
- lowp float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
- lowp float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- lowp float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- lowp float multiplier = 1.0 - step(centerColor.r, topColor);
- multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
-
- lowp float maxValue = max(centerColor.r, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- lowp float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
- finalValue = step(threshold, finalValue);
-
- gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
-//
-// gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
- }
-);
-
-NSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying highp vec2 textureCoordinate;
- varying highp vec2 leftTextureCoordinate;
- varying highp vec2 rightTextureCoordinate;
-
- varying highp vec2 topTextureCoordinate;
- varying highp vec2 topLeftTextureCoordinate;
- varying highp vec2 topRightTextureCoordinate;
-
- varying highp vec2 bottomTextureCoordinate;
- varying highp vec2 bottomLeftTextureCoordinate;
- varying highp vec2 bottomRightTextureCoordinate;
-
- uniform lowp float threshold;
- uniform highp float texelWidth;
- uniform highp float texelHeight;
-
- highp float encodedIntensity(highp vec3 sourceColor)
- {
- return (sourceColor.b * 256.0 * 256.0 + sourceColor.g * 256.0 + sourceColor.r);
- }
-
- void main()
- {
- highp float bottomColor = encodedIntensity(texture2D(inputImageTexture, bottomTextureCoordinate).rgb);
- highp float bottomLeftColor = encodedIntensity(texture2D(inputImageTexture, bottomLeftTextureCoordinate).rgb);
- highp float bottomRightColor = encodedIntensity(texture2D(inputImageTexture, bottomRightTextureCoordinate).rgb);
- highp float centerColor = encodedIntensity(texture2D(inputImageTexture, textureCoordinate).rgb);
- highp float leftColor = encodedIntensity(texture2D(inputImageTexture, leftTextureCoordinate).rgb);
- highp float rightColor = encodedIntensity(texture2D(inputImageTexture, rightTextureCoordinate).rgb);
- highp float topColor = encodedIntensity(texture2D(inputImageTexture, topTextureCoordinate).rgb);
- highp float topRightColor = encodedIntensity(texture2D(inputImageTexture, topRightTextureCoordinate).rgb);
- highp float topLeftColor = encodedIntensity(texture2D(inputImageTexture, topLeftTextureCoordinate).rgb);
-
- highp float secondStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -2.0 * texelHeight)).rgb);
- highp float secondStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, -1.0 * texelHeight)).rgb);
- highp float secondStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 0.0)).rgb);
- highp float secondStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 1.0 * texelHeight)).rgb);
- highp float secondStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-2.0 * texelWidth, 2.0 * texelHeight)).rgb);
- highp float secondStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, 2.0 * texelHeight)).rgb);
- highp float secondStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, 2.0 * texelHeight)).rgb);
- highp float secondStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, 2.0 * texelHeight)).rgb);
-
- highp float thirdStageColor1 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(-1.0 * texelWidth, -2.0 * texelHeight)).rgb);
- highp float thirdStageColor2 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(0.0, -2.0 * texelHeight)).rgb);
- highp float thirdStageColor3 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(1.0 * texelWidth, -2.0 * texelHeight)).rgb);
- highp float thirdStageColor4 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -2.0 * texelHeight)).rgb);
- highp float thirdStageColor5 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, -1.0 * texelHeight)).rgb);
- highp float thirdStageColor6 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 0.0)).rgb);
- highp float thirdStageColor7 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 1.0 * texelHeight)).rgb);
- highp float thirdStageColor8 = encodedIntensity(texture2D(inputImageTexture, textureCoordinate + vec2(2.0 * texelWidth, 2.0 * texelHeight)).rgb);
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- highp float multiplier = 1.0 - step(centerColor, topColor);
- multiplier = multiplier * (1.0 - step(centerColor, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor, bottomLeftColor));
-
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor1));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor2));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor3));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor4));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor5));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor6));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor7));
- multiplier = multiplier * (1.0 - step(centerColor, secondStageColor8));
-
- highp float maxValue = max(centerColor, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- maxValue = max(maxValue, thirdStageColor1);
- maxValue = max(maxValue, thirdStageColor2);
- maxValue = max(maxValue, thirdStageColor3);
- maxValue = max(maxValue, thirdStageColor4);
- maxValue = max(maxValue, thirdStageColor5);
- maxValue = max(maxValue, thirdStageColor6);
- maxValue = max(maxValue, thirdStageColor7);
- maxValue = max(maxValue, thirdStageColor8);
-
- highp float midValue = centerColor * step(maxValue, centerColor) * multiplier;
- highp float finalValue = step(threshold, midValue);
-
- gl_FragColor = vec4(finalValue * centerColor, topLeftColor, topRightColor, topColor);
- }
-);
-#else
-NSString *const kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform float threshold;
-
- void main()
- {
- float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
- float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- float multiplier = 1.0 - step(centerColor.r, topColor);
- multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
-
- float maxValue = max(centerColor.r, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
- finalValue = step(threshold, finalValue);
-
- gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
- //
- // gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
- }
-);
-
-NSString *const kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform float threshold;
-
- void main()
- {
- float bottomColor = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float bottomLeftColor = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float bottomRightColor = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- vec4 centerColor = texture2D(inputImageTexture, textureCoordinate);
- float leftColor = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightColor = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float topColor = texture2D(inputImageTexture, topTextureCoordinate).r;
- float topRightColor = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftColor = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
-
- // Use a tiebreaker for pixels to the left and immediately above this one
- float multiplier = 1.0 - step(centerColor.r, topColor);
- multiplier = multiplier * (1.0 - step(centerColor.r, topLeftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, leftColor));
- multiplier = multiplier * (1.0 - step(centerColor.r, bottomLeftColor));
-
- float maxValue = max(centerColor.r, bottomColor);
- maxValue = max(maxValue, bottomRightColor);
- maxValue = max(maxValue, rightColor);
- maxValue = max(maxValue, topRightColor);
-
- float finalValue = centerColor.r * step(maxValue, centerColor.r) * multiplier;
- finalValue = step(threshold, finalValue);
-
- gl_FragColor = vec4(finalValue, finalValue, finalValue, 1.0);
- //
- // gl_FragColor = vec4((centerColor.rgb * step(maxValue, step(threshold, centerColor.r)) * multiplier), 1.0);
- }
- );
-#endif
-
-@implementation GPUImageThresholdedNonMaximumSuppressionFilter
-
-@synthesize threshold = _threshold;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithPackedColorspace:NO]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;
-{
- NSString *shaderString;
- if (inputUsesPackedColorspace)
- {
- shaderString = kGPUImageThresholdedNonMaximumSuppressionPackedColorspaceFragmentShaderString;
- }
- else
- {
- shaderString = kGPUImageThresholdedNonMaximumSuppressionFragmentShaderString;
- }
-
-
- if (!(self = [super initWithFragmentShaderFromString:shaderString]))
- {
- return nil;
- }
-
- thresholdUniform = [filterProgram uniformIndex:@"threshold"];
- self.threshold = 0.9;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- _threshold = newValue;
-
- [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.h
deleted file mode 100755
index e41adee..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.h
+++ /dev/null
@@ -1,24 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGaussianBlurFilter;
-
-/// A simulated tilt shift lens effect
-@interface GPUImageTiltShiftFilter : GPUImageFilterGroup
-{
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageFilter *tiltShiftFilter;
-}
-
-/// The radius of the underlying blur, in pixels. This is 7.0 by default.
-@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-/// The normalized location of the top of the in-focus area in the image, this value should be lower than bottomFocusLevel, default 0.4
-@property(readwrite, nonatomic) CGFloat topFocusLevel;
-
-/// The normalized location of the bottom of the in-focus area in the image, this value should be higher than topFocusLevel, default 0.6
-@property(readwrite, nonatomic) CGFloat bottomFocusLevel;
-
-/// The rate at which the image gets blurry away from the in-focus region, default 0.2
-@property(readwrite, nonatomic) CGFloat focusFallOffRate;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.m
deleted file mode 100755
index e2f632b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTiltShiftFilter.m
+++ /dev/null
@@ -1,126 +0,0 @@
-#import "GPUImageTiltShiftFilter.h"
-#import "GPUImageFilter.h"
-#import "GPUImageTwoInputFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform highp float topFocusLevel;
- uniform highp float bottomFocusLevel;
- uniform highp float focusFallOffRate;
-
- void main()
- {
- lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- lowp float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);
- blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);
-
- gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);
- }
-);
-#else
-NSString *const kGPUImageTiltShiftFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float topFocusLevel;
- uniform float bottomFocusLevel;
- uniform float focusFallOffRate;
-
- void main()
- {
- vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- vec4 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2);
-
- float blurIntensity = 1.0 - smoothstep(topFocusLevel - focusFallOffRate, topFocusLevel, textureCoordinate2.y);
- blurIntensity += smoothstep(bottomFocusLevel, bottomFocusLevel + focusFallOffRate, textureCoordinate2.y);
-
- gl_FragColor = mix(sharpImageColor, blurredImageColor, blurIntensity);
- }
-);
-#endif
-
-@implementation GPUImageTiltShiftFilter
-
-@synthesize blurRadiusInPixels;
-@synthesize topFocusLevel = _topFocusLevel;
-@synthesize bottomFocusLevel = _bottomFocusLevel;
-@synthesize focusFallOffRate = _focusFallOffRate;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: apply a variable Gaussian blur
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Second pass: combine the blurred image with the original sharp one
- tiltShiftFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageTiltShiftFragmentShaderString];
- [self addFilter:tiltShiftFilter];
-
- // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
- [blurFilter addTarget:tiltShiftFilter atTextureLocation:1];
-
- // To prevent double updating of this filter, disable updates from the sharp image side
-// self.inputFilterToIgnoreForUpdates = tiltShiftFilter;
-
- self.initialFilters = [NSArray arrayWithObjects:blurFilter, tiltShiftFilter, nil];
- self.terminalFilter = tiltShiftFilter;
-
- self.topFocusLevel = 0.4;
- self.bottomFocusLevel = 0.6;
- self.focusFallOffRate = 0.2;
- self.blurRadiusInPixels = 7.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setTopFocusLevel:(CGFloat)newValue;
-{
- _topFocusLevel = newValue;
- [tiltShiftFilter setFloat:newValue forUniformName:@"topFocusLevel"];
-}
-
-- (void)setBottomFocusLevel:(CGFloat)newValue;
-{
- _bottomFocusLevel = newValue;
- [tiltShiftFilter setFloat:newValue forUniformName:@"bottomFocusLevel"];
-}
-
-- (void)setFocusFallOffRate:(CGFloat)newValue;
-{
- _focusFallOffRate = newValue;
- [tiltShiftFilter setFloat:newValue forUniformName:@"focusFallOffRate"];
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.h
deleted file mode 100755
index ff4ae92..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.h
+++ /dev/null
@@ -1,30 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageToneCurveFilter : GPUImageFilter
-
-@property(readwrite, nonatomic, copy) NSArray *redControlPoints;
-@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
-@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
-@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
-
-// Initialization and teardown
-- (id)initWithACVData:(NSData*)data;
-
-- (id)initWithACV:(NSString*)curveFilename;
-- (id)initWithACVURL:(NSURL*)curveFileURL;
-
-// This lets you set all three red, green, and blue tone curves at once.
-// NOTE: Deprecated this function because this effect can be accomplished
-// using the rgbComposite channel rather then setting all 3 R, G, and B channels.
-- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;
-
-- (void)setPointsWithACV:(NSString*)curveFilename;
-- (void)setPointsWithACVURL:(NSURL*)curveFileURL;
-
-// Curve calculation
-- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
-- (NSMutableArray *)splineCurve:(NSArray *)points;
-- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
-- (void)updateToneCurveTexture;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.m
deleted file mode 100644
index 18a717e..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageToneCurveFilter.m
+++ /dev/null
@@ -1,621 +0,0 @@
-#import "GPUImageToneCurveFilter.h"
-
-#pragma mark -
-#pragma mark GPUImageACVFile Helper
-
-// GPUImageACVFile
-//
-// ACV File format Parser
-// Please refer to http://www.adobe.com/devnet-apps/photoshop/fileformatashtml/PhotoshopFileFormats.htm#50577411_pgfId-1056330
-//
-
-@interface GPUImageACVFile : NSObject{
- short version;
- short totalCurves;
-
- NSArray *rgbCompositeCurvePoints;
- NSArray *redCurvePoints;
- NSArray *greenCurvePoints;
- NSArray *blueCurvePoints;
-}
-
-@property(strong,nonatomic) NSArray *rgbCompositeCurvePoints;
-@property(strong,nonatomic) NSArray *redCurvePoints;
-@property(strong,nonatomic) NSArray *greenCurvePoints;
-@property(strong,nonatomic) NSArray *blueCurvePoints;
-
-- (id) initWithACVFileData:(NSData*)data;
-
-
-unsigned short int16WithBytes(Byte* bytes);
-@end
-
-@implementation GPUImageACVFile
-
-@synthesize rgbCompositeCurvePoints, redCurvePoints, greenCurvePoints, blueCurvePoints;
-
-- (id) initWithACVFileData:(NSData *)data {
- self = [super init];
- if (self != nil)
- {
- if (data.length == 0)
- {
- NSLog(@"failed to init ACVFile with data:%@", data);
-
- return self;
- }
-
- Byte* rawBytes = (Byte*) [data bytes];
- version = int16WithBytes(rawBytes);
- rawBytes+=2;
-
- totalCurves = int16WithBytes(rawBytes);
- rawBytes+=2;
-
- NSMutableArray *curves = [NSMutableArray new];
-
- float pointRate = (1.0 / 255);
- // The following is the data for each curve specified by count above
- for (NSInteger x = 0; x 0)
- {
- // Sort the array.
- NSArray *sortedPoints = [points sortedArrayUsingComparator:^NSComparisonResult(id a, id b) {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- float x1 = [(NSValue *)a CGPointValue].x;
- float x2 = [(NSValue *)b CGPointValue].x;
-#else
- float x1 = [(NSValue *)a pointValue].x;
- float x2 = [(NSValue *)b pointValue].x;
-#endif
- return x1 > x2;
- }];
-
- // Convert from (0, 1) to (0, 255).
- NSMutableArray *convertedPoints = [NSMutableArray arrayWithCapacity:[sortedPoints count]];
- for (int i=0; i<[points count]; i++){
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGPoint point = [[sortedPoints objectAtIndex:i] CGPointValue];
-#else
- NSPoint point = [[sortedPoints objectAtIndex:i] pointValue];
-#endif
- point.x = point.x * 255;
- point.y = point.y * 255;
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [convertedPoints addObject:[NSValue valueWithCGPoint:point]];
-#else
- [convertedPoints addObject:[NSValue valueWithPoint:point]];
-#endif
- }
-
-
- NSMutableArray *splinePoints = [self splineCurve:convertedPoints];
-
- // If we have a first point like (0.3, 0) we'll be missing some points at the beginning
- // that should be 0.
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGPoint firstSplinePoint = [[splinePoints objectAtIndex:0] CGPointValue];
-#else
- NSPoint firstSplinePoint = [[splinePoints objectAtIndex:0] pointValue];
-#endif
-
- if (firstSplinePoint.x > 0) {
- for (int i=firstSplinePoint.x; i >= 0; i--) {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGPoint newCGPoint = CGPointMake(i, 0);
- [splinePoints insertObject:[NSValue valueWithCGPoint:newCGPoint] atIndex:0];
-#else
- NSPoint newNSPoint = NSMakePoint(i, 0);
- [splinePoints insertObject:[NSValue valueWithPoint:newNSPoint] atIndex:0];
-#endif
- }
- }
-
- // Insert points similarly at the end, if necessary.
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGPoint lastSplinePoint = [[splinePoints lastObject] CGPointValue];
-
- if (lastSplinePoint.x < 255) {
- for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
- CGPoint newCGPoint = CGPointMake(i, 255);
- [splinePoints addObject:[NSValue valueWithCGPoint:newCGPoint]];
- }
- }
-#else
- NSPoint lastSplinePoint = [[splinePoints lastObject] pointValue];
-
- if (lastSplinePoint.x < 255) {
- for (int i = lastSplinePoint.x + 1; i <= 255; i++) {
- NSPoint newNSPoint = NSMakePoint(i, 255);
- [splinePoints addObject:[NSValue valueWithPoint:newNSPoint]];
- }
- }
-#endif
-
- // Prepare the spline points.
- NSMutableArray *preparedSplinePoints = [NSMutableArray arrayWithCapacity:[splinePoints count]];
- for (int i=0; i<[splinePoints count]; i++)
- {
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- CGPoint newPoint = [[splinePoints objectAtIndex:i] CGPointValue];
-#else
- NSPoint newPoint = [[splinePoints objectAtIndex:i] pointValue];
-#endif
- CGPoint origPoint = CGPointMake(newPoint.x, newPoint.x);
-
- float distance = sqrt(pow((origPoint.x - newPoint.x), 2.0) + pow((origPoint.y - newPoint.y), 2.0));
-
- if (origPoint.y > newPoint.y)
- {
- distance = -distance;
- }
-
- [preparedSplinePoints addObject:[NSNumber numberWithFloat:distance]];
- }
-
- return preparedSplinePoints;
- }
-
- return nil;
-}
-
-
-- (NSMutableArray *)splineCurve:(NSArray *)points
-{
- NSMutableArray *sdA = [self secondDerivative:points];
-
- // [points count] is equal to [sdA count]
- NSInteger n = [sdA count];
- if (n < 1)
- {
- return nil;
- }
- double sd[n];
-
- // From NSMutableArray to sd[n];
- for (int i=0; i 255.0)
- {
- y = 255.0;
- }
- else if (y < 0.0)
- {
- y = 0.0;
- }
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- [output addObject:[NSValue valueWithCGPoint:CGPointMake(x, y)]];
-#else
- [output addObject:[NSValue valueWithPoint:NSMakePoint(x, y)]];
-#endif
- }
- }
-
- // The above always misses the last point because the last point is the last next, so we approach but don't equal it.
- [output addObject:[points lastObject]];
- return output;
-}
-
-- (NSMutableArray *)secondDerivative:(NSArray *)points
-{
- const NSInteger n = [points count];
- if ((n <= 0) || (n == 1))
- {
- return nil;
- }
-
- double matrix[n][3];
- double result[n];
- matrix[0][1]=1;
- // What about matrix[0][1] and matrix[0][0]? Assuming 0 for now (Brad L.)
- matrix[0][0]=0;
- matrix[0][2]=0;
-
- for(int i=1;idown)
- for(int i=1;iup)
- for(NSInteger i=n-2;i>=0;i--)
- {
- double k = matrix[i][2]/matrix[i+1][1];
- matrix[i][1] -= k*matrix[i+1][0];
- matrix[i][2] = 0;
- result[i] -= k*result[i+1];
- }
-
- double y2[n];
- for(int i=0;i= 256) && ([_greenCurve count] >= 256) && ([_blueCurve count] >= 256) && ([_rgbCompositeCurve count] >= 256))
- {
- for (unsigned int currentCurveIndex = 0; currentCurveIndex < 256; currentCurveIndex++)
- {
- // BGRA for upload to texture
- GLubyte b = fmin(fmax(currentCurveIndex + [[_blueCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
- toneCurveByteArray[currentCurveIndex * 4] = fmin(fmax(b + [[_rgbCompositeCurve objectAtIndex:b] floatValue], 0), 255);
- GLubyte g = fmin(fmax(currentCurveIndex + [[_greenCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
- toneCurveByteArray[currentCurveIndex * 4 + 1] = fmin(fmax(g + [[_rgbCompositeCurve objectAtIndex:g] floatValue], 0), 255);
- GLubyte r = fmin(fmax(currentCurveIndex + [[_redCurve objectAtIndex:currentCurveIndex] floatValue], 0), 255);
- toneCurveByteArray[currentCurveIndex * 4 + 2] = fmin(fmax(r + [[_rgbCompositeCurve objectAtIndex:r] floatValue], 0), 255);
- toneCurveByteArray[currentCurveIndex * 4 + 3] = 255;
- }
-
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 256 /*width*/, 1 /*height*/, 0, GL_BGRA, GL_UNSIGNED_BYTE, toneCurveByteArray);
- }
- });
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform, 2);
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, toneCurveTexture);
- glUniform1i(toneCurveTextureUniform, 3);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- [firstInputFramebuffer unlock];
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setRGBControlPoints:(NSArray *)points
-{
- _redControlPoints = [points copy];
- _redCurve = [self getPreparedSplineCurve:_redControlPoints];
-
- _greenControlPoints = [points copy];
- _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];
-
- _blueControlPoints = [points copy];
- _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];
-
- [self updateToneCurveTexture];
-}
-
-
-- (void)setRgbCompositeControlPoints:(NSArray *)newValue
-{
- _rgbCompositeControlPoints = [newValue copy];
- _rgbCompositeCurve = [self getPreparedSplineCurve:_rgbCompositeControlPoints];
-
- [self updateToneCurveTexture];
-}
-
-
-- (void)setRedControlPoints:(NSArray *)newValue;
-{
- _redControlPoints = [newValue copy];
- _redCurve = [self getPreparedSplineCurve:_redControlPoints];
-
- [self updateToneCurveTexture];
-}
-
-
-- (void)setGreenControlPoints:(NSArray *)newValue
-{
- _greenControlPoints = [newValue copy];
- _greenCurve = [self getPreparedSplineCurve:_greenControlPoints];
-
- [self updateToneCurveTexture];
-}
-
-
-- (void)setBlueControlPoints:(NSArray *)newValue
-{
- _blueControlPoints = [newValue copy];
- _blueCurve = [self getPreparedSplineCurve:_blueControlPoints];
-
- [self updateToneCurveTexture];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.h
deleted file mode 100755
index ef8e17c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-/** This uses Sobel edge detection to place a black border around objects,
- and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
- */
-@interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter
-{
- GLint thresholdUniform, quantizationLevelsUniform;
-}
-
-/** The threshold at which to apply the edges, default of 0.2
- */
-@property(readwrite, nonatomic) CGFloat threshold;
-
-/** The levels of quantization for the posterization of colors within the scene, with a default of 10.0
- */
-@property(readwrite, nonatomic) CGFloat quantizationLevels;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.m
deleted file mode 100755
index e8ff104..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageToonFilter.m
+++ /dev/null
@@ -1,149 +0,0 @@
-#import "GPUImageToonFilter.h"
-#import "GPUImageSobelEdgeDetectionFilter.h"
-#import "GPUImage3x3ConvolutionFilter.h"
-
-// Code from "Graphics Shaders: Theory and Practice" by M. Bailey and S. Cunningham
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp float intensity;
- uniform highp float threshold;
- uniform highp float quantizationLevels;
-
- const highp vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v));
-
- vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
-
- float thresholdTest = 1.0 - step(threshold, mag);
-
- gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
- }
-);
-#else
-NSString *const kGPUImageToonFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float intensity;
- uniform float threshold;
- uniform float quantizationLevels;
-
- const vec3 W = vec3(0.2125, 0.7154, 0.0721);
-
- void main()
- {
- vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
-
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float h = -topLeftIntensity - 2.0 * topIntensity - topRightIntensity + bottomLeftIntensity + 2.0 * bottomIntensity + bottomRightIntensity;
- float v = -bottomLeftIntensity - 2.0 * leftIntensity - topLeftIntensity + bottomRightIntensity + 2.0 * rightIntensity + topRightIntensity;
-
- float mag = length(vec2(h, v));
-
- vec3 posterizedImageColor = floor((textureColor.rgb * quantizationLevels) + 0.5) / quantizationLevels;
-
- float thresholdTest = 1.0 - step(threshold, mag);
-
- gl_FragColor = vec4(posterizedImageColor * thresholdTest, textureColor.a);
- }
-);
-#endif
-
-@implementation GPUImageToonFilter
-
-@synthesize threshold = _threshold;
-@synthesize quantizationLevels = _quantizationLevels;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageToonFragmentShaderString]))
- {
- return nil;
- }
-
- hasOverriddenImageSizeFactor = NO;
-
- thresholdUniform = [filterProgram uniformIndex:@"threshold"];
- quantizationLevelsUniform = [filterProgram uniformIndex:@"quantizationLevels"];
-
- self.threshold = 0.2;
- self.quantizationLevels = 10.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setThreshold:(CGFloat)newValue;
-{
- _threshold = newValue;
-
- [self setFloat:_threshold forUniform:thresholdUniform program:filterProgram];
-}
-
-- (void)setQuantizationLevels:(CGFloat)newValue;
-{
- _quantizationLevels = newValue;
-
- [self setFloat:_quantizationLevels forUniform:quantizationLevelsUniform program:filterProgram];
-}
-
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.h
deleted file mode 100755
index 9865b85..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageTransformFilter : GPUImageFilter
-{
- GLint transformMatrixUniform, orthographicMatrixUniform;
- GPUMatrix4x4 orthographicMatrix;
-}
-
-// You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
-@property(readwrite, nonatomic) CGAffineTransform affineTransform;
-@property(readwrite, nonatomic) CATransform3D transform3D;
-
-// This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating
-@property(readwrite, nonatomic) BOOL ignoreAspectRatio;
-
-// sets the anchor point to top left corner
-@property(readwrite, nonatomic) BOOL anchorTopLeft;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.m
deleted file mode 100755
index 1793309..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTransformFilter.m
+++ /dev/null
@@ -1,260 +0,0 @@
-#import "GPUImageTransformFilter.h"
-
-NSString *const kGPUImageTransformVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform mat4 transformMatrix;
- uniform mat4 orthographicMatrix;
-
- varying vec2 textureCoordinate;
-
- void main()
- {
- gl_Position = transformMatrix * vec4(position.xyz, 1.0) * orthographicMatrix;
- textureCoordinate = inputTextureCoordinate.xy;
- }
-);
-
-@implementation GPUImageTransformFilter
-
-@synthesize affineTransform;
-@synthesize transform3D = _transform3D;
-@synthesize ignoreAspectRatio = _ignoreAspectRatio;
-@synthesize anchorTopLeft = _anchorTopLeft;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageTransformVertexShaderString fragmentShaderFromString:kGPUImagePassthroughFragmentShaderString]))
- {
- return nil;
- }
-
- transformMatrixUniform = [filterProgram uniformIndex:@"transformMatrix"];
- orthographicMatrixUniform = [filterProgram uniformIndex:@"orthographicMatrix"];
-
- self.transform3D = CATransform3DIdentity;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Conversion from matrix formats
-
-- (void)loadOrthoMatrix:(GLfloat *)matrix left:(GLfloat)left right:(GLfloat)right bottom:(GLfloat)bottom top:(GLfloat)top near:(GLfloat)near far:(GLfloat)far;
-{
- GLfloat r_l = right - left;
- GLfloat t_b = top - bottom;
- GLfloat f_n = far - near;
- GLfloat tx = - (right + left) / (right - left);
- GLfloat ty = - (top + bottom) / (top - bottom);
- GLfloat tz = - (far + near) / (far - near);
-
- float scale = 2.0f;
- if (_anchorTopLeft)
- {
- scale = 4.0f;
- tx=-1.0f;
- ty=-1.0f;
- }
-
- matrix[0] = scale / r_l;
- matrix[1] = 0.0f;
- matrix[2] = 0.0f;
- matrix[3] = tx;
-
- matrix[4] = 0.0f;
- matrix[5] = scale / t_b;
- matrix[6] = 0.0f;
- matrix[7] = ty;
-
- matrix[8] = 0.0f;
- matrix[9] = 0.0f;
- matrix[10] = scale / f_n;
- matrix[11] = tz;
-
- matrix[12] = 0.0f;
- matrix[13] = 0.0f;
- matrix[14] = 0.0f;
- matrix[15] = 1.0f;
-}
-
-//- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GLfloat *)matrix;
-//{
-// // struct CATransform3D
-// // {
-// // CGFloat m11, m12, m13, m14;
-// // CGFloat m21, m22, m23, m24;
-// // CGFloat m31, m32, m33, m34;
-// // CGFloat m41, m42, m43, m44;
-// // };
-//
-// matrix[0] = (GLfloat)transform3D->m11;
-// matrix[1] = (GLfloat)transform3D->m12;
-// matrix[2] = (GLfloat)transform3D->m13;
-// matrix[3] = (GLfloat)transform3D->m14;
-// matrix[4] = (GLfloat)transform3D->m21;
-// matrix[5] = (GLfloat)transform3D->m22;
-// matrix[6] = (GLfloat)transform3D->m23;
-// matrix[7] = (GLfloat)transform3D->m24;
-// matrix[8] = (GLfloat)transform3D->m31;
-// matrix[9] = (GLfloat)transform3D->m32;
-// matrix[10] = (GLfloat)transform3D->m33;
-// matrix[11] = (GLfloat)transform3D->m34;
-// matrix[12] = (GLfloat)transform3D->m41;
-// matrix[13] = (GLfloat)transform3D->m42;
-// matrix[14] = (GLfloat)transform3D->m43;
-// matrix[15] = (GLfloat)transform3D->m44;
-//}
-
-- (void)convert3DTransform:(CATransform3D *)transform3D toMatrix:(GPUMatrix4x4 *)matrix;
-{
- // struct CATransform3D
- // {
- // CGFloat m11, m12, m13, m14;
- // CGFloat m21, m22, m23, m24;
- // CGFloat m31, m32, m33, m34;
- // CGFloat m41, m42, m43, m44;
- // };
-
- GLfloat *mappedMatrix = (GLfloat *)matrix;
-
- mappedMatrix[0] = (GLfloat)transform3D->m11;
- mappedMatrix[1] = (GLfloat)transform3D->m12;
- mappedMatrix[2] = (GLfloat)transform3D->m13;
- mappedMatrix[3] = (GLfloat)transform3D->m14;
- mappedMatrix[4] = (GLfloat)transform3D->m21;
- mappedMatrix[5] = (GLfloat)transform3D->m22;
- mappedMatrix[6] = (GLfloat)transform3D->m23;
- mappedMatrix[7] = (GLfloat)transform3D->m24;
- mappedMatrix[8] = (GLfloat)transform3D->m31;
- mappedMatrix[9] = (GLfloat)transform3D->m32;
- mappedMatrix[10] = (GLfloat)transform3D->m33;
- mappedMatrix[11] = (GLfloat)transform3D->m34;
- mappedMatrix[12] = (GLfloat)transform3D->m41;
- mappedMatrix[13] = (GLfloat)transform3D->m42;
- mappedMatrix[14] = (GLfloat)transform3D->m43;
- mappedMatrix[15] = (GLfloat)transform3D->m44;
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- CGSize currentFBOSize = [self sizeOfFBO];
- CGFloat normalizedHeight = currentFBOSize.height / currentFBOSize.width;
-
- GLfloat adjustedVertices[] = {
- -1.0f, -normalizedHeight,
- 1.0f, -normalizedHeight,
- -1.0f, normalizedHeight,
- 1.0f, normalizedHeight,
- };
- static const GLfloat squareVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- GLfloat adjustedVerticesAnchorTL[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, normalizedHeight,
- 1.0f, normalizedHeight,
- };
-
- static const GLfloat squareVerticesAnchorTL[] = {
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 0.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- if (_ignoreAspectRatio)
- {
- if (_anchorTopLeft)
- {
- [self renderToTextureWithVertices:squareVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
- }
- else
- {
- [self renderToTextureWithVertices:squareVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
- }
- }
- else
- {
- if (_anchorTopLeft)
- {
- [self renderToTextureWithVertices:adjustedVerticesAnchorTL textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
- }
- else
- {
- [self renderToTextureWithVertices:adjustedVertices textureCoordinates:[[self class] textureCoordinatesForRotation:inputRotation]];
- }
- }
-
- [self informTargetsAboutNewFrameAtTime:frameTime];
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- if (!_ignoreAspectRatio)
- {
- [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * filterFrameSize.height / filterFrameSize.width) top:(1.0 * filterFrameSize.height / filterFrameSize.width) near:-1.0 far:1.0];
- // [self loadOrthoMatrix:orthographicMatrix left:-1.0 right:1.0 bottom:(-1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) top:(1.0 * (GLfloat)backingHeight / (GLfloat)backingWidth) near:-2.0 far:2.0];
-
- [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setAffineTransform:(CGAffineTransform)newValue;
-{
- self.transform3D = CATransform3DMakeAffineTransform(newValue);
-}
-
-- (CGAffineTransform)affineTransform;
-{
- return CATransform3DGetAffineTransform(self.transform3D);
-}
-
-- (void)setTransform3D:(CATransform3D)newValue;
-{
- _transform3D = newValue;
-
- GPUMatrix4x4 temporaryMatrix;
-
- [self convert3DTransform:&_transform3D toMatrix:&temporaryMatrix];
- [self setMatrix4f:temporaryMatrix forUniform:transformMatrixUniform program:filterProgram];
-}
-
-- (void)setIgnoreAspectRatio:(BOOL)newValue;
-{
- _ignoreAspectRatio = newValue;
-
- if (_ignoreAspectRatio)
- {
- [self loadOrthoMatrix:(GLfloat *)&orthographicMatrix left:-1.0 right:1.0 bottom:-1.0 top:1.0 near:-1.0 far:1.0];
- [self setMatrix4f:orthographicMatrix forUniform:orthographicMatrixUniform program:filterProgram];
- }
- else
- {
- [self setupFilterForSize:[self sizeOfFBO]];
- }
-}
-
-- (void)setAnchorTopLeft:(BOOL)newValue
-{
- _anchorTopLeft = newValue;
- [self setIgnoreAspectRatio:_ignoreAspectRatio];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.h
deleted file mode 100644
index 64eac9d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
-{
- GLint texelWidthUniform, texelHeightUniform;
-
- CGFloat texelWidth, texelHeight;
- BOOL hasOverriddenImageSizeFactor;
-}
-
-// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
-@property(readwrite, nonatomic) CGFloat texelWidth;
-@property(readwrite, nonatomic) CGFloat texelHeight;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.m
deleted file mode 100644
index aa338f8..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputCrossTextureSamplingFilter.m
+++ /dev/null
@@ -1,108 +0,0 @@
-#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
-
-NSString *const kGPUImageTwoInputNearbyTexelSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
- attribute vec4 inputTextureCoordinate2;
-
- uniform float texelWidth;
- uniform float texelHeight;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
- varying vec2 topTextureCoordinate;
- varying vec2 bottomTextureCoordinate;
-
- varying vec2 textureCoordinate2;
- varying vec2 leftTextureCoordinate2;
- varying vec2 rightTextureCoordinate2;
- varying vec2 topTextureCoordinate2;
- varying vec2 bottomTextureCoordinate2;
-
- void main()
- {
- gl_Position = position;
-
- vec2 widthStep = vec2(texelWidth, 0.0);
- vec2 heightStep = vec2(0.0, texelHeight);
-
- textureCoordinate = inputTextureCoordinate.xy;
- leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
- rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
- topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
- bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
-
- textureCoordinate2 = inputTextureCoordinate2.xy;
- leftTextureCoordinate2 = inputTextureCoordinate2.xy - widthStep;
- rightTextureCoordinate2 = inputTextureCoordinate2.xy + widthStep;
- topTextureCoordinate2 = inputTextureCoordinate2.xy - heightStep;
- bottomTextureCoordinate2 = inputTextureCoordinate2.xy + heightStep;
- }
-);
-
-@implementation GPUImageTwoInputCrossTextureSamplingFilter
-
-@synthesize texelWidth = _texelWidth;
-@synthesize texelHeight = _texelHeight;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageTwoInputNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
- texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
-
- return self;
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- if (!hasOverriddenImageSizeFactor)
- {
- _texelWidth = 1.0 / filterFrameSize.width;
- _texelHeight = 1.0 / filterFrameSize.height;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext setActiveShaderProgram:filterProgram];
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- glUniform1f(texelWidthUniform, _texelHeight);
- glUniform1f(texelHeightUniform, _texelWidth);
- }
- else
- {
- glUniform1f(texelWidthUniform, _texelWidth);
- glUniform1f(texelHeightUniform, _texelHeight);
- }
- });
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTexelWidth:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelWidth = newValue;
-
- [self setFloat:_texelWidth forUniform:texelWidthUniform program:filterProgram];
-}
-
-- (void)setTexelHeight:(CGFloat)newValue;
-{
- hasOverriddenImageSizeFactor = YES;
- _texelHeight = newValue;
-
- [self setFloat:_texelHeight forUniform:texelHeightUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.h
deleted file mode 100644
index da3a134..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.h
+++ /dev/null
@@ -1,21 +0,0 @@
-#import "GPUImageFilter.h"
-
-extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
-
-@interface GPUImageTwoInputFilter : GPUImageFilter
-{
- GPUImageFramebuffer *secondInputFramebuffer;
-
- GLint filterSecondTextureCoordinateAttribute;
- GLint filterInputTextureUniform2;
- GPUImageRotationMode inputRotation2;
- CMTime firstFrameTime, secondFrameTime;
-
- BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
- BOOL firstFrameCheckDisabled, secondFrameCheckDisabled;
-}
-
-- (void)disableFirstFrameCheck;
-- (void)disableSecondFrameCheck;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.m
deleted file mode 100644
index cf31873..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoInputFilter.m
+++ /dev/null
@@ -1,264 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-NSString *const kGPUImageTwoInputTextureVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
- attribute vec4 inputTextureCoordinate2;
-
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- void main()
- {
- gl_Position = position;
- textureCoordinate = inputTextureCoordinate.xy;
- textureCoordinate2 = inputTextureCoordinate2.xy;
- }
-);
-
-
-@implementation GPUImageTwoInputFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [self initWithVertexShaderFromString:kGPUImageTwoInputTextureVertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
-{
- if (!(self = [super initWithVertexShaderFromString:vertexShaderString fragmentShaderFromString:fragmentShaderString]))
- {
- return nil;
- }
-
- inputRotation2 = kGPUImageNoRotation;
-
- hasSetFirstTexture = NO;
-
- hasReceivedFirstFrame = NO;
- hasReceivedSecondFrame = NO;
- firstFrameWasVideo = NO;
- secondFrameWasVideo = NO;
- firstFrameCheckDisabled = NO;
- secondFrameCheckDisabled = NO;
-
- firstFrameTime = kCMTimeInvalid;
- secondFrameTime = kCMTimeInvalid;
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
- filterSecondTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate2"];
-
- filterInputTextureUniform2 = [filterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
- glEnableVertexAttribArray(filterSecondTextureCoordinateAttribute);
- });
-
- return self;
-}
-
-- (void)initializeAttributes;
-{
- [super initializeAttributes];
- [filterProgram addAttribute:@"inputTextureCoordinate2"];
-}
-
-- (void)disableFirstFrameCheck;
-{
- firstFrameCheckDisabled = YES;
-}
-
-- (void)disableSecondFrameCheck;
-{
- secondFrameCheckDisabled = YES;
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- [secondInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
- if (usingNextFrameForImageCapture)
- {
- [outputFramebuffer lock];
- }
-
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform, 2);
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, [secondInputFramebuffer texture]);
- glUniform1i(filterInputTextureUniform2, 3);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
- glVertexAttribPointer(filterSecondTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:inputRotation2]);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- [firstInputFramebuffer unlock];
- [secondInputFramebuffer unlock];
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-#pragma mark -
-#pragma mark GPUImageInput
-
-- (NSInteger)nextAvailableTextureIndex;
-{
- if (hasSetFirstTexture)
- {
- return 1;
- }
- else
- {
- return 0;
- }
-}
-
-- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- firstInputFramebuffer = newInputFramebuffer;
- hasSetFirstTexture = YES;
- [firstInputFramebuffer lock];
- }
- else
- {
- secondInputFramebuffer = newInputFramebuffer;
- [secondInputFramebuffer lock];
- }
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- [super setInputSize:newSize atIndex:textureIndex];
-
- if (CGSizeEqualToSize(newSize, CGSizeZero))
- {
- hasSetFirstTexture = NO;
- }
- }
-}
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- if (textureIndex == 0)
- {
- inputRotation = newInputRotation;
- }
- else
- {
- inputRotation2 = newInputRotation;
- }
-}
-
-- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
-{
- CGSize rotatedSize = sizeToRotate;
-
- GPUImageRotationMode rotationToCheck;
- if (textureIndex == 0)
- {
- rotationToCheck = inputRotation;
- }
- else
- {
- rotationToCheck = inputRotation2;
- }
-
- if (GPUImageRotationSwapsWidthAndHeight(rotationToCheck))
- {
- rotatedSize.width = sizeToRotate.height;
- rotatedSize.height = sizeToRotate.width;
- }
-
- return rotatedSize;
-}
-
-- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
-{
- // You can set up infinite update loops, so this helps to short circuit them
- if (hasReceivedFirstFrame && hasReceivedSecondFrame)
- {
- return;
- }
-
- BOOL updatedMovieFrameOppositeStillImage = NO;
-
- if (textureIndex == 0)
- {
- hasReceivedFirstFrame = YES;
- firstFrameTime = frameTime;
- if (secondFrameCheckDisabled)
- {
- hasReceivedSecondFrame = YES;
- }
-
- if (!CMTIME_IS_INDEFINITE(frameTime))
- {
- if CMTIME_IS_INDEFINITE(secondFrameTime)
- {
- updatedMovieFrameOppositeStillImage = YES;
- }
- }
- }
- else
- {
- hasReceivedSecondFrame = YES;
- secondFrameTime = frameTime;
- if (firstFrameCheckDisabled)
- {
- hasReceivedFirstFrame = YES;
- }
-
- if (!CMTIME_IS_INDEFINITE(frameTime))
- {
- if CMTIME_IS_INDEFINITE(firstFrameTime)
- {
- updatedMovieFrameOppositeStillImage = YES;
- }
- }
- }
-
- // || (hasReceivedFirstFrame && secondFrameCheckDisabled) || (hasReceivedSecondFrame && firstFrameCheckDisabled)
- if ((hasReceivedFirstFrame && hasReceivedSecondFrame) || updatedMovieFrameOppositeStillImage)
- {
- CMTime passOnFrameTime = (!CMTIME_IS_INDEFINITE(firstFrameTime)) ? firstFrameTime : secondFrameTime;
- [super newFrameReadyAtTime:passOnFrameTime atIndex:0]; // Bugfix when trying to record: always use time from first input (unless indefinite, in which case use the second input)
- hasReceivedFirstFrame = NO;
- hasReceivedSecondFrame = NO;
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.h
deleted file mode 100755
index 23087f3..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.h
+++ /dev/null
@@ -1,19 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageTwoPassFilter : GPUImageFilter
-{
- GPUImageFramebuffer *secondOutputFramebuffer;
-
- GLProgram *secondFilterProgram;
- GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
- GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
-
- NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
-}
-
-// Initialization and teardown
-- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
-- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
-- (void)initializeSecondaryAttributes;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.m
deleted file mode 100755
index 9eb292b..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassFilter.m
+++ /dev/null
@@ -1,201 +0,0 @@
-#import "GPUImageTwoPassFilter.h"
-
-@implementation GPUImageTwoPassFilter
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
-{
- if (!(self = [super initWithVertexShaderFromString:firstStageVertexShaderString fragmentShaderFromString:firstStageFragmentShaderString]))
- {
- return nil;
- }
-
- secondProgramUniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- secondFilterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:secondStageVertexShaderString fragmentShaderString:secondStageFragmentShaderString];
-
- if (!secondFilterProgram.initialized)
- {
- [self initializeSecondaryAttributes];
-
- if (![secondFilterProgram link])
- {
- NSString *progLog = [secondFilterProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [secondFilterProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [secondFilterProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- secondFilterProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- secondFilterPositionAttribute = [secondFilterProgram attributeIndex:@"position"];
- secondFilterTextureCoordinateAttribute = [secondFilterProgram attributeIndex:@"inputTextureCoordinate"];
- secondFilterInputTextureUniform = [secondFilterProgram uniformIndex:@"inputImageTexture"]; // This does assume a name of "inputImageTexture" for the fragment shader
- secondFilterInputTextureUniform2 = [secondFilterProgram uniformIndex:@"inputImageTexture2"]; // This does assume a name of "inputImageTexture2" for second input texture in the fragment shader
-
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
-
- glEnableVertexAttribArray(secondFilterPositionAttribute);
- glEnableVertexAttribArray(secondFilterTextureCoordinateAttribute);
- });
-
- return self;
-}
-
-- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
-{
- if (!(self = [self initWithFirstStageVertexShaderFromString:kGPUImageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:kGPUImageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (void)initializeSecondaryAttributes;
-{
- [secondFilterProgram addAttribute:@"position"];
- [secondFilterProgram addAttribute:@"inputTextureCoordinate"];
-}
-
-#pragma mark -
-#pragma mark Managing targets
-
-- (GPUImageFramebuffer *)framebufferForOutput;
-{
- return secondOutputFramebuffer;
-}
-
-- (void)removeOutputFramebuffer;
-{
- secondOutputFramebuffer = nil;
-}
-
-#pragma mark -
-#pragma mark Rendering
-
-- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
-{
- if (self.preventRendering)
- {
- [firstInputFramebuffer unlock];
- return;
- }
-
- [GPUImageContext setActiveShaderProgram:filterProgram];
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- [self setUniformsForProgramAtIndex:0];
-
- glClearColor(backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glActiveTexture(GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
-
- glUniform1i(filterInputTextureUniform, 2);
-
- glVertexAttribPointer(filterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
- glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-
- [firstInputFramebuffer unlock];
- firstInputFramebuffer = nil;
-
- // This assumes that any two-pass filter that says it desires monochrome input is using the first pass for a luminance conversion, which can be dropped
-// if (!currentlyReceivingMonochromeInput)
-// {
- // Run the first stage of the two-pass filter
-// [super renderToTextureWithVertices:vertices textureCoordinates:textureCoordinates];
-// }
-
- // Run the second stage of the two-pass filter
- secondOutputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
- [secondOutputFramebuffer activateFramebuffer];
- [GPUImageContext setActiveShaderProgram:secondFilterProgram];
- if (usingNextFrameForImageCapture)
- {
- [secondOutputFramebuffer lock];
- }
-
- [self setUniformsForProgramAtIndex:1];
-
- glActiveTexture(GL_TEXTURE3);
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
-
- // TODO: Re-enable this monochrome optimization
-// if (!currentlyReceivingMonochromeInput)
-// {
-// glActiveTexture(GL_TEXTURE3);
-// glBindTexture(GL_TEXTURE_2D, outputTexture);
-// glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [[self class] textureCoordinatesForRotation:kGPUImageNoRotation]);
-// }
-// else
-// {
-// glActiveTexture(GL_TEXTURE3);
-// glBindTexture(GL_TEXTURE_2D, sourceTexture);
-// glVertexAttribPointer(secondFilterTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, textureCoordinates);
-// }
-
- glUniform1i(secondFilterInputTextureUniform, 3);
-
- glVertexAttribPointer(secondFilterPositionAttribute, 2, GL_FLOAT, 0, 0, vertices);
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
- [outputFramebuffer unlock];
- outputFramebuffer = nil;
-
- if (usingNextFrameForImageCapture)
- {
- dispatch_semaphore_signal(imageCaptureSemaphore);
- }
-}
-
-- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
-{
-// TODO: Deal with the fact that two-pass filters may have the same shader program identifier
- if (shaderProgram == filterProgram)
- {
- [uniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
- }
- else
- {
- [secondProgramUniformStateRestorationBlocks setObject:[uniformStateBlock copy] forKey:[NSNumber numberWithInt:uniform]];
- }
- uniformStateBlock();
-}
-
-- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
-{
- if (programIndex == 0)
- {
- [uniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
- dispatch_block_t currentBlock = obj;
- currentBlock();
- }];
- }
- else
- {
- [secondProgramUniformStateRestorationBlocks enumerateKeysAndObjectsUsingBlock:^(id key, id obj, BOOL *stop){
- dispatch_block_t currentBlock = obj;
- currentBlock();
- }];
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.h
deleted file mode 100644
index 73ab79d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageTwoPassFilter.h"
-
-@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
-{
- GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
- GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
- CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
-}
-
-// This sets the spacing between texels (in pixels) when sampling for the first. By default, this is 1.0
-@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.m
deleted file mode 100644
index b6a2ec5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageTwoPassTextureSamplingFilter.m
+++ /dev/null
@@ -1,85 +0,0 @@
-#import "GPUImageTwoPassTextureSamplingFilter.h"
-
-@implementation GPUImageTwoPassTextureSamplingFilter
-
-@synthesize verticalTexelSpacing = _verticalTexelSpacing;
-@synthesize horizontalTexelSpacing = _horizontalTexelSpacing;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString
-{
- if (!(self = [super initWithFirstStageVertexShaderFromString:firstStageVertexShaderString firstStageFragmentShaderFromString:firstStageFragmentShaderString secondStageVertexShaderFromString:secondStageVertexShaderString secondStageFragmentShaderFromString:secondStageFragmentShaderString]))
- {
- return nil;
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
- [GPUImageContext useImageProcessingContext];
-
- verticalPassTexelWidthOffsetUniform = [filterProgram uniformIndex:@"texelWidthOffset"];
- verticalPassTexelHeightOffsetUniform = [filterProgram uniformIndex:@"texelHeightOffset"];
-
- horizontalPassTexelWidthOffsetUniform = [secondFilterProgram uniformIndex:@"texelWidthOffset"];
- horizontalPassTexelHeightOffsetUniform = [secondFilterProgram uniformIndex:@"texelHeightOffset"];
- });
-
- self.verticalTexelSpacing = 1.0;
- self.horizontalTexelSpacing = 1.0;
-
- return self;
-}
-
-- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
-{
- [super setUniformsForProgramAtIndex:programIndex];
-
- if (programIndex == 0)
- {
- glUniform1f(verticalPassTexelWidthOffsetUniform, verticalPassTexelWidthOffset);
- glUniform1f(verticalPassTexelHeightOffsetUniform, verticalPassTexelHeightOffset);
- }
- else
- {
- glUniform1f(horizontalPassTexelWidthOffsetUniform, horizontalPassTexelWidthOffset);
- glUniform1f(horizontalPassTexelHeightOffsetUniform, horizontalPassTexelHeightOffset);
- }
-}
-
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
- // The first pass through the framebuffer may rotate the inbound image, so need to account for that by changing up the kernel ordering for that pass
- if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
- {
- verticalPassTexelWidthOffset = _verticalTexelSpacing / filterFrameSize.height;
- verticalPassTexelHeightOffset = 0.0;
- }
- else
- {
- verticalPassTexelWidthOffset = 0.0;
- verticalPassTexelHeightOffset = _verticalTexelSpacing / filterFrameSize.height;
- }
-
- horizontalPassTexelWidthOffset = _horizontalTexelSpacing / filterFrameSize.width;
- horizontalPassTexelHeightOffset = 0.0;
- });
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setVerticalTexelSpacing:(CGFloat)newValue;
-{
- _verticalTexelSpacing = newValue;
- [self setupFilterForSize:[self sizeOfFBO]];
-}
-
-- (void)setHorizontalTexelSpacing:(CGFloat)newValue;
-{
- _horizontalTexelSpacing = newValue;
- [self setupFilterForSize:[self sizeOfFBO]];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.h b/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.h
deleted file mode 100644
index 984ff2a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.h
+++ /dev/null
@@ -1,15 +0,0 @@
-#import "GPUImageOutput.h"
-
-@interface GPUImageUIElement : GPUImageOutput
-
-// Initialization and teardown
-- (id)initWithView:(UIView *)inputView;
-- (id)initWithLayer:(CALayer *)inputLayer;
-
-// Layer management
-- (CGSize)layerSizeInPixels;
-- (void)update;
-- (void)updateUsingCurrentTime;
-- (void)updateWithTimestamp:(CMTime)frameTime;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.m b/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.m
deleted file mode 100644
index 3320892..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageUIElement.m
+++ /dev/null
@@ -1,123 +0,0 @@
-#import "GPUImageUIElement.h"
-
-@interface GPUImageUIElement ()
-{
- UIView *view;
- CALayer *layer;
-
- CGSize previousLayerSizeInPixels;
- CMTime time;
- NSTimeInterval actualTimeOfLastUpdate;
-}
-
-@end
-
-@implementation GPUImageUIElement
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)initWithView:(UIView *)inputView;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- view = inputView;
- layer = inputView.layer;
-
- previousLayerSizeInPixels = CGSizeZero;
- [self update];
-
- return self;
-}
-
-- (id)initWithLayer:(CALayer *)inputLayer;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- view = nil;
- layer = inputLayer;
-
- previousLayerSizeInPixels = CGSizeZero;
- [self update];
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Layer management
-
-- (CGSize)layerSizeInPixels;
-{
- CGSize pointSize = layer.bounds.size;
- return CGSizeMake(layer.contentsScale * pointSize.width, layer.contentsScale * pointSize.height);
-}
-
-- (void)update;
-{
- [self updateWithTimestamp:kCMTimeIndefinite];
-}
-
-- (void)updateUsingCurrentTime;
-{
- if(CMTIME_IS_INVALID(time)) {
- time = CMTimeMakeWithSeconds(0, 600);
- actualTimeOfLastUpdate = [NSDate timeIntervalSinceReferenceDate];
- } else {
- NSTimeInterval now = [NSDate timeIntervalSinceReferenceDate];
- NSTimeInterval diff = now - actualTimeOfLastUpdate;
- time = CMTimeAdd(time, CMTimeMakeWithSeconds(diff, 600));
- actualTimeOfLastUpdate = now;
- }
-
- [self updateWithTimestamp:time];
-}
-
-- (void)updateWithTimestamp:(CMTime)frameTime;
-{
- [GPUImageContext useImageProcessingContext];
-
- CGSize layerPixelSize = [self layerSizeInPixels];
-
- GLubyte *imageData = (GLubyte *) calloc(1, (int)layerPixelSize.width * (int)layerPixelSize.height * 4);
-
- CGColorSpaceRef genericRGBColorspace = CGColorSpaceCreateDeviceRGB();
- CGContextRef imageContext = CGBitmapContextCreate(imageData, (int)layerPixelSize.width, (int)layerPixelSize.height, 8, (int)layerPixelSize.width * 4, genericRGBColorspace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
-// CGContextRotateCTM(imageContext, M_PI_2);
- CGContextTranslateCTM(imageContext, 0.0f, layerPixelSize.height);
- CGContextScaleCTM(imageContext, layer.contentsScale, -layer.contentsScale);
- // CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
-
- [layer renderInContext:imageContext];
-
- CGContextRelease(imageContext);
- CGColorSpaceRelease(genericRGBColorspace);
-
- // TODO: This may not work
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:layerPixelSize textureOptions:self.outputTextureOptions onlyTexture:YES];
-
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
- // no need to use self.outputTextureOptions here, we always need these texture options
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, (int)layerPixelSize.width, (int)layerPixelSize.height, 0, GL_BGRA, GL_UNSIGNED_BYTE, imageData);
-
- free(imageData);
-
- for (id currentTarget in targets)
- {
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- [currentTarget setInputSize:layerPixelSize atIndex:textureIndexOfTarget];
- [currentTarget newFrameReadyAtTime:frameTime atIndex:textureIndexOfTarget];
- }
- }
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.h
deleted file mode 100755
index 9d8aff0..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageGaussianBlurFilter;
-
-@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
-{
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageFilter *unsharpMaskFilter;
-}
-// The blur radius of the underlying Gaussian blur. The default is 4.0.
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-// The strength of the sharpening, from 0.0 on up, with a default of 1.0
-@property(readwrite, nonatomic) CGFloat intensity;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.m
deleted file mode 100755
index 542c5ea..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageUnsharpMaskFilter.m
+++ /dev/null
@@ -1,101 +0,0 @@
-#import "GPUImageUnsharpMaskFilter.h"
-#import "GPUImageFilter.h"
-#import "GPUImageTwoInputFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
- varying highp vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform highp float intensity;
-
- void main()
- {
- lowp vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
-
- gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);
-// gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);
-// gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);
- }
-);
-#else
-NSString *const kGPUImageUnsharpMaskFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 textureCoordinate2;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
-
- uniform float intensity;
-
- void main()
- {
- vec4 sharpImageColor = texture2D(inputImageTexture, textureCoordinate);
- vec3 blurredImageColor = texture2D(inputImageTexture2, textureCoordinate2).rgb;
-
- gl_FragColor = vec4(sharpImageColor.rgb * intensity + blurredImageColor * (1.0 - intensity), sharpImageColor.a);
- // gl_FragColor = mix(blurredImageColor, sharpImageColor, intensity);
- // gl_FragColor = vec4(sharpImageColor.rgb - (blurredImageColor.rgb * intensity), 1.0);
- }
-);
-#endif
-
-@implementation GPUImageUnsharpMaskFilter
-
-@synthesize blurRadiusInPixels;
-@synthesize intensity = _intensity;
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: apply a variable Gaussian blur
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Second pass: combine the blurred image with the original sharp one
- unsharpMaskFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageUnsharpMaskFragmentShaderString];
- [self addFilter:unsharpMaskFilter];
-
- // Texture location 0 needs to be the sharp image for both the blur and the second stage processing
- [blurFilter addTarget:unsharpMaskFilter atTextureLocation:1];
-
- self.initialFilters = [NSArray arrayWithObjects:blurFilter, unsharpMaskFilter, nil];
- self.terminalFilter = unsharpMaskFilter;
-
- self.intensity = 1.0;
- self.blurRadiusInPixels = 4.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setIntensity:(CGFloat)newValue;
-{
- _intensity = newValue;
- [unsharpMaskFilter setFloat:newValue forUniformName:@"intensity"];
-}
-
-@end
\ No newline at end of file
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.h b/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.h
deleted file mode 100755
index 22ccb94..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.h
+++ /dev/null
@@ -1,154 +0,0 @@
-#import
-#import
-#import
-#import "GPUImageContext.h"
-#import "GPUImageOutput.h"
-
-extern const GLfloat kColorConversion601[];
-extern const GLfloat kColorConversion601FullRange[];
-extern const GLfloat kColorConversion709[];
-extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
-extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
-extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;
-
-
-//Delegate Protocal for Face Detection.
-@protocol GPUImageVideoCameraDelegate
-
-@optional
-- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-@end
-
-
-/**
- A GPUImageOutput that provides frames from either camera
-*/
-@interface GPUImageVideoCamera : GPUImageOutput
-{
- NSUInteger numberOfFramesCaptured;
- CGFloat totalFrameTimeDuringCapture;
-
- AVCaptureSession *_captureSession;
- AVCaptureDevice *_inputCamera;
- AVCaptureDevice *_microphone;
- AVCaptureDeviceInput *videoInput;
- AVCaptureVideoDataOutput *videoOutput;
-
- BOOL capturePaused;
- GPUImageRotationMode outputRotation, internalRotation;
- dispatch_semaphore_t frameRenderingSemaphore;
-
- BOOL captureAsYUV;
- GLuint luminanceTexture, chrominanceTexture;
-
- __unsafe_unretained id _delegate;
-}
-
-/// The AVCaptureSession used to capture from the camera
-@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
-
-/// This enables the capture session preset to be changed on the fly
-@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
-
-/// This sets the frame rate of the camera (iOS 5 and above only)
-/**
- Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
- */
-@property (readwrite) int32_t frameRate;
-
-/// Easy way to tell which cameras are present on device
-@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
-@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
-
-/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
-@property(readwrite, nonatomic) BOOL runBenchmark;
-
-/// Use this property to manage camera settings. Focus point, exposure point, etc.
-@property(readonly) AVCaptureDevice *inputCamera;
-
-/// This determines the rotation applied to the output image, based on the source material
-@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
-
-/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
-@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
-
-@property(nonatomic, assign) id delegate;
-
-/// @name Initialization and teardown
-
-/** Begin a capture session
-
- See AVCaptureSession for acceptable values
-
- @param sessionPreset Session preset to use
- @param cameraPosition Camera to capture from
- */
-- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
-
-/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
- can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
- later. Returns YES is the audio inputs and outputs were added, or NO if they had already been added.
- */
-- (BOOL)addAudioInputsAndOutputs;
-
-/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
- were removed, or NO is they hadn't already been added.
- */
-- (BOOL)removeAudioInputsAndOutputs;
-
-/** Tear down the capture session
- */
-- (void)removeInputsAndOutputs;
-
-/// @name Manage the camera video stream
-
-/** Start camera capturing
- */
-- (void)startCameraCapture;
-
-/** Stop camera capturing
- */
-- (void)stopCameraCapture;
-
-/** Pause camera capturing
- */
-- (void)pauseCameraCapture;
-
-/** Resume camera capturing
- */
-- (void)resumeCameraCapture;
-
-/** Process a video sample
- @param sampleBuffer Buffer to process
- */
-- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-
-/** Process an audio sample
- @param sampleBuffer Buffer to process
- */
-- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-
-/** Get the position (front, rear) of the source camera
- */
-- (AVCaptureDevicePosition)cameraPosition;
-
-/** Get the AVCaptureConnection of the source camera
- */
-- (AVCaptureConnection *)videoCaptureConnection;
-
-/** This flips between the front and rear cameras
- */
-- (void)rotateCamera;
-
-/// @name Benchmarking
-
-/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
- */
-- (CGFloat)averageFrameDurationDuringCapture;
-
-- (void)resetBenchmarkAverage;
-
-+ (BOOL)isBackFacingCameraPresent;
-+ (BOOL)isFrontFacingCameraPresent;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.m b/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.m
deleted file mode 100644
index b84f88a..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVideoCamera.m
+++ /dev/null
@@ -1,1129 +0,0 @@
-#import "GPUImageVideoCamera.h"
-#import "GPUImageMovieWriter.h"
-#import "GPUImageFilter.h"
-
-// Color Conversion Constants (YUV to RGB) including adjustment from 16-235/16-240 (video range)
-
-// BT.601, which is the standard for SDTV.
-const GLfloat kColorConversion601[] = {
- 1.164, 1.164, 1.164,
- 0.0, -0.392, 2.017,
- 1.596, -0.813, 0.0,
-};
-
-// BT.709, which is the standard for HDTV.
-const GLfloat kColorConversion709[] = {
- 1.164, 1.164, 1.164,
- 0.0, -0.213, 2.112,
- 1.793, -0.533, 0.0,
-};
-
-// BT.601 full range (ref: http://www.equasys.de/colorconversion.html)
-const GLfloat kColorConversion601FullRange[] = {
- 1.0, 1.0, 1.0,
- 0.0, -0.343, 1.765,
- 1.4, -0.711, 0.0,
-};
-
-NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D luminanceTexture;
- uniform sampler2D chrominanceTexture;
- uniform mediump mat3 colorConversionMatrix;
-
- void main()
- {
- mediump vec3 yuv;
- lowp vec3 rgb;
-
- yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
- yuv.yz = texture2D(chrominanceTexture, textureCoordinate).rg - vec2(0.5, 0.5);
- rgb = colorConversionMatrix * yuv;
-
- gl_FragColor = vec4(rgb, 1);
- }
- );
-
-NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D luminanceTexture;
- uniform sampler2D chrominanceTexture;
- uniform mediump mat3 colorConversionMatrix;
-
- void main()
- {
- mediump vec3 yuv;
- lowp vec3 rgb;
-
- yuv.x = texture2D(luminanceTexture, textureCoordinate).r;
- yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
- rgb = colorConversionMatrix * yuv;
-
- gl_FragColor = vec4(rgb, 1);
- }
- );
-
-NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D luminanceTexture;
- uniform sampler2D chrominanceTexture;
- uniform mediump mat3 colorConversionMatrix;
-
- void main()
- {
- mediump vec3 yuv;
- lowp vec3 rgb;
-
- yuv.x = texture2D(luminanceTexture, textureCoordinate).r - (16.0/255.0);
- yuv.yz = texture2D(chrominanceTexture, textureCoordinate).ra - vec2(0.5, 0.5);
- rgb = colorConversionMatrix * yuv;
-
- gl_FragColor = vec4(rgb, 1);
- }
- );
-
-
-#pragma mark -
-#pragma mark Private methods and instance variables
-
-@interface GPUImageVideoCamera ()
-{
- AVCaptureDeviceInput *audioInput;
- AVCaptureAudioDataOutput *audioOutput;
- NSDate *startingCaptureTime;
-
- dispatch_queue_t cameraProcessingQueue, audioProcessingQueue;
-
- GLProgram *yuvConversionProgram;
- GLint yuvConversionPositionAttribute, yuvConversionTextureCoordinateAttribute;
- GLint yuvConversionLuminanceTextureUniform, yuvConversionChrominanceTextureUniform;
- GLint yuvConversionMatrixUniform;
- const GLfloat *_preferredConversion;
-
- BOOL isFullYUVRange;
-
- int imageBufferWidth, imageBufferHeight;
-
- BOOL addedAudioInputsDueToEncodingTarget;
-}
-
-- (void)updateOrientationSendToTargets;
-- (void)convertYUVToRGBOutput;
-
-@end
-
-@implementation GPUImageVideoCamera
-
-@synthesize captureSessionPreset = _captureSessionPreset;
-@synthesize captureSession = _captureSession;
-@synthesize inputCamera = _inputCamera;
-@synthesize runBenchmark = _runBenchmark;
-@synthesize outputImageOrientation = _outputImageOrientation;
-@synthesize delegate = _delegate;
-@synthesize horizontallyMirrorFrontFacingCamera = _horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera = _horizontallyMirrorRearFacingCamera;
-@synthesize frameRate = _frameRate;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack]))
- {
- return nil;
- }
-
- return self;
-}
-
-- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- cameraProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH,0);
- audioProcessingQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW,0);
-
- frameRenderingSemaphore = dispatch_semaphore_create(1);
-
- _frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
- _runBenchmark = NO;
- capturePaused = NO;
- outputRotation = kGPUImageNoRotation;
- internalRotation = kGPUImageNoRotation;
- captureAsYUV = YES;
- _preferredConversion = kColorConversion709;
-
- // Grab the back-facing or front-facing camera
- _inputCamera = nil;
- NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
- for (AVCaptureDevice *device in devices)
- {
- if ([device position] == cameraPosition)
- {
- _inputCamera = device;
- }
- }
-
- if (!_inputCamera) {
- return nil;
- }
-
- // Create the capture session
- _captureSession = [[AVCaptureSession alloc] init];
-
- [_captureSession beginConfiguration];
-
- // Add the video input
- NSError *error = nil;
- videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:_inputCamera error:&error];
- if ([_captureSession canAddInput:videoInput])
- {
- [_captureSession addInput:videoInput];
- }
-
- // Add the video frame output
- videoOutput = [[AVCaptureVideoDataOutput alloc] init];
- [videoOutput setAlwaysDiscardsLateVideoFrames:NO];
-
-// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
- if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
- {
- BOOL supportsFullYUVRange = NO;
- NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
- for (NSNumber *currentPixelFormat in supportedPixelFormats)
- {
- if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
- {
- supportsFullYUVRange = YES;
- }
- }
-
- if (supportsFullYUVRange)
- {
- [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- isFullYUVRange = YES;
- }
- else
- {
- [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- isFullYUVRange = NO;
- }
- }
- else
- {
- [videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
- }
-
- runSynchronouslyOnVideoProcessingQueue(^{
-
- if (captureAsYUV)
- {
- [GPUImageContext useImageProcessingContext];
- // if ([GPUImageContext deviceSupportsRedTextures])
- // {
- // yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
- // }
- // else
- // {
- if (isFullYUVRange)
- {
- yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
- }
- else
- {
- yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
- }
-
- // }
-
- if (!yuvConversionProgram.initialized)
- {
- [yuvConversionProgram addAttribute:@"position"];
- [yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
-
- if (![yuvConversionProgram link])
- {
- NSString *progLog = [yuvConversionProgram programLog];
- NSLog(@"Program link log: %@", progLog);
- NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
- NSLog(@"Fragment shader compile log: %@", fragLog);
- NSString *vertLog = [yuvConversionProgram vertexShaderLog];
- NSLog(@"Vertex shader compile log: %@", vertLog);
- yuvConversionProgram = nil;
- NSAssert(NO, @"Filter shader link failed");
- }
- }
-
- yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
- yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
- yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
- yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
- yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
-
- [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
-
- glEnableVertexAttribArray(yuvConversionPositionAttribute);
- glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
- }
- });
-
- [videoOutput setSampleBufferDelegate:self queue:cameraProcessingQueue];
- if ([_captureSession canAddOutput:videoOutput])
- {
- [_captureSession addOutput:videoOutput];
- }
- else
- {
- NSLog(@"Couldn't add video output");
- return nil;
- }
-
- _captureSessionPreset = sessionPreset;
- [_captureSession setSessionPreset:_captureSessionPreset];
-
-// This will let you get 60 FPS video from the 720p preset on an iPhone 4S, but only that device and that preset
-// AVCaptureConnection *conn = [videoOutput connectionWithMediaType:AVMediaTypeVideo];
-//
-// if (conn.supportsVideoMinFrameDuration)
-// conn.videoMinFrameDuration = CMTimeMake(1,60);
-// if (conn.supportsVideoMaxFrameDuration)
-// conn.videoMaxFrameDuration = CMTimeMake(1,60);
-
- [_captureSession commitConfiguration];
-
- return self;
-}
-
-- (GPUImageFramebuffer *)framebufferForOutput;
-{
- return outputFramebuffer;
-}
-
-- (void)dealloc
-{
- [self stopCameraCapture];
- [videoOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
- [audioOutput setSampleBufferDelegate:nil queue:dispatch_get_main_queue()];
-
- [self removeInputsAndOutputs];
-
-// ARC forbids explicit message send of 'release'; since iOS 6 even for dispatch_release() calls: stripping it out in that case is required.
-#if !OS_OBJECT_USE_OBJC
- if (frameRenderingSemaphore != NULL)
- {
- dispatch_release(frameRenderingSemaphore);
- }
-#endif
-}
-
-- (BOOL)addAudioInputsAndOutputs
-{
- if (audioOutput)
- return NO;
-
- [_captureSession beginConfiguration];
-
- _microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
- audioInput = [AVCaptureDeviceInput deviceInputWithDevice:_microphone error:nil];
- if ([_captureSession canAddInput:audioInput])
- {
- [_captureSession addInput:audioInput];
- }
- audioOutput = [[AVCaptureAudioDataOutput alloc] init];
-
- if ([_captureSession canAddOutput:audioOutput])
- {
- [_captureSession addOutput:audioOutput];
- }
- else
- {
- NSLog(@"Couldn't add audio output");
- }
- [audioOutput setSampleBufferDelegate:self queue:audioProcessingQueue];
-
- [_captureSession commitConfiguration];
- return YES;
-}
-
-- (BOOL)removeAudioInputsAndOutputs
-{
- if (!audioOutput)
- return NO;
-
- [_captureSession beginConfiguration];
- [_captureSession removeInput:audioInput];
- [_captureSession removeOutput:audioOutput];
- audioInput = nil;
- audioOutput = nil;
- _microphone = nil;
- [_captureSession commitConfiguration];
- return YES;
-}
-
-- (void)removeInputsAndOutputs;
-{
- [_captureSession beginConfiguration];
- if (videoInput) {
- [_captureSession removeInput:videoInput];
- [_captureSession removeOutput:videoOutput];
- videoInput = nil;
- videoOutput = nil;
- }
- if (_microphone != nil)
- {
- [_captureSession removeInput:audioInput];
- [_captureSession removeOutput:audioOutput];
- audioInput = nil;
- audioOutput = nil;
- _microphone = nil;
- }
- [_captureSession commitConfiguration];
-}
-
-#pragma mark -
-#pragma mark Managing targets
-
-- (void)addTarget:(id)newTarget atTextureLocation:(NSInteger)textureLocation;
-{
- [super addTarget:newTarget atTextureLocation:textureLocation];
-
- [newTarget setInputRotation:outputRotation atIndex:textureLocation];
-}
-
-#pragma mark -
-#pragma mark Manage the camera video stream
-
-- (void)startCameraCapture;
-{
- if (![_captureSession isRunning])
- {
- startingCaptureTime = [NSDate date];
- [_captureSession startRunning];
- };
-}
-
-- (void)stopCameraCapture;
-{
- if ([_captureSession isRunning])
- {
- [_captureSession stopRunning];
- }
-}
-
-- (void)pauseCameraCapture;
-{
- capturePaused = YES;
-}
-
-- (void)resumeCameraCapture;
-{
- capturePaused = NO;
-}
-
-- (void)rotateCamera
-{
- if (self.frontFacingCameraPresent == NO)
- return;
-
- NSError *error;
- AVCaptureDeviceInput *newVideoInput;
- AVCaptureDevicePosition currentCameraPosition = [[videoInput device] position];
-
- if (currentCameraPosition == AVCaptureDevicePositionBack)
- {
- currentCameraPosition = AVCaptureDevicePositionFront;
- }
- else
- {
- currentCameraPosition = AVCaptureDevicePositionBack;
- }
-
- AVCaptureDevice *backFacingCamera = nil;
- NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
- for (AVCaptureDevice *device in devices)
- {
- if ([device position] == currentCameraPosition)
- {
- backFacingCamera = device;
- }
- }
- newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:backFacingCamera error:&error];
-
- if (newVideoInput != nil)
- {
- [_captureSession beginConfiguration];
-
- [_captureSession removeInput:videoInput];
- if ([_captureSession canAddInput:newVideoInput])
- {
- [_captureSession addInput:newVideoInput];
- videoInput = newVideoInput;
- }
- else
- {
- [_captureSession addInput:videoInput];
- }
- //captureSession.sessionPreset = oriPreset;
- [_captureSession commitConfiguration];
- }
-
- _inputCamera = backFacingCamera;
- [self setOutputImageOrientation:_outputImageOrientation];
-}
-
-- (AVCaptureDevicePosition)cameraPosition
-{
- return [[videoInput device] position];
-}
-
-+ (BOOL)isBackFacingCameraPresent;
-{
- NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-
- for (AVCaptureDevice *device in devices)
- {
- if ([device position] == AVCaptureDevicePositionBack)
- return YES;
- }
-
- return NO;
-}
-
-- (BOOL)isBackFacingCameraPresent
-{
- return [GPUImageVideoCamera isBackFacingCameraPresent];
-}
-
-+ (BOOL)isFrontFacingCameraPresent;
-{
- NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
-
- for (AVCaptureDevice *device in devices)
- {
- if ([device position] == AVCaptureDevicePositionFront)
- return YES;
- }
-
- return NO;
-}
-
-- (BOOL)isFrontFacingCameraPresent
-{
- return [GPUImageVideoCamera isFrontFacingCameraPresent];
-}
-
-- (void)setCaptureSessionPreset:(NSString *)captureSessionPreset;
-{
- [_captureSession beginConfiguration];
-
- _captureSessionPreset = captureSessionPreset;
- [_captureSession setSessionPreset:_captureSessionPreset];
-
- [_captureSession commitConfiguration];
-}
-
-- (void)setFrameRate:(int32_t)frameRate;
-{
- _frameRate = frameRate;
-
- if (_frameRate > 0)
- {
- if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
- [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
-
- NSError *error;
- [_inputCamera lockForConfiguration:&error];
- if (error == nil) {
-#if defined(__IPHONE_7_0)
- [_inputCamera setActiveVideoMinFrameDuration:CMTimeMake(1, _frameRate)];
- [_inputCamera setActiveVideoMaxFrameDuration:CMTimeMake(1, _frameRate)];
-#endif
- }
- [_inputCamera unlockForConfiguration];
-
- } else {
-
- for (AVCaptureConnection *connection in videoOutput.connections)
- {
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
- connection.videoMinFrameDuration = CMTimeMake(1, _frameRate);
-
- if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
- connection.videoMaxFrameDuration = CMTimeMake(1, _frameRate);
-#pragma clang diagnostic pop
- }
- }
-
- }
- else
- {
- if ([_inputCamera respondsToSelector:@selector(setActiveVideoMinFrameDuration:)] &&
- [_inputCamera respondsToSelector:@selector(setActiveVideoMaxFrameDuration:)]) {
-
- NSError *error;
- [_inputCamera lockForConfiguration:&error];
- if (error == nil) {
-#if defined(__IPHONE_7_0)
- [_inputCamera setActiveVideoMinFrameDuration:kCMTimeInvalid];
- [_inputCamera setActiveVideoMaxFrameDuration:kCMTimeInvalid];
-#endif
- }
- [_inputCamera unlockForConfiguration];
-
- } else {
-
- for (AVCaptureConnection *connection in videoOutput.connections)
- {
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
- if ([connection respondsToSelector:@selector(setVideoMinFrameDuration:)])
- connection.videoMinFrameDuration = kCMTimeInvalid; // This sets videoMinFrameDuration back to default
-
- if ([connection respondsToSelector:@selector(setVideoMaxFrameDuration:)])
- connection.videoMaxFrameDuration = kCMTimeInvalid; // This sets videoMaxFrameDuration back to default
-#pragma clang diagnostic pop
- }
- }
-
- }
-}
-
-- (int32_t)frameRate;
-{
- return _frameRate;
-}
-
-- (AVCaptureConnection *)videoCaptureConnection {
- for (AVCaptureConnection *connection in [videoOutput connections] ) {
- for ( AVCaptureInputPort *port in [connection inputPorts] ) {
- if ( [[port mediaType] isEqual:AVMediaTypeVideo] ) {
- return connection;
- }
- }
- }
-
- return nil;
-}
-
-#define INITIALFRAMESTOIGNOREFORBENCHMARK 5
-
-- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime;
-{
- // First, update all the framebuffers in the targets
- for (id currentTarget in targets)
- {
- if ([currentTarget enabled])
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
- [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];
-
- if ([currentTarget wantsMonochromeInput] && captureAsYUV)
- {
- [currentTarget setCurrentlyReceivingMonochromeInput:YES];
- // TODO: Replace optimization for monochrome output
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
- }
- else
- {
- [currentTarget setCurrentlyReceivingMonochromeInput:NO];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
- }
- }
- else
- {
- [currentTarget setInputRotation:outputRotation atIndex:textureIndexOfTarget];
- [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
- }
- }
- }
-
- // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed
- [outputFramebuffer unlock];
- outputFramebuffer = nil;
-
- // Finally, trigger rendering as needed
- for (id currentTarget in targets)
- {
- if ([currentTarget enabled])
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
-
- if (currentTarget != self.targetToIgnoreForUpdates)
- {
- [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
- }
- }
- }
-}
-
-- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-{
- if (capturePaused)
- {
- return;
- }
-
- CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
- CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
- int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
- int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
- CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
- if (colorAttachments != NULL)
- {
- if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
- {
- if (isFullYUVRange)
- {
- _preferredConversion = kColorConversion601FullRange;
- }
- else
- {
- _preferredConversion = kColorConversion601;
- }
- }
- else
- {
- _preferredConversion = kColorConversion709;
- }
- }
- else
- {
- if (isFullYUVRange)
- {
- _preferredConversion = kColorConversion601FullRange;
- }
- else
- {
- _preferredConversion = kColorConversion601;
- }
- }
-
- CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
-
- [GPUImageContext useImageProcessingContext];
-
- if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
- {
- CVOpenGLESTextureRef luminanceTextureRef = NULL;
- CVOpenGLESTextureRef chrominanceTextureRef = NULL;
-
-// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
- if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
- {
- CVPixelBufferLockBaseAddress(cameraFrame, 0);
-
- if ( (imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight) )
- {
- imageBufferWidth = bufferWidth;
- imageBufferHeight = bufferHeight;
- }
-
- CVReturn err;
- // Y-plane
- glActiveTexture(GL_TEXTURE4);
- if ([GPUImageContext deviceSupportsRedTextures])
- {
-// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RED_EXT, bufferWidth, bufferHeight, GL_RED_EXT, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
- }
- else
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
- }
- if (err)
- {
- NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
- }
-
- luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
- glBindTexture(GL_TEXTURE_2D, luminanceTexture);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
- // UV-plane
- glActiveTexture(GL_TEXTURE5);
- if ([GPUImageContext deviceSupportsRedTextures])
- {
-// err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, cameraFrame, NULL, GL_TEXTURE_2D, GL_RG_EXT, bufferWidth/2, bufferHeight/2, GL_RG_EXT, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
- }
- else
- {
- err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
- }
- if (err)
- {
- NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
- }
-
- chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
- glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
-// if (!allTargetsWantMonochromeData)
-// {
- [self convertYUVToRGBOutput];
-// }
-
- int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
-
- if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
- {
- rotatedImageBufferWidth = bufferHeight;
- rotatedImageBufferHeight = bufferWidth;
- }
-
- [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
-
- CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
- CFRelease(luminanceTextureRef);
- CFRelease(chrominanceTextureRef);
- }
- else
- {
- // TODO: Mesh this with the output framebuffer structure
-
-// CVPixelBufferLockBaseAddress(cameraFrame, 0);
-//
-// CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_RGBA, bufferWidth, bufferHeight, GL_BGRA, GL_UNSIGNED_BYTE, 0, &texture);
-//
-// if (!texture || err) {
-// NSLog(@"Camera CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err);
-// NSAssert(NO, @"Camera failure");
-// return;
-// }
-//
-// outputTexture = CVOpenGLESTextureGetName(texture);
-// // glBindTexture(CVOpenGLESTextureGetTarget(texture), outputTexture);
-// glBindTexture(GL_TEXTURE_2D, outputTexture);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
-// glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-//
-// [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bufferWidth height:bufferHeight time:currentTime];
-//
-// CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
-// CFRelease(texture);
-//
-// outputTexture = 0;
- }
-
-
- if (_runBenchmark)
- {
- numberOfFramesCaptured++;
- if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
- {
- CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
- totalFrameTimeDuringCapture += currentFrameTime;
- NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
- NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
- }
- }
- }
- else
- {
- CVPixelBufferLockBaseAddress(cameraFrame, 0);
-
- int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
- [outputFramebuffer activateFramebuffer];
-
- glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
-
- // glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bufferWidth, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
-
- // Using BGRA extension to pull in video frame data directly
- // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
- glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
-
- [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:bufferHeight time:currentTime];
-
- CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
-
- if (_runBenchmark)
- {
- numberOfFramesCaptured++;
- if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
- {
- CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
- totalFrameTimeDuringCapture += currentFrameTime;
- }
- }
- }
-}
-
-- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
-{
- [self.audioEncodingTarget processAudioBuffer:sampleBuffer];
-}
-
-- (void)convertYUVToRGBOutput;
-{
- [GPUImageContext setActiveShaderProgram:yuvConversionProgram];
-
- int rotatedImageBufferWidth = imageBufferWidth, rotatedImageBufferHeight = imageBufferHeight;
-
- if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
- {
- rotatedImageBufferWidth = imageBufferHeight;
- rotatedImageBufferHeight = imageBufferWidth;
- }
-
- outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(rotatedImageBufferWidth, rotatedImageBufferHeight) textureOptions:self.outputTextureOptions onlyTexture:NO];
- [outputFramebuffer activateFramebuffer];
-
- glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
- glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
-
- static const GLfloat squareVertices[] = {
- -1.0f, -1.0f,
- 1.0f, -1.0f,
- -1.0f, 1.0f,
- 1.0f, 1.0f,
- };
-
- glActiveTexture(GL_TEXTURE4);
- glBindTexture(GL_TEXTURE_2D, luminanceTexture);
- glUniform1i(yuvConversionLuminanceTextureUniform, 4);
-
- glActiveTexture(GL_TEXTURE5);
- glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
- glUniform1i(yuvConversionChrominanceTextureUniform, 5);
-
- glUniformMatrix3fv(yuvConversionMatrixUniform, 1, GL_FALSE, _preferredConversion);
-
- glVertexAttribPointer(yuvConversionPositionAttribute, 2, GL_FLOAT, 0, 0, squareVertices);
- glVertexAttribPointer(yuvConversionTextureCoordinateAttribute, 2, GL_FLOAT, 0, 0, [GPUImageFilter textureCoordinatesForRotation:internalRotation]);
-
- glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
-}
-
-#pragma mark -
-#pragma mark Benchmarking
-
-- (CGFloat)averageFrameDurationDuringCapture;
-{
- return (totalFrameTimeDuringCapture / (CGFloat)(numberOfFramesCaptured - INITIALFRAMESTOIGNOREFORBENCHMARK)) * 1000.0;
-}
-
-- (void)resetBenchmarkAverage;
-{
- numberOfFramesCaptured = 0;
- totalFrameTimeDuringCapture = 0.0;
-}
-
-#pragma mark -
-#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
-
-- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
-{
- if (!self.captureSession.isRunning)
- {
- return;
- }
- else if (captureOutput == audioOutput)
- {
- [self processAudioSampleBuffer:sampleBuffer];
- }
- else
- {
- if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0)
- {
- return;
- }
-
- CFRetain(sampleBuffer);
- runAsynchronouslyOnVideoProcessingQueue(^{
- //Feature Detection Hook.
- if (self.delegate)
- {
- [self.delegate willOutputSampleBuffer:sampleBuffer];
- }
-
- [self processVideoSampleBuffer:sampleBuffer];
-
- CFRelease(sampleBuffer);
- dispatch_semaphore_signal(frameRenderingSemaphore);
- });
- }
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setAudioEncodingTarget:(GPUImageMovieWriter *)newValue;
-{
- if (newValue) {
- /* Add audio inputs and outputs, if necessary */
- addedAudioInputsDueToEncodingTarget |= [self addAudioInputsAndOutputs];
- } else if (addedAudioInputsDueToEncodingTarget) {
- /* Remove audio inputs and outputs, if they were added by previously setting the audio encoding target */
- [self removeAudioInputsAndOutputs];
- addedAudioInputsDueToEncodingTarget = NO;
- }
-
- [super setAudioEncodingTarget:newValue];
-}
-
-- (void)updateOrientationSendToTargets;
-{
- runSynchronouslyOnVideoProcessingQueue(^{
-
- // From the iOS 5.0 release notes:
- // In previous iOS versions, the front-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeLeft and the back-facing camera would always deliver buffers in AVCaptureVideoOrientationLandscapeRight.
-
- if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
- {
- outputRotation = kGPUImageNoRotation;
- if ([self cameraPosition] == AVCaptureDevicePositionBack)
- {
- if (_horizontallyMirrorRearFacingCamera)
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
- case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotate180; break;
- case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
- case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
- default:internalRotation = kGPUImageNoRotation;
- }
- }
- else
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
- case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
- case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageRotate180; break;
- case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageNoRotation; break;
- default:internalRotation = kGPUImageNoRotation;
- }
- }
- }
- else
- {
- if (_horizontallyMirrorFrontFacingCamera)
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRightFlipVertical; break;
- case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateRightFlipHorizontal; break;
- case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageFlipHorizonal; break;
- case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageFlipVertical; break;
- default:internalRotation = kGPUImageNoRotation;
- }
- }
- else
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:internalRotation = kGPUImageRotateRight; break;
- case UIInterfaceOrientationPortraitUpsideDown:internalRotation = kGPUImageRotateLeft; break;
- case UIInterfaceOrientationLandscapeLeft:internalRotation = kGPUImageNoRotation; break;
- case UIInterfaceOrientationLandscapeRight:internalRotation = kGPUImageRotate180; break;
- default:internalRotation = kGPUImageNoRotation;
- }
- }
- }
- }
- else
- {
- if ([self cameraPosition] == AVCaptureDevicePositionBack)
- {
- if (_horizontallyMirrorRearFacingCamera)
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
- case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotate180; break;
- case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
- case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
- default:outputRotation = kGPUImageNoRotation;
- }
- }
- else
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
- case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
- case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageRotate180; break;
- case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageNoRotation; break;
- default:outputRotation = kGPUImageNoRotation;
- }
- }
- }
- else
- {
- if (_horizontallyMirrorFrontFacingCamera)
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRightFlipVertical; break;
- case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateRightFlipHorizontal; break;
- case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageFlipHorizonal; break;
- case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageFlipVertical; break;
- default:outputRotation = kGPUImageNoRotation;
- }
- }
- else
- {
- switch(_outputImageOrientation)
- {
- case UIInterfaceOrientationPortrait:outputRotation = kGPUImageRotateRight; break;
- case UIInterfaceOrientationPortraitUpsideDown:outputRotation = kGPUImageRotateLeft; break;
- case UIInterfaceOrientationLandscapeLeft:outputRotation = kGPUImageNoRotation; break;
- case UIInterfaceOrientationLandscapeRight:outputRotation = kGPUImageRotate180; break;
- default:outputRotation = kGPUImageNoRotation;
- }
- }
- }
- }
-
- for (id currentTarget in targets)
- {
- NSInteger indexOfObject = [targets indexOfObject:currentTarget];
- [currentTarget setInputRotation:outputRotation atIndex:[[targetTextureIndices objectAtIndex:indexOfObject] integerValue]];
- }
- });
-}
-
-- (void)setOutputImageOrientation:(UIInterfaceOrientation)newValue;
-{
- _outputImageOrientation = newValue;
- [self updateOrientationSendToTargets];
-}
-
-- (void)setHorizontallyMirrorFrontFacingCamera:(BOOL)newValue
-{
- _horizontallyMirrorFrontFacingCamera = newValue;
- [self updateOrientationSendToTargets];
-}
-
-- (void)setHorizontallyMirrorRearFacingCamera:(BOOL)newValue
-{
- _horizontallyMirrorRearFacingCamera = newValue;
- [self updateOrientationSendToTargets];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.h
deleted file mode 100755
index 37be944..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.h
+++ /dev/null
@@ -1,22 +0,0 @@
-#import "GPUImageFilter.h"
-
-/** Performs a vignetting effect, fading out the image at the edges
- */
-@interface GPUImageVignetteFilter : GPUImageFilter
-{
- GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
-}
-
-// the center for the vignette in tex coords (defaults to 0.5, 0.5)
-@property (nonatomic, readwrite) CGPoint vignetteCenter;
-
-// The color to use for the Vignette (defaults to black)
-@property (nonatomic, readwrite) GPUVector3 vignetteColor;
-
-// The normalized distance from the center where the vignette effect starts. Default of 0.5.
-@property (nonatomic, readwrite) CGFloat vignetteStart;
-
-// The normalized distance from the center where the vignette effect ends. Default of 0.75.
-@property (nonatomic, readwrite) CGFloat vignetteEnd;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.m
deleted file mode 100755
index 6e1eadb..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVignetteFilter.m
+++ /dev/null
@@ -1,104 +0,0 @@
-#import "GPUImageVignetteFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
- varying highp vec2 textureCoordinate;
-
- uniform lowp vec2 vignetteCenter;
- uniform lowp vec3 vignetteColor;
- uniform highp float vignetteStart;
- uniform highp float vignetteEnd;
-
- void main()
- {
- lowp vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);
- lowp float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));
- lowp float percent = smoothstep(vignetteStart, vignetteEnd, d);
- gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);
- }
-);
-#else
-NSString *const kGPUImageVignetteFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
- varying vec2 textureCoordinate;
-
- uniform vec2 vignetteCenter;
- uniform vec3 vignetteColor;
- uniform float vignetteStart;
- uniform float vignetteEnd;
-
- void main()
- {
- vec4 sourceImageColor = texture2D(inputImageTexture, textureCoordinate);
- float d = distance(textureCoordinate, vec2(vignetteCenter.x, vignetteCenter.y));
- float percent = smoothstep(vignetteStart, vignetteEnd, d);
- gl_FragColor = vec4(mix(sourceImageColor.rgb, vignetteColor, percent), sourceImageColor.a);
- }
-);
-#endif
-
-@implementation GPUImageVignetteFilter
-
-@synthesize vignetteCenter = _vignetteCenter;
-@synthesize vignetteColor = _vignetteColor;
-@synthesize vignetteStart =_vignetteStart;
-@synthesize vignetteEnd = _vignetteEnd;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageVignetteFragmentShaderString]))
- {
- return nil;
- }
-
- vignetteCenterUniform = [filterProgram uniformIndex:@"vignetteCenter"];
- vignetteColorUniform = [filterProgram uniformIndex:@"vignetteColor"];
- vignetteStartUniform = [filterProgram uniformIndex:@"vignetteStart"];
- vignetteEndUniform = [filterProgram uniformIndex:@"vignetteEnd"];
-
- self.vignetteCenter = (CGPoint){ 0.5f, 0.5f };
- self.vignetteColor = (GPUVector3){ 0.0f, 0.0f, 0.0f };
- self.vignetteStart = 0.3;
- self.vignetteEnd = 0.75;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setVignetteCenter:(CGPoint)newValue
-{
- _vignetteCenter = newValue;
-
- [self setPoint:newValue forUniform:vignetteCenterUniform program:filterProgram];
-}
-
-- (void)setVignetteColor:(GPUVector3)newValue
-{
- _vignetteColor = newValue;
-
- [self setVec3:newValue forUniform:vignetteColorUniform program:filterProgram];
-}
-
-- (void)setVignetteStart:(CGFloat)newValue;
-{
- _vignetteStart = newValue;
-
- [self setFloat:_vignetteStart forUniform:vignetteStartUniform program:filterProgram];
-}
-
-- (void)setVignetteEnd:(CGFloat)newValue;
-{
- _vignetteEnd = newValue;
-
- [self setFloat:_vignetteEnd forUniform:vignetteEndUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.h
deleted file mode 100644
index 659e39d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#import "GPUImageTwoInputFilter.h"
-
-@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
-{
- GLint sizeUniform;
-}
-
-@property (nonatomic, readwrite) CGSize sizeInPixels;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.m
deleted file mode 100644
index c12c34f..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageVoronoiConsumerFilter.m
+++ /dev/null
@@ -1,94 +0,0 @@
-#import "GPUImageVoronoiConsumerFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
-(
-
- precision highp float;
-
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
- uniform vec2 size;
- varying vec2 textureCoordinate;
-
- vec2 getCoordFromColor(vec4 color)
-{
- float z = color.z * 256.0;
- float yoff = floor(z / 8.0);
- float xoff = mod(z, 8.0);
- float x = color.x*256.0 + xoff*256.0;
- float y = color.y*256.0 + yoff*256.0;
- return vec2(x,y) / size;
-}
-
- void main(void) {
- vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
- vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
-
- gl_FragColor = color;
- }
-);
-#else
-NSString *const kGPUImageVoronoiConsumerFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
- uniform sampler2D inputImageTexture2;
- uniform vec2 size;
- varying vec2 textureCoordinate;
-
- vec2 getCoordFromColor(vec4 color)
- {
- float z = color.z * 256.0;
- float yoff = floor(z / 8.0);
- float xoff = mod(z, 8.0);
- float x = color.x*256.0 + xoff*256.0;
- float y = color.y*256.0 + yoff*256.0;
- return vec2(x,y) / size;
- }
-
- void main(void)
- {
- vec4 colorLoc = texture2D(inputImageTexture2, textureCoordinate);
- vec4 color = texture2D(inputImageTexture, getCoordFromColor(colorLoc));
-
- gl_FragColor = color;
- }
-);
-#endif
-
-@implementation GPUImageVoronoiConsumerFilter
-
-@synthesize sizeInPixels = _sizeInPixels;
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageVoronoiConsumerFragmentShaderString]))
- {
- return nil;
- }
-
- sizeUniform = [filterProgram uniformIndex:@"size"];
-
- return self;
-}
-
--(void)setSizeInPixels:(CGSize)sizeInPixels {
- _sizeInPixels = sizeInPixels;
-
- //validate that it's a power of 2 and square
-
- float width = log2(sizeInPixels.width);
- float height = log2(sizeInPixels.height);
-
- if (width != height) {
- NSLog(@"Voronoi point texture must be square");
- return;
- }
- if (width != floor(width) || height != floor(height)) {
- NSLog(@"Voronoi point texture must be a power of 2. Texture size %f, %f", sizeInPixels.width, sizeInPixels.height);
- return;
- }
- glUniform2f(sizeUniform, _sizeInPixels.width, _sizeInPixels.height);
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.h
deleted file mode 100644
index 44b76c6..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImage3x3TextureSamplingFilter.h"
-
-@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.m
deleted file mode 100644
index 4e95ad5..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageWeakPixelInclusionFilter.m
+++ /dev/null
@@ -1,94 +0,0 @@
-#import "GPUImageWeakPixelInclusionFilter.h"
-
-@implementation GPUImageWeakPixelInclusionFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING
-(
- precision lowp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
-
- float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;
- float sumTest = step(1.5, pixelIntensitySum);
- float pixelTest = step(0.01, centerIntensity);
-
- gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);
- }
-);
-#else
-NSString *const kGPUImageWeakPixelInclusionFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- void main()
- {
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float centerIntensity = texture2D(inputImageTexture, textureCoordinate).r;
-
- float pixelIntensitySum = bottomLeftIntensity + topRightIntensity + topLeftIntensity + bottomRightIntensity + leftIntensity + rightIntensity + bottomIntensity + topIntensity + centerIntensity;
- float sumTest = step(1.5, pixelIntensitySum);
- float pixelTest = step(0.01, centerIntensity);
-
- gl_FragColor = vec4(vec3(sumTest * pixelTest), 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageWeakPixelInclusionFragmentShaderString]))
- {
- return nil;
- }
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.h
deleted file mode 100644
index 6b09c33..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#import "GPUImageFilter.h"
-/**
- * Created by Alaric Cole
- * Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it.
-
- */
-@interface GPUImageWhiteBalanceFilter : GPUImageFilter
-{
- GLint temperatureUniform, tintUniform;
-}
-//choose color temperature, in degrees Kelvin
-@property(readwrite, nonatomic) CGFloat temperature;
-
-//adjust tint to compensate
-@property(readwrite, nonatomic) CGFloat tint;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.m
deleted file mode 100644
index 17c9bce..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageWhiteBalanceFilter.m
+++ /dev/null
@@ -1,107 +0,0 @@
-#import "GPUImageWhiteBalanceFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING
-(
-uniform sampler2D inputImageTexture;
-varying highp vec2 textureCoordinate;
-
-uniform lowp float temperature;
-uniform lowp float tint;
-
-const lowp vec3 warmFilter = vec3(0.93, 0.54, 0.0);
-
-const mediump mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);
-const mediump mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);
-
-void main()
-{
- lowp vec4 source = texture2D(inputImageTexture, textureCoordinate);
-
- mediump vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint
- yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);
- lowp vec3 rgb = YIQtoRGB * yiq;
-
- lowp vec3 processed = vec3(
- (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature
- (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))),
- (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));
-
- gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);
-}
-);
-#else
-NSString *const kGPUImageWhiteBalanceFragmentShaderString = SHADER_STRING
-(
- uniform sampler2D inputImageTexture;
- varying vec2 textureCoordinate;
-
- uniform float temperature;
- uniform float tint;
-
- const vec3 warmFilter = vec3(0.93, 0.54, 0.0);
-
- const mat3 RGBtoYIQ = mat3(0.299, 0.587, 0.114, 0.596, -0.274, -0.322, 0.212, -0.523, 0.311);
- const mat3 YIQtoRGB = mat3(1.0, 0.956, 0.621, 1.0, -0.272, -0.647, 1.0, -1.105, 1.702);
-
- void main()
-{
- vec4 source = texture2D(inputImageTexture, textureCoordinate);
-
- vec3 yiq = RGBtoYIQ * source.rgb; //adjusting tint
- yiq.b = clamp(yiq.b + tint*0.5226*0.1, -0.5226, 0.5226);
- vec3 rgb = YIQtoRGB * yiq;
-
- vec3 processed = vec3(
- (rgb.r < 0.5 ? (2.0 * rgb.r * warmFilter.r) : (1.0 - 2.0 * (1.0 - rgb.r) * (1.0 - warmFilter.r))), //adjusting temperature
- (rgb.g < 0.5 ? (2.0 * rgb.g * warmFilter.g) : (1.0 - 2.0 * (1.0 - rgb.g) * (1.0 - warmFilter.g))),
- (rgb.b < 0.5 ? (2.0 * rgb.b * warmFilter.b) : (1.0 - 2.0 * (1.0 - rgb.b) * (1.0 - warmFilter.b))));
-
- gl_FragColor = vec4(mix(rgb, processed, temperature), source.a);
-}
-);
-#endif
-
-@implementation GPUImageWhiteBalanceFilter
-
-@synthesize temperature = _temperature;
-@synthesize tint = _tint;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageWhiteBalanceFragmentShaderString]))
- {
- return nil;
- }
-
- temperatureUniform = [filterProgram uniformIndex:@"temperature"];
- tintUniform = [filterProgram uniformIndex:@"tint"];
-
- self.temperature = 5000.0;
- self.tint = 0.0;
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setTemperature:(CGFloat)newValue;
-{
- _temperature = newValue;
-
- [self setFloat:_temperature < 5000 ? 0.0004 * (_temperature-5000.0) : 0.00006 * (_temperature-5000.0) forUniform:temperatureUniform program:filterProgram];
-}
-
-- (void)setTint:(CGFloat)newValue;
-{
- _tint = newValue;
-
- [self setFloat:_tint / 100.0 forUniform:tintUniform program:filterProgram];
-}
-
-@end
-
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.h
deleted file mode 100755
index 8db5745..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.h
+++ /dev/null
@@ -1,5 +0,0 @@
-#import "GPUImageSobelEdgeDetectionFilter.h"
-
-@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.m
deleted file mode 100755
index 7e19e9d..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageXYDerivativeFilter.m
+++ /dev/null
@@ -1,106 +0,0 @@
-#import "GPUImageXYDerivativeFilter.h"
-
-// I'm using the Prewitt operator to obtain the derivative, then squaring the X and Y components and placing the product of the two in Z.
-// In tests, Prewitt seemed to be tied with Sobel for the best, and it's just a little cheaper to compute.
-// This is primarily intended to be used with corner detection filters.
-
-@implementation GPUImageXYDerivativeFilter
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING
-(
- precision highp float;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float edgeStrength;
-
- void main()
- {
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
-
- float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
- float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
- verticalDerivative = verticalDerivative * edgeStrength;
- horizontalDerivative = horizontalDerivative * edgeStrength;
-
- // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter
- gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);
- }
-);
-#else
-NSString *const kGPUImageGradientFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform float edgeStrength;
-
- void main()
- {
- float topIntensity = texture2D(inputImageTexture, topTextureCoordinate).r;
- float topRightIntensity = texture2D(inputImageTexture, topRightTextureCoordinate).r;
- float topLeftIntensity = texture2D(inputImageTexture, topLeftTextureCoordinate).r;
- float bottomIntensity = texture2D(inputImageTexture, bottomTextureCoordinate).r;
- float bottomLeftIntensity = texture2D(inputImageTexture, bottomLeftTextureCoordinate).r;
- float bottomRightIntensity = texture2D(inputImageTexture, bottomRightTextureCoordinate).r;
- float leftIntensity = texture2D(inputImageTexture, leftTextureCoordinate).r;
- float rightIntensity = texture2D(inputImageTexture, rightTextureCoordinate).r;
-
- float verticalDerivative = -topLeftIntensity - topIntensity - topRightIntensity + bottomLeftIntensity + bottomIntensity + bottomRightIntensity;
- float horizontalDerivative = -bottomLeftIntensity - leftIntensity - topLeftIntensity + bottomRightIntensity + rightIntensity + topRightIntensity;
- verticalDerivative = verticalDerivative * edgeStrength;
- horizontalDerivative = horizontalDerivative * edgeStrength;
-
- // Scaling the X * Y operation so that negative numbers are not clipped in the 0..1 range. This will be expanded in the corner detection filter
- gl_FragColor = vec4(horizontalDerivative * horizontalDerivative, verticalDerivative * verticalDerivative, ((verticalDerivative * horizontalDerivative) + 1.0) / 2.0, 1.0);
- }
-);
-#endif
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [self initWithFragmentShaderFromString:kGPUImageGradientFragmentShaderString]))
- {
- return nil;
- }
-
- self.edgeStrength = 1.0;
-
- return self;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.h
deleted file mode 100644
index 744a72c..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.h
+++ /dev/null
@@ -1,13 +0,0 @@
-#import "GPUImageFilter.h"
-
-@interface GPUImageZoomBlurFilter : GPUImageFilter
-
-/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
- */
-@property (readwrite, nonatomic) CGFloat blurSize;
-
-/** The normalized center of the blur. (0.5, 0.5) by default
- */
-@property (readwrite, nonatomic) CGPoint blurCenter;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.m
deleted file mode 100644
index 2ae8493..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageZoomBlurFilter.m
+++ /dev/null
@@ -1,115 +0,0 @@
-#import "GPUImageZoomBlurFilter.h"
-
-#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
-NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING
-(
- varying highp vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform highp vec2 blurCenter;
- uniform highp float blurSize;
-
- void main()
- {
- // TODO: Do a more intelligent scaling based on resolution here
- highp vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;
-
- lowp vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) * 0.12;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) * 0.12;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;
-
- gl_FragColor = fragmentColor;
- }
-);
-#else
-NSString *const kGPUImageZoomBlurFragmentShaderString = SHADER_STRING
-(
- varying vec2 textureCoordinate;
-
- uniform sampler2D inputImageTexture;
-
- uniform vec2 blurCenter;
- uniform float blurSize;
-
- void main()
- {
- // TODO: Do a more intelligent scaling based on resolution here
- vec2 samplingOffset = 1.0/100.0 * (blurCenter - textureCoordinate) * blurSize;
-
- vec4 fragmentColor = texture2D(inputImageTexture, textureCoordinate) * 0.18;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + samplingOffset) * 0.15;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (2.0 * samplingOffset)) * 0.12;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (3.0 * samplingOffset)) * 0.09;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate + (4.0 * samplingOffset)) * 0.05;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - samplingOffset) * 0.15;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (2.0 * samplingOffset)) * 0.12;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (3.0 * samplingOffset)) * 0.09;
- fragmentColor += texture2D(inputImageTexture, textureCoordinate - (4.0 * samplingOffset)) * 0.05;
-
- gl_FragColor = fragmentColor;
- }
-);
-#endif
-
-@interface GPUImageZoomBlurFilter()
-{
- GLint blurSizeUniform, blurCenterUniform;
-}
-@end
-
-@implementation GPUImageZoomBlurFilter
-
-@synthesize blurSize = _blurSize;
-@synthesize blurCenter = _blurCenter;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super initWithFragmentShaderFromString:kGPUImageZoomBlurFragmentShaderString]))
- {
- return nil;
- }
-
- blurSizeUniform = [filterProgram uniformIndex:@"blurSize"];
- blurCenterUniform = [filterProgram uniformIndex:@"blurCenter"];
-
- self.blurSize = 1.0;
- self.blurCenter = CGPointMake(0.5, 0.5);
-
- return self;
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
-{
- [super setInputRotation:newInputRotation atIndex:textureIndex];
- [self setBlurCenter:self.blurCenter];
-}
-
-- (void)setBlurSize:(CGFloat)newValue;
-{
- _blurSize = newValue;
-
- [self setFloat:_blurSize forUniform:blurSizeUniform program:filterProgram];
-}
-
-- (void)setBlurCenter:(CGPoint)newValue;
-{
- _blurCenter = newValue;
-
- CGPoint rotatedPoint = [self rotatedPoint:_blurCenter forRotation:inputRotation];
- [self setPoint:rotatedPoint forUniform:blurCenterUniform program:filterProgram];
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.h b/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.h
deleted file mode 100644
index 261d0d7..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.h
+++ /dev/null
@@ -1,31 +0,0 @@
-#import "GPUImageFilterGroup.h"
-
-@class GPUImageSaturationFilter;
-@class GPUImageGaussianBlurFilter;
-@class GPUImageLuminanceRangeFilter;
-
-@interface GPUImageiOSBlurFilter : GPUImageFilterGroup
-{
- GPUImageSaturationFilter *saturationFilter;
- GPUImageGaussianBlurFilter *blurFilter;
- GPUImageLuminanceRangeFilter *luminanceRangeFilter;
-}
-
-/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
- */
-@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
-
-/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
- */
-@property (readwrite, nonatomic) CGFloat saturation;
-
-/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
- */
-@property (readwrite, nonatomic) CGFloat downsampling;
-
-
-/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
- */
-@property (readwrite, nonatomic) CGFloat rangeReductionFactor;
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.m b/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.m
deleted file mode 100644
index 1853321..0000000
--- a/Example/Pods/GPUImage/framework/Source/GPUImageiOSBlurFilter.m
+++ /dev/null
@@ -1,114 +0,0 @@
-#import "GPUImageiOSBlurFilter.h"
-#import "GPUImageSaturationFilter.h"
-#import "GPUImageGaussianBlurFilter.h"
-#import "GPUImageLuminanceRangeFilter.h"
-
-@implementation GPUImageiOSBlurFilter
-
-@synthesize blurRadiusInPixels;
-@synthesize saturation;
-@synthesize downsampling = _downsampling;
-
-#pragma mark -
-#pragma mark Initialization and teardown
-
-- (id)init;
-{
- if (!(self = [super init]))
- {
- return nil;
- }
-
- // First pass: downsample and desaturate
- saturationFilter = [[GPUImageSaturationFilter alloc] init];
- [self addFilter:saturationFilter];
-
- // Second pass: apply a strong Gaussian blur
- blurFilter = [[GPUImageGaussianBlurFilter alloc] init];
- [self addFilter:blurFilter];
-
- // Third pass: upsample and adjust luminance range
- luminanceRangeFilter = [[GPUImageLuminanceRangeFilter alloc] init];
- [self addFilter:luminanceRangeFilter];
-
- [saturationFilter addTarget:blurFilter];
- [blurFilter addTarget:luminanceRangeFilter];
-
- self.initialFilters = [NSArray arrayWithObject:saturationFilter];
- self.terminalFilter = luminanceRangeFilter;
-
- self.blurRadiusInPixels = 12.0;
- self.saturation = 0.8;
- self.downsampling = 4.0;
- self.rangeReductionFactor = 0.6;
-
- return self;
-}
-
-- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
-{
- if (_downsampling > 1.0)
- {
- CGSize rotatedSize = [saturationFilter rotatedSize:newSize forIndex:textureIndex];
-
- [saturationFilter forceProcessingAtSize:CGSizeMake(rotatedSize.width / _downsampling, rotatedSize.height / _downsampling)];
- [luminanceRangeFilter forceProcessingAtSize:rotatedSize];
- }
-
- [super setInputSize:newSize atIndex:textureIndex];
-}
-
-#pragma mark -
-#pragma mark Accessors
-
-// From Apple's UIImage+ImageEffects category:
-
-// A description of how to compute the box kernel width from the Gaussian
-// radius (aka standard deviation) appears in the SVG spec:
-// http://www.w3.org/TR/SVG/filters.html#feGaussianBlurElement
-//
-// For larger values of 's' (s >= 2.0), an approximation can be used: Three
-// successive box-blurs build a piece-wise quadratic convolution kernel, which
-// approximates the Gaussian kernel to within roughly 3%.
-//
-// let d = floor(s * 3*sqrt(2*pi)/4 + 0.5)
-//
-// ... if d is odd, use three box-blurs of size 'd', centered on the output pixel.
-
-
-- (void)setBlurRadiusInPixels:(CGFloat)newValue;
-{
- blurFilter.blurRadiusInPixels = newValue;
-}
-
-- (CGFloat)blurRadiusInPixels;
-{
- return blurFilter.blurRadiusInPixels;
-}
-
-- (void)setSaturation:(CGFloat)newValue;
-{
- saturationFilter.saturation = newValue;
-}
-
-- (CGFloat)saturation;
-{
- return saturationFilter.saturation;
-}
-
-- (void)setDownsampling:(CGFloat)newValue;
-{
- _downsampling = newValue;
-}
-
-- (void)setRangeReductionFactor:(CGFloat)rangeReductionFactor
-{
- luminanceRangeFilter.rangeReductionFactor = rangeReductionFactor;
-}
-
-- (CGFloat)rangeReductionFactor
-{
- return luminanceRangeFilter.rangeReductionFactor;
-}
-
-@end
diff --git a/Example/Pods/GPUImage/framework/Source/iOS/Framework/GPUImageFramework.h b/Example/Pods/GPUImage/framework/Source/iOS/Framework/GPUImageFramework.h
deleted file mode 100644
index ea12ac0..0000000
--- a/Example/Pods/GPUImage/framework/Source/iOS/Framework/GPUImageFramework.h
+++ /dev/null
@@ -1,172 +0,0 @@
-#import
-
-//! Project version number for GPUImageFramework.
-FOUNDATION_EXPORT double GPUImageFrameworkVersionNumber;
-
-//! Project version string for GPUImageFramework.
-FOUNDATION_EXPORT const unsigned char GPUImageFrameworkVersionString[];
-
-#import
-
-// Base classes
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-
-// Filters
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import
-#import