diff --git a/.github/workflows/build-swift.yml b/.github/workflows/build-swift.yml
new file mode 100644
index 0000000..809455d
--- /dev/null
+++ b/.github/workflows/build-swift.yml
@@ -0,0 +1,24 @@
+name: build-swift
+
+on:
+ workflow_dispatch:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+concurrency:
+ # Cancel in-progress runs for PRs and other non-main refs; never cancel runs on main
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
+
+jobs:
+ build:
+ runs-on: macos-latest
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Build
+ run: swift build -v
+ - name: Run tests
+ run: swift test -v
diff --git a/.gitignore b/.gitignore
index b4d91a2..03ee614 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,11 +1,29 @@
# Ignore common artifacts
**/build
+**/.build
**/.idea
+**/dist
+**/.DS_Store
# Ignore Javascript/npm artifacts
**/node_modules
-**/dist
**/docs
**/coverage
+# Ignore Swift artifacts
+**/Packages
+**/xcuserdata/
+**/DerivedData/
+**/.swiftpm/configuration/registries.json
+**/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
+**/.netrc
+
+# Ignore Kotlin artifacts
+**/*.iml
+**/.gradle
+**/.env
+**/local.properties
+**/captures
+**/.externalNativeBuild
+**/.cxx
diff --git a/Package.swift b/Package.swift
new file mode 100644
index 0000000..07ec000
--- /dev/null
+++ b/Package.swift
@@ -0,0 +1,25 @@
+// swift-tools-version: 5.9
+// The swift-tools-version declares the minimum version of Swift required to build this package.
+
+import PackageDescription
+
+let package = Package(
+ name: "Polyline",
+ products: [
+ // Products define the executables and libraries a package produces, making them visible to other packages.
+ .library(
+ name: "Polyline",
+ targets: ["Polyline"]),
+ ],
+ targets: [
+ // Targets are the basic building blocks of a package, defining a module or a test suite.
+ // Targets can depend on other targets in this package and products from dependencies.
+ .target(
+ name: "Polyline",
+ path: "swift/Polyline/Sources/Polyline"),
+ .testTarget(
+ name: "PolylineTests",
+ dependencies: ["Polyline"],
+ path: "swift/Polyline/Tests/PolylineTests"),
+ ]
+)
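Package.swift above exposes a single `Polyline` library product whose sources live under `swift/Polyline`. As a rough sketch of how a consumer might pull it in via SwiftPM, assuming a hypothetical repository URL and version (neither is specified in this change):

```swift
// Hypothetical consumer manifest. The repository URL and version below are
// placeholders, not taken from this change.
// swift-tools-version: 5.9
import PackageDescription

let package = Package(
    name: "MyApp",
    dependencies: [
        // Substitute the real repository URL and a published version tag.
        .package(url: "https://github.com/example/polyline.git", from: "1.0.0"),
    ],
    targets: [
        .executableTarget(
            name: "MyApp",
            dependencies: [
                .product(name: "Polyline", package: "polyline"),
            ]),
    ]
)
```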
diff --git a/swift/Polyline/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/swift/Polyline/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 0000000..18d9810
--- /dev/null
+++ b/swift/Polyline/.swiftpm/xcode/package.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>IDEDidComputeMac32BitWarning</key>
+ <true/>
+</dict>
+</plist>
diff --git a/swift/Polyline/Sources/Polyline/Algorithm/Decoder.swift b/swift/Polyline/Sources/Polyline/Algorithm/Decoder.swift
new file mode 100644
index 0000000..9d572c1
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/Algorithm/Decoder.swift
@@ -0,0 +1,181 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+import Foundation
+
+class PolylineDecoder {
+ // decodingTable is a lookup table that converts ASCII values from 0x00-0x7F
+ // to the appropriate decoded 0x00-0x3F value. Polyline and Flexible-Polyline
+ // use different character encodings, so they need different decoding tables.
+ let decodingTable : [Int];
+
+ // containsHeader is true if the format includes a header (Flexible-Polyline),
+ // and false if it doesn't (Polyline).
+ let containsHeader : Bool;
+
+ init(decodingTable: [Int], containsHeader: Bool) {
+ self.decodingTable = decodingTable;
+ self.containsHeader = containsHeader;
+ }
+
+ // Given an encoded string and a starting index, this decodes a single encoded signed value.
+ // The decoded value will be an integer that still needs the decimal place moved over based
+ // on the number of digits of encoded precision.
+ private func decodeSignedValue(
+ encoded: String,
+ startIndex: Int
+ ) throws -> (result: Int64, nextIndex: Int) {
+ // decode an unsigned value
+ let (unsignedValue, nextIndex) = try self.decodeUnsignedValue(
+ encoded: encoded,
+ startIndex: startIndex
+ );
+ // If the unsigned value has a 1 encoded in its least significant bit,
+ // it's negative, so flip the bits.
+ var signedValue = unsignedValue;
+ if ((unsignedValue & 1) == 1) {
+ signedValue = ~signedValue;
+ }
+ // Shift the result by one to remove the encoded sign bit.
+ signedValue >>= 1;
+ return (signedValue, nextIndex);
+ }
+
+
+ // Given an encoded string and a starting index, this decodes a single encoded
+ // unsigned value. The flexible-polyline algorithm uses this directly to decode
+ // the header bytes, since those are encoded without the sign bit as the header
+ // values are known to be unsigned (which saves 2 bits).
+ private func decodeUnsignedValue(
+ encoded: String,
+ startIndex: Int
+ ) throws -> (result: Int64, nextIndex: Int) {
+ var result:Int64 = 0;
+ var shift = 0;
+ var index = startIndex;
+
+ // For each ASCII character, look up the 6-bit (0x00 - 0x3F) value that
+ // it represents. The low 5 bits are the next chunk of the value (least-
+ // significant chunk first), so shift that chunk left by the number of bits
+ // accumulated so far and OR it in. Keep going as long as the 6th bit is set.
+ while (index < encoded.count) {
+ let charCode = Int(encoded.unicodeScalars[encoded.index(encoded.startIndex, offsetBy: index)].value);
+ let value = self.decodingTable[charCode];
+ if (value < 0) {
+ throw DecodeError.invalidEncodedCharacter;
+ }
+ result |= Int64(value & 0x1f) << shift;
+ shift += 5;
+ index += 1;
+
+ // We've reached the final 5-bit chunk for this value, so return.
+ // We also return the index, which represents the starting index of the
+ // next value to decode.
+ if ((value & 0x20) == 0) {
+ return (result, index);
+ }
+ }
+
+ // If we've run out of encoded characters without finding an empty 6th bit,
+ // something has gone wrong.
+ throw DecodeError.extraContinueBit;
+ }
+
+ private func decodeHeader(
+ encoded: String
+ ) throws -> (header: CompressionParameters, index: Int) {
+ // If the data has a header, the first value is expected to be the header version
+ // and the second value is compressed metadata containing precision and dimension information.
+ let (headerVersion, metadataIndex) = try self.decodeUnsignedValue(encoded: encoded, startIndex: 0);
+ if (headerVersion != FlexiblePolylineFormatVersion) {
+ throw DecodeError.invalidHeaderVersion;
+ }
+ let (metadata, nextIndex) = try self.decodeUnsignedValue(
+ encoded: encoded,
+ startIndex: metadataIndex
+ );
+ let header = CompressionParameters(
+ precisionLngLat: Int(metadata & 0x0f),
+ precisionThirdDimension: Int(metadata >> 7) & 0x0f,
+ thirdDimension: ThirdDimension(rawValue: Int((metadata >> 4)) & 0x07)!
+ );
+ return ( header: header, index: nextIndex );
+ }
+
+
+ func decode(
+ encoded: String,
+ encodePrecision: Int = 0
+ ) throws -> (lngLatArray: Array<Array<Double>>, header: CompressionParameters) {
+ // Empty input strings are considered invalid.
+ if (encoded.count == 0) {
+ throw DecodeError.emptyInput;
+ }
+
+ // If the data doesn't have a header, default to the passed-in precision and no 3rd dimension.
+ var header = CompressionParameters(
+ precisionLngLat: encodePrecision,
+ precisionThirdDimension: 0,
+ thirdDimension: ThirdDimension.None
+ );
+
+ // Track the index of the next character to decode from the encoded string.
+ var index = 0;
+
+ if (self.containsHeader) {
+ (header, index) = try self.decodeHeader(encoded: encoded);
+ }
+
+ let numDimensions = (header.thirdDimension != ThirdDimension.None) ? 3 : 2;
+ var outputLngLatArray: Array<Array<Double>> = [];
+
+ // The data either contains lat/lng or lat/lng/z values that will be decoded.
+ // precisionDivisors are the divisors needed to convert the values from integers
+ // back to floating-point.
+ let precisionDivisors:[Double] = [
+ pow(10.0, Double(header.precisionLngLat)),
+ pow(10.0, Double(header.precisionLngLat)),
+ pow(10.0, Double(header.precisionThirdDimension))
+ ];
+
+ // maxAllowedValues are the maximum absolute values allowed for lat/lng/z. This is used for
+ // error-checking the coordinate values as they're being decoded.
+ let maxAllowedValues = [90.0, 180.0, Double.greatestFiniteMagnitude];
+
+ // While decoding, we want to switch from lat/lng/z to lng/lat/z, so this index tells us
+ // what position to put the dimension in for the resulting coordinate.
+ let resultDimensionIndex = [1, 0, 2];
+
+ // Decoded values are deltas from the previous coordinate values, so track the previous values.
+ var lastScaledCoordinate:[Int64] = [0, 0, 0];
+
+ // Keep decoding until we reach the end of the string.
+ while (index < encoded.count) {
+ // Each time through the loop we'll decode one full coordinate.
+ var coordinate: [Double] = (numDimensions == 2) ? [0.0, 0.0] : [0.0, 0.0, 0.0];
+ var deltaValue:Int64 = 0;
+
+ // Decode each dimension for the coordinate.
+ for dimension in 0...(numDimensions - 1) {
+ if (index >= encoded.count) {
+ throw DecodeError.missingCoordinateDimension;
+ }
+
+ (deltaValue, index) = try self.decodeSignedValue(encoded: encoded, startIndex: index);
+ lastScaledCoordinate[dimension] += deltaValue;
+ // Get the final lat/lng/z value by scaling the integer back down based on the number of
+ // digits of precision.
+ let value =
+ Double(lastScaledCoordinate[dimension]) / precisionDivisors[dimension];
+ if (abs(value) > maxAllowedValues[dimension]) {
+ throw DecodeError.invalidCoordinateValue;
+ }
+ coordinate[resultDimensionIndex[dimension]] = value;
+ }
+ outputLngLatArray.append(coordinate);
+ }
+
+ return (outputLngLatArray, header);
+ }
+
+}
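decodeUnsignedValue above treats each character as a 6-bit chunk: the low 5 bits are payload (least-significant chunk first) and bit 0x20 is a continuation flag. A minimal standalone sketch of just that chunk scheme, separate from the class above, with an illustrative two-chunk input:

```swift
// Standalone sketch of the 5-bit little-endian varint scheme used by
// decodeUnsignedValue. It operates on raw 6-bit chunk values (already looked up
// from a decoding table) rather than on an encoded string.
func decodeChunks(_ chunks: [Int]) -> Int64? {
    var result: Int64 = 0
    var shift = 0
    for chunk in chunks {
        result |= Int64(chunk & 0x1f) << shift   // low 5 bits are payload
        shift += 5
        if (chunk & 0x20) == 0 {                 // continuation bit clear: last chunk
            return result
        }
    }
    return nil                                   // ran out of chunks mid-value
}

// 0x21 has the continuation bit set and payload 1; 0x01 is a final chunk with
// payload 1, so the decoded value is 1 + (1 << 5) = 33.
assert(decodeChunks([0x21, 0x01]) == 33)
```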
diff --git a/swift/Polyline/Sources/Polyline/Algorithm/Encoder.swift b/swift/Polyline/Sources/Polyline/Algorithm/Encoder.swift
new file mode 100644
index 0000000..cb1079b
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/Algorithm/Encoder.swift
@@ -0,0 +1,159 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+// This class implements both the Encoded Polyline Algorithm Format
+// (https://developers.google.com/maps/documentation/utilities/polylinealgorithm)
+// and the Flexible-Polyline variation of the algorithm (https://github.com/heremaps/flexible-polyline).
+
+// This implementation has two differences to improve usability:
+// - It uses well-defined rounding to ensure deterministic results across all programming languages.
+// The Flexible-Polyline algorithm definition says to use the rounding rules of the programming
+// language, but this can cause inconsistent rounding depending on what language happens to be used
+// on both the encoding and decoding sides.
+// - It caps the max encoding/decoding precision to 11 decimal places (1 micrometer), because 12+ places can
+// lose precision when using 64-bit floating-point numbers to store integers.
+
+import Foundation
+
+class PolylineEncoder {
+ // encodingTable is a lookup table that converts values from 0x00-0x3F
+ // to the appropriate encoded ASCII character. Polyline and Flexible-Polyline
+ // use different character encodings.
+ let encodingTable: String;
+
+ // includeHeader is true if the format includes a header (Flexible-Polyline),
+ // and false if it doesn't (Polyline).
+ let includeHeader: Bool;
+
+ init(encodingTable: String, includeHeader: Bool) {
+ self.encodingTable = encodingTable;
+ self.includeHeader = includeHeader;
+ }
+
+ // The original polyline algorithm supposedly uses "round to nearest, ties away from 0"
+ // for its rounding rule. Flexible-polyline uses the rounding rules of the implementing
+ // language. Our generalized implementation will use the "round to nearest, ties away from 0"
+ // rule for all languages to keep the encoding deterministic across implementations.
+ private func polylineRound(_ value: Double) -> Int64 {
+ let rounded = floor(abs(value) + 0.5);
+ return (value >= 0.0) ? Int64(rounded) : Int64(-rounded);
+ }
+
+ func encode(
+ lngLatArray: Array<Array<Double>>,
+ precision: Int,
+ thirdDim: ThirdDimension = ThirdDimension.None,
+ thirdDimPrecision: Int = 0
+ ) throws -> String {
+ if (precision < 0 || precision > 11) {
+ throw EncodeError.invalidPrecisionValue;
+ }
+ if (thirdDimPrecision < 0 || thirdDimPrecision > 11) {
+ throw EncodeError.invalidPrecisionValue;
+ }
+
+ if (lngLatArray.count == 0) {
+ return "";
+ }
+
+ let numDimensions = (thirdDim != ThirdDimension.None) ? 3 : 2;
+
+ // The data will either encode lat/lng or lat/lng/z values.
+ // precisionMultipliers are the multipliers needed to convert the values
+ // from floating-point to scaled integers.
+ let precisionMultipliers = [
+ pow(10.0, Double(precision)),
+ pow(10.0, Double(precision)),
+ pow(10.0, Double(thirdDimPrecision))
+ ];
+
+ // While encoding, we want to switch from lng/lat/z to lat/lng/z, so this index tells us
+ // what index to grab from the input coordinate when encoding each dimension.
+ let inputDimensionIndex = [1, 0, 2];
+
+ // maxAllowedValues are the maximum absolute values allowed for lat/lng/z. This is used for
+ // error-checking the coordinate values as they're being encoded.
+ let maxAllowedValues = [90.0, 180.0, Double.greatestFiniteMagnitude];
+
+ // Encoded values are deltas from the previous coordinate values, so track the previous lat/lng/z values.
+ var lastScaledCoordinate:[Int64] = [0, 0, 0];
+
+ var output = "";
+
+ // Flexible-polyline starts with an encoded header that contains precision and dimension metadata.
+ if (self.includeHeader) {
+ output = self.encodeHeader(precision: precision, thirdDim: thirdDim, thirdDimPrecision: thirdDimPrecision);
+ }
+
+ for coordinate in lngLatArray {
+ if (coordinate.count != numDimensions) {
+ throw EncodeError.inconsistentCoordinateDimensions;
+ }
+
+ for dimension in 0...(numDimensions - 1) {
+ // Even though our input data is in lng/lat/z order, this is where we grab them in
+ // lat/lng/z order for encoding.
+ let inputValue = coordinate[inputDimensionIndex[dimension]];
+ // While looping through, also verify the input data is valid
+ if (abs(inputValue) > maxAllowedValues[dimension]) {
+ throw EncodeError.invalidCoordinateValue;
+ }
+ // Scale the value based on the number of digits of precision, encode the delta between
+ // it and the previous value to the output, and track it as the previous value for encoding
+ // the next delta.
+ let scaledValue = self.polylineRound((inputValue * precisionMultipliers[dimension]));
+ output += self.encodeSignedValue(scaledValue - lastScaledCoordinate[dimension]);
+ lastScaledCoordinate[dimension] = scaledValue;
+ }
+ }
+
+ return output;
+ }
+
+ private func encodeHeader(
+ precision: Int,
+ thirdDim: ThirdDimension,
+ thirdDimPrecision: Int
+ ) -> String {
+ // Combine all the metadata about the encoded data into a single value for the header.
+ let metadataValue =
+ (thirdDimPrecision << 7) | (thirdDim.rawValue << 4) | precision;
+ return (
+ self.encodeUnsignedValue(Int64(FlexiblePolylineFormatVersion)) +
+ self.encodeUnsignedValue(Int64(metadataValue))
+ );
+ }
+
+ // Given a single input unsigned scaled value, this encodes into a series of
+ // ASCII characters. The flexible-polyline algorithm uses this directly to encode
+ // the header bytes, since those are known not to need a sign bit.
+ private func encodeUnsignedValue(_ value: Int64) -> String {
+ var encodedString = "";
+ var remainingValue = value;
+ // Loop through each 5-bit chunk in the value, add a 6th bit if there
+ // will be additional chunks, and encode to an ASCII value.
+ while (remainingValue > 0x1f) {
+ let chunk = Int(remainingValue & 0x1f) | 0x20;
+ let encodedChar = self.encodingTable[self.encodingTable.index(self.encodingTable.startIndex, offsetBy: chunk)];
+ encodedString += [encodedChar];
+ remainingValue >>= 5;
+ }
+ // For the last chunk, set the 6th bit to 0 (since there are no more chunks) and encode it.
+ let finalEncodedChar = self.encodingTable[self.encodingTable.index(self.encodingTable.startIndex, offsetBy: Int(remainingValue))];
+ return encodedString + [finalEncodedChar];
+ }
+
+ // Given a single input signed scaled value, this encodes into a series of
+ // ASCII characters.
+ private func encodeSignedValue(_ value: Int64) -> String {
+ var unsignedValue = value;
+ // Shift the value over by 1 bit to make room for the sign bit at the end.
+ unsignedValue <<= 1;
+ // If the input value is negative, flip all the bits, including the sign bit.
+ if (value < 0) {
+ unsignedValue = ~unsignedValue;
+ }
+
+ return self.encodeUnsignedValue(unsignedValue);
+ }
+}
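encodeSignedValue above makes room for a sign bit by shifting left once and flipping all bits for negative inputs; decodeSignedValue in Decoder.swift reverses it. A small sketch of that mapping as a pair of free functions (not part of the library), with a round-trip check on a few illustrative deltas:

```swift
// Sketch of the sign-bit mapping used by encodeSignedValue/decodeSignedValue,
// written as standalone functions over Int64 for illustration.
func withSignBit(_ value: Int64) -> Int64 {
    var unsigned = value << 1          // make room for the sign bit
    if value < 0 {
        unsigned = ~unsigned           // flip everything; the sign bit becomes 1
    }
    return unsigned
}

func withoutSignBit(_ encoded: Int64) -> Int64 {
    var value = encoded
    if (encoded & 1) == 1 {            // sign bit set: the value was negative
        value = ~value
    }
    return value >> 1                  // drop the sign bit
}

// Illustrative deltas, including a scaled longitude at 5 digits of precision.
let deltas: [Int64] = [-17998321, -1, 0, 1, 255]
for delta in deltas {
    assert(withoutSignBit(withSignBit(delta)) == delta)
}
```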
diff --git a/swift/Polyline/Sources/Polyline/Compressors/EncodedPolyline.swift b/swift/Polyline/Sources/Polyline/Compressors/EncodedPolyline.swift
new file mode 100644
index 0000000..69f36aa
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/Compressors/EncodedPolyline.swift
@@ -0,0 +1,83 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+// This class implements the Encoded Polyline Algorithm Format
+// (https://developers.google.com/maps/documentation/utilities/polylinealgorithm).
+// This algorithm is commonly used with either 5 or 6 decimal digits of precision.
+// To improve usability and decrease user error, we present Polyline5 and Polyline6
+// as two distinct compression algorithms.
+
+import Foundation
+
+class EncodedPolyline: DataCompressor {
+ let precision: Int;
+
+ // The original Encoded Polyline algorithm doesn't support having a header on the encoded data.
+ let DataContainsHeader = false;
+
+ let PolylineEncodingTable: String =
+ "?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
+
+ // The lookup table contains conversion values for ASCII characters 0-127.
+ // Only the characters listed in the encoding table will contain valid
+ // decoding entries below.
+ let PolylineDecodingTable = [
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
+ 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33,
+ 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52,
+ 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, -1,
+ ];
+ let encoder : PolylineEncoder;
+ let decoder : PolylineDecoder;
+
+ init(precision: Int) {
+ self.precision = precision;
+ self.encoder = PolylineEncoder(
+ encodingTable: PolylineEncodingTable,
+ includeHeader: DataContainsHeader
+ );
+ self.decoder = PolylineDecoder(
+ decodingTable: PolylineDecodingTable,
+ containsHeader: DataContainsHeader
+ );
+ super.init();
+ }
+
+ override func compressLngLatArray(
+ lngLatArray: Array<Array<Double>>,
+ parameters: CompressionParameters
+ ) throws -> String {
+ return try self.encoder.encode(lngLatArray: lngLatArray, precision: self.precision);
+ }
+
+ override func decompressLngLatArray(
+ compressedData: String
+ ) throws -> (Array<Array<Double>>, CompressionParameters) {
+ let (lngLatArray, header) = try self.decoder.decode(
+ encoded: compressedData,
+ encodePrecision: self.precision
+ );
+ let compressionParameters = CompressionParameters(precisionLngLat: header.precisionLngLat);
+ return (lngLatArray, compressionParameters);
+ }
+}
+
+// Polyline5 and Polyline6 encode/decode compressed data with 5 or 6 decimal digits of precision respectively.
+// While the underlying Polyline implementation allows for an arbitrary
+// number of digits of precision to be encoded / decoded, location service providers seem
+// to only choose 5 or 6 digits of precision, so those are the two algorithms that we'll explicitly offer here.
+
+class Polyline5 : EncodedPolyline {
+ init() {
+ super.init(precision: 5);
+ }
+}
+
+class Polyline6 : EncodedPolyline {
+ init() {
+ super.init(precision: 6);
+ }
+}
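Because the classes in this diff are internal, Polyline5/Polyline6 are reached through the module-level setCompressionAlgorithm and encode/decode functions, the same way the unit tests below do. A test-style usage sketch with illustrative coordinates:

```swift
// Usage sketch in the style of the unit tests; the API in this diff is internal,
// so it is reached with @testable import. Coordinate values are illustrative.
import XCTest
@testable import Polyline

final class Polyline5UsageSketch: XCTestCase {
    func testRoundTrip() throws {
        Polyline.setCompressionAlgorithm(.Polyline5)

        let route: [[Double]] = [[-120.2, 38.5], [-120.95, 40.7], [-126.453, 43.252]]
        let encoded = try Polyline.encodeFromLngLatArray(lngLatArray: route)

        // Polyline5 keeps 5 decimal places, so these values round-trip exactly.
        let decoded = try Polyline.decodeToLngLatArray(encoded)
        XCTAssertEqual(decoded, route)
    }
}
```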
diff --git a/swift/Polyline/Sources/Polyline/Compressors/FlexiblePolyline.swift b/swift/Polyline/Sources/Polyline/Compressors/FlexiblePolyline.swift
new file mode 100644
index 0000000..4b91814
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/Compressors/FlexiblePolyline.swift
@@ -0,0 +1,61 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+// This class implements the Flexible-Polyline variation of the
+// Encoded Polyline algorithm (https://github.com/heremaps/flexible-polyline).
+// The algorithm supports both 2D and 3D data.
+
+import Foundation;
+
+class FlexiblePolyline : DataCompressor {
+ let DataContainsHeader = true;
+ let FlexPolylineEncodingTable =
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_";
+ // The lookup table contains conversion values for ASCII characters 0-127.
+ // Only the characters listed in the encoding table will contain valid
+ // decoding entries below.
+ let FlexPolylineDecodingTable = [
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
+ -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, 52, 53, 54, 55, 56, 57, 58, 59, 60,
+ 61, -1, -1, -1, -1, -1, -1, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,
+ 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, 63, -1,
+ 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44,
+ 45, 46, 47, 48, 49, 50, 51,
+ ];
+
+ let encoder : PolylineEncoder;
+ let decoder : PolylineDecoder;
+
+ override init() {
+ self.encoder = PolylineEncoder(
+ encodingTable: self.FlexPolylineEncodingTable,
+ includeHeader: self.DataContainsHeader
+ );
+ self.decoder = PolylineDecoder(
+ decodingTable: self.FlexPolylineDecodingTable,
+ containsHeader: self.DataContainsHeader
+ );
+ super.init();
+ }
+
+ override func compressLngLatArray(
+ lngLatArray: Array<Array<Double>>,
+ parameters: CompressionParameters
+ ) throws -> String {
+ return try self.encoder.encode(
+ lngLatArray: lngLatArray,
+ precision: parameters.precisionLngLat,
+ thirdDim: parameters.thirdDimension,
+ thirdDimPrecision: parameters.precisionThirdDimension
+ );
+ }
+
+ override func decompressLngLatArray(
+ compressedData: String
+ ) throws -> (Array<Array<Double>>, CompressionParameters) {
+ let (lngLatArray, header) = try self.decoder.decode(encoded: compressedData);
+
+ return (lngLatArray, header);
+ }
+}
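encodeHeader/decodeHeader pack the lng/lat precision, third-dimension type, and third-dimension precision into a single unsigned metadata value before it is varint-encoded. A sketch of that bit layout using illustrative values (precision 6, altitude, third-dimension precision 2):

```swift
// Sketch of the flexible-polyline header metadata layout used by
// encodeHeader/decodeHeader. The values below are illustrative.
let precision = 6
let thirdDimension = 2        // ThirdDimension.Altitude.rawValue
let thirdDimPrecision = 2

// Pack: bits 0-3 lng/lat precision, bits 4-6 third-dimension type,
// bits 7-10 third-dimension precision.
let metadata = (thirdDimPrecision << 7) | (thirdDimension << 4) | precision
assert(metadata == 0b1_0010_0110)   // 294

// Unpack, mirroring decodeHeader.
assert(metadata & 0x0f == precision)
assert((metadata >> 4) & 0x07 == thirdDimension)
assert((metadata >> 7) & 0x0f == thirdDimPrecision)
```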
diff --git a/swift/Polyline/Sources/Polyline/DataCompressor.swift b/swift/Polyline/Sources/Polyline/DataCompressor.swift
new file mode 100644
index 0000000..49a24a3
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/DataCompressor.swift
@@ -0,0 +1,260 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+import Foundation;
+
+// DataCompressor is an abstract base class that defines the interface for
+// encoding/decoding compressed coordinate arrays. The coordinate arrays represent either
+// LineString ("polyline") or Polygon geometry.
+// To make this compressed data easy to use with MapLibre, DataCompressor provides
+// methods for decoding the data into different types of GeoJSON outputs:
+// - decodeToLineStringFeature / decodeToPolygonFeature:
+// These produce a GeoJSON Feature object that can be directly passed into MapLibre as a geojson source.
+// - decodeToLineString / decodeToPolygon:
+// These produce a GeoJSON Geometry object that can be manually assembled into a Feature to pass
+// into MapLibre as a geojson source.
+
+// Concrete implementations of this class are expected to implement the following APIs:
+// - compressLngLatArray(lngLatArray, compressionParameters) -> compressedData
+// - decompressLngLatArray(compressedData) -> [lngLatArray, compressionParameters]
+
+class DataCompressor {
+ // Encode an array of LngLat data into a string of compressed data.
+ // The coordinates may optionally have a third dimension of data.
+ func compressLngLatArray(
+ lngLatArray: Array<Array<Double>>,
+ parameters: CompressionParameters
+ ) throws -> String {
+ return "";
+ }
+
+ // Decode a string of compressed data into an array of LngLat data.
+ // The coordinates may optionally have a third dimension of data.
+ func decompressLngLatArray(
+ compressedData: String
+ ) throws -> (Array<Array<Double>>, CompressionParameters) {
+ return ([], CompressionParameters(precisionLngLat: DefaultPrecision, precisionThirdDimension: 0, thirdDimension: ThirdDimension.None));
+ }
+
+ // Helper method to determine whether the polygon is wound in CCW (counterclockwise) or CW (clockwise) order.
+ private func polygonIsCounterClockwise(
+ lngLatArray: Array<Array<Double>>
+ ) -> Bool {
+ // If the data isn't a polygon, then it can't be a counter-clockwise polygon.
+ // (A polygon requires at least 3 unique points and a 4th last point that matches the first)
+ if (lngLatArray.count < 4) {
+ return false;
+ }
+
+ // To determine if a polygon has a counterclockwise winding order, all we need to
+ // do is calculate the area of the polygon.
+ // If the area is positive, it's counterclockwise.
+ // If the area is negative, it's clockwise.
+ // If the area is 0, it's neither, so we'll still return false for counterclockwise.
+ // This implementation currently assumes that only 2D winding order is important and
+ // ignores any optional third dimension.
+ var area = 0.0;
+ for idx in 0...(lngLatArray.count - 2) {
+ let x1 = lngLatArray[idx][0];
+ let y1 = lngLatArray[idx][1];
+ let x2 = lngLatArray[idx + 1][0];
+ let y2 = lngLatArray[idx + 1][1];
+ area += x1 * y2 - x2 * y1;
+ }
+ // If we needed the actual area value, we should divide by 2 here, but since we only
+ // need to check the sign, we can skip the division.
+ return area > 0;
+ }
+
+ // Helper method to determine if two LngLat positions are equivalent within a given epsilon range.
+ private func positionsAreEquivalent(
+ _ pos1: Array<Double>,
+ _ pos2: Array<Double>
+ ) -> Bool {
+ // Verify that the two positions are equal within an epsilon.
+ // This epsilon was picked because most compressed data uses <= 6 digits of precision,
+ // so this epsilon is large enough to detect intentionally different data, and small
+ // enough to detect equivalency for values that just have compression artifact drift.
+ let epsilon = 0.000001;
+ if (pos1.count != pos2.count) {
+ return false;
+ }
+ // Loop through longitude, latitude, and optional 3rd dimension to make sure each one is equivalent.
+ for idx in 0...(pos1.count - 1) {
+ if (abs(pos1[idx] - pos2[idx]) >= epsilon) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private func decodeLineString(
+ _ compressedData: String
+ ) throws -> (String, CompressionParameters) {
+ let (decodedLine, compressionParameters) =
+ try self.decompressLngLatArray(compressedData: compressedData);
+ // Validate that the result is a valid GeoJSON LineString per the RFC 7946 GeoJSON spec:
+ // "The 'coordinates' member is an array of two or more positions"
+ if (decodedLine.count < 2) {
+ throw GeoJsonError.invalidLineStringLength;
+ }
+ return (
+ """
+ {
+ "type": "LineString",
+ "coordinates": \(decodedLine),
+ }
+ """,
+ compressionParameters
+ );
+ }
+
+ private func decodePolygon(
+ _ compressedData: Array<String>
+ ) throws -> (String, CompressionParameters) {
+ var decodedPolygon : Array<Array<Array<Double>>> = [];
+ var shouldBeCounterclockwise = true; // The first ring of a polygon should be counterclockwise
+ var compressionParameters: CompressionParameters = CompressionParameters();
+ for ring in compressedData {
+ var (decodedRing, ringCompressionParameters) = try self.decompressLngLatArray(compressedData: ring);
+
+ // Validate that the result is a valid GeoJSON Polygon linear ring per the RFC 7946 GeoJSON spec.
+
+ // 1. "A linear ring is a closed LineString with 4 or more positions."
+ if (decodedRing.count < 4) {
+ throw GeoJsonError.invalidPolygonLength;
+ }
+
+ // 2. "The first and last positions are equivalent, and they MUST contain identical values;
+ // their representation SHOULD also be identical."
+ // We validate equivalency within a small epsilon.
+ if (
+ !self.positionsAreEquivalent(
+ decodedRing[0],
+ decodedRing[decodedRing.count - 1]
+ )
+ ) {
+ throw GeoJsonError.invalidPolygonClosure;
+ }
+
+ // 3. "A linear ring MUST follow the right-hand rule with respect to the area it bounds,
+ // i.e., exterior rings are counterclockwise, and holes are clockwise."
+ // "Note: the [GJ2008] specification did not discuss linear ring winding
+ // order. For backwards compatibility, parsers SHOULD NOT reject
+ // Polygons that do not follow the right-hand rule."
+ // "For Polygons with more than one of these rings, the first MUST be
+ // the exterior ring, and any others MUST be interior rings. The
+ // exterior ring bounds the surface, and the interior rings (if
+ // present) bound holes within the surface."
+
+ // With all this taken together, we should enforce the winding order as opposed to just
+ // validating it.
+ if (
+ shouldBeCounterclockwise != self.polygonIsCounterClockwise(lngLatArray: decodedRing)
+ ) {
+ decodedRing.reverse();
+ }
+
+ decodedPolygon.append(decodedRing);
+
+ // Set compressionParameter metadata to whatever the last compression parameters were that were used.
+ // This may need to have more complicated logic at some point if different rings have different compression
+ // parameters and we want to capture all of them.
+ compressionParameters = ringCompressionParameters;
+
+ // All rings after the first should be clockwise.
+ shouldBeCounterclockwise = false;
+ }
+ return (
+ """
+ {
+ "type": "Polygon",
+ "coordinates": \(decodedPolygon),
+ }
+ """,
+ compressionParameters
+ );
+ }
+
+ private func compressionParametersToGeoJsonProperties(
+ parameters: CompressionParameters
+ ) -> String {
+ switch (parameters.thirdDimension) {
+ case ThirdDimension.Level:
+ return """
+ {
+ "precision": \(parameters.precisionLngLat),
+ "thirdDimensionPrecision": \(parameters.precisionThirdDimension),
+ "thirdDimensionType": "level",
+ }
+ """;
+ case ThirdDimension.Elevation:
+ return """
+ {
+ "precision": \(parameters.precisionLngLat),
+ "thirdDimensionPrecision": \(parameters.precisionThirdDimension),
+ "thirdDimensionType": "elevation",
+ }
+ """;
+ case ThirdDimension.Altitude:
+ return """
+ {
+ "precision": \(parameters.precisionLngLat),
+ "thirdDimensionPrecision": \(parameters.precisionThirdDimension),
+ "thirdDimensionType": "altitude",
+ }
+ """;
+ default:
+ return """
+ {
+ "precision": \(parameters.precisionLngLat)
+ }
+ """;
+ }
+ }
+
+ func encodeFromLngLatArray(
+ lngLatArray: Array<Array<Double>>,
+ parameters: CompressionParameters
+ ) throws -> String {
+ return try self.compressLngLatArray(lngLatArray: lngLatArray, parameters: parameters);
+ }
+
+ func decodeToLngLatArray(compressedData: String) throws -> Array<Array<Double>> {
+ let (decodedLngLatArray, _) = try self.decompressLngLatArray(compressedData: compressedData);
+
+ return decodedLngLatArray;
+ }
+
+ func decodeToLineString(compressedData: String) throws -> String {
+ let (lineString, _) = try self.decodeLineString(compressedData);
+ return lineString;
+ }
+
+ func decodeToPolygon(compressedData: Array<String>) throws -> String {
+ let (polygon, _) = try self.decodePolygon(compressedData);
+ return polygon;
+ }
+
+ func decodeToLineStringFeature(compressedData: String) throws -> String {
+ let (lineString, compressionParameters) = try self.decodeLineString(compressedData);
+ return """
+ {
+ "type": "Feature",
+ "geometry": \(lineString),
+ "properties": \(self.compressionParametersToGeoJsonProperties(parameters: compressionParameters)),
+ }
+ """;
+ }
+
+ func decodeToPolygonFeature(compressedData: Array<String>) throws -> String {
+ let (polygon, compressionParameters) = try self.decodePolygon(compressedData);
+ return """
+ {
+ "type": "Feature",
+ "geometry": \(polygon),
+ "properties": \(self.compressionParametersToGeoJsonProperties(parameters: compressionParameters)),
+ }
+ """;
+ }
+}
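polygonIsCounterClockwise above infers winding order from the sign of the shoelace sum, and decodePolygon reverses rings as needed so the exterior ring ends up counterclockwise. A standalone sketch of the same check on an illustrative closed square ring:

```swift
// Standalone sketch of the shoelace-sign winding check used by
// polygonIsCounterClockwise; the square below is an illustrative closed ring.
func isCounterClockwise(_ ring: [[Double]]) -> Bool {
    guard ring.count >= 4 else { return false }
    var area = 0.0
    for i in 0..<(ring.count - 1) {
        area += ring[i][0] * ring[i + 1][1] - ring[i + 1][0] * ring[i][1]
    }
    return area > 0   // positive signed area means counterclockwise
}

let ccwSquare: [[Double]] = [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]]
assert(isCounterClockwise(ccwSquare))
assert(!isCounterClockwise(Array(ccwSquare.reversed())))
```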
diff --git a/swift/Polyline/Sources/Polyline/Polyline.swift b/swift/Polyline/Sources/Polyline/Polyline.swift
new file mode 100644
index 0000000..6c2966e
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/Polyline.swift
@@ -0,0 +1,253 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+import Foundation
+
+// The default algorithm is FlexiblePolyline. It was selected because it is the newest and most
+// flexible of the formats supported by this library.
+private var compressor: DataCompressor = FlexiblePolyline();
+
+/** Get the currently-selected compression algorithm.
+ * @returns The current compression algorithm.
+ */
+func getCompressionAlgorithm() -> CompressionAlgorithm {
+ if (compressor is Polyline5) {
+ return CompressionAlgorithm.Polyline5;
+ }
+ if (compressor is Polyline6) {
+ return CompressionAlgorithm.Polyline6;
+ }
+
+ return CompressionAlgorithm.FlexiblePolyline;
+}
+
+/** Set the compression algorithm to use for subsequent encode/decode calls.
+ * @param compressionType The compression algorithm to use for subsequent encode/decode calls.
+ * Defaults to FlexiblePolyline when no value is provided.
+ */
+func setCompressionAlgorithm(_ compressionType: CompressionAlgorithm = .FlexiblePolyline) {
+ switch (compressionType) {
+ case CompressionAlgorithm.Polyline5:
+ if (!(compressor is Polyline5)) {
+ compressor = Polyline5();
+ }
+ case CompressionAlgorithm.Polyline6:
+ if (!(compressor is Polyline6)) {
+ compressor = Polyline6();
+ }
+ default:
+ if (!(compressor is FlexiblePolyline)) {
+ compressor = FlexiblePolyline();
+ }
+ }
+}
+
+/** Encode the provided array of coordinate values into an encoded string.
+ * @remarks
+ * This takes in an array of two-dimensional or three-dimensional positions and encodes them into
+ * the currently-selected compression format.
+ * Example of 2D input data:
+ * ```swift
+ * [[5.0, 0.0], [10.0, 5.0], [10.0, 10.0]]
+ * ```
+ * Example of 3D input data:
+ * ```swift
+ * [[5.0, 0.0, 200.0], [10.0, 5.0, 200.0], [10.0, 10.0, 205.0]]
+ * ```
+ * @param lngLatArray An array of lng/lat positions to encode. The positions may contain an optional 3rd dimension.
+ * @param parameters Optional compression parameters. These are currently only used by the FlexiblePolyline algorithm.
+ * @returns An encoded string containing the compressed coordinate values.
+ * @throws Error() if the input data contains no coordinate pairs,
+ * latitude values outside of [-90, 90], longitude values outside of [-180, 180],
+ * data that isn't 2-dimensional or 3-dimensional, or data that is 3-dimensional with a compressor that doesn't support 3D data.
+ */
+func encodeFromLngLatArray(
+ lngLatArray: Array<Array<Double>>,
+ parameters: CompressionParameters = CompressionParameters()
+) throws -> String {
+ return try compressor.encodeFromLngLatArray(lngLatArray: lngLatArray, parameters: parameters);
+}
+
+/** Decode the provided encoded data string into an array of coordinate values.
+ * @remarks
+ * Note that this method returns a raw array of coordinate values, which cannot be used as a MapLibre source
+ * without first embedding it into a GeoJSON Feature. If you want to add the decoded data as a MapLibre source,
+ * use either {@link decodeToLineStringFeature} or {@link decodeToPolygonFeature} instead.
+ * Only use this method when you want to use the coordinate data directly.
+ * @param compressedData The encoded data string to decode. The data is expected to have valid lat/lng values.
+ * @returns An array of coordinate value arrays.
+ * @throws Error() if the encodedData contains invalid characters, no coordinate pairs,
+ * latitude values outside of [-90, 90], or longitude values outside of [-180, 180].
+ * @example
+ * An example of decoded data:
+ * ```swift
+ * [
+ * [5.0, 0.0],
+ * [10.0, 5.0],
+ * [10.0, 10.0]
+ * ]
+ * ```
+ */
+func decodeToLngLatArray(
+ _ encodedData: String
+) throws -> Array<Array<Double>> {
+ return try compressor.decodeToLngLatArray(compressedData: encodedData);
+}
+
+/** Decode the provided encoded data string into a GeoJSON LineString.
+ * @remarks
+ * Note that this method returns a LineString, which cannot be used as a MapLibre source without first embedding it
+ * into a GeoJSON Feature. If you want to add the LineString as a MapLibre source, use {@link decodeToLineStringFeature} instead.
+ * Only use this method when you plan to manipulate the LineString further as opposed to using it directly as a source.
+ * @param encodedData The encoded data string to decode. The data is expected to have a minimum of two
+ * coordinate pairs with valid lat/lng values.
+ * @returns A GeoJSON LineString representing the decoded data.
+ * @throws Error() if the encodedData contains invalid characters, < 2 coordinate pairs,
+ * latitude values outside of [-90, 90], or longitude values outside of [-180, 180].
+ * @example
+ * An example of a decoded LineString:
+ * ```json
+ * {
+ * "type": "LineString",
+ * "coordinates": [
+ * [5.0, 0.0],
+ * [10.0, 5.0],
+ * [10.0, 10.0],
+ * ]
+ * }
+ * ```
+ */
+func decodeToLineString(_ encodedData: String) throws -> String {
+ return try compressor.decodeToLineString(compressedData: encodedData);
+}
+
+/** Decode the provided encoded data string into a GeoJSON Polygon.
+ * @remarks
+ * Note that this method returns a Polygon, which cannot be used as a MapLibre source without first embedding it
+ * into a GeoJSON Feature. If you want to add the Polygon as a MapLibre source, use {@link decodeToPolygonFeature} instead.
+ * Only use this method when you plan to manipulate the Polygon further as opposed to using it directly as a source.
+ * @param encodedData An array of encoded data strings to decode. This is an array instead of a single string
+ * because polygons can consist of multiple rings of compressed data. The first entry will be treated as the
+ * outer ring and the remaining entries will be treated as inner rings. Each input ring can be wound either
+ * clockwise or counterclockwise; they will get rewound to be GeoJSON-compliant in the output. Each ring is
+ * expected to have a minimum of four coordinate pairs with valid lat/lng data, and the last coordinate pair
+ * must match the first to make an explicit ring.
+ * @returns A GeoJSON Polygon representing the decoded data. The first entry in the output coordinates
+ * represents the outer ring and any remaining entries represent inner rings.
+ * @throws Error() if the encodedData contains invalid characters, < 4 coordinate pairs, first/last coordinates that
+ * aren't approximately equal, latitude values outside of [-90, 90], or longitude values outside of [-180, 180].
+ * @example
+ * An example of a decoded Polygon:
+ * ```json
+ * {
+ * "type": "Polygon",
+ * "coordinates": [
+ * [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]], // outer ring
+ * [[2, 2], [2, 8], [8 , 8 ], [8 , 2], [2, 2]], // inner ring
+ * [[4, 4], [4, 6], [6 , 6 ], [6 , 4], [4, 4]] // inner ring
+ * ]
+ * }
+ * ```
+ */
+func decodeToPolygon(_ encodedData: Array<String>) throws -> String {
+ return try compressor.decodeToPolygon(compressedData: encodedData);
+}
+
+/** Decode the provided encoded data string into a GeoJSON Feature containing a LineString.
+ * @param encodedData The encoded data string to decode. The data is expected to have a minimum of two
+ * coordinate pairs with valid lat/lng values.
+ * @returns A GeoJSON Feature containing a LineString that represents the decoded data.
+ * @throws Error() if the encodedData contains invalid characters, < 2 coordinate pairs,
+ * latitude values outside of [-90, 90], or longitude values outside of [-180, 180]
+ * @example
+ * An example of a decoded LineString as a Feature:
+ * ```json
+ * {
+ * "type": "Feature",
+ * "properties": {},
+ * "geometry": {
+ * "type": "LineString",
+ * "coordinates": [
+ * [5.0, 0.0],
+ * [10.0, 5.0],
+ * [10.0, 10.0],
+ * ]
+ * }
+ * }
+ * ```
+ * The result of this method can be used with MapLibre's `addSource` to add a named data source or embedded directly
+ * with MapLibre's `addLayer` to both add and render the result:
+ * ```javascript
+ * var decodedGeoJSON = polylineDecoder.decodeToLineStringFeature(encodedRoutePolyline);
+ * map.addLayer({
+ * id: 'route',
+ * type: 'line',
+ * source: {
+ * type: 'geojson',
+ * data: decodedGeoJSON
+ * },
+ * layout: {
+ * 'line-join': 'round',
+ * 'line-cap': 'round'
+ * },
+ * paint: {
+ * 'line-color': '#3887be',
+ * 'line-width': 5,
+ * 'line-opacity': 0.75
+ * }
+ * });
+ * ```
+ */
+func decodeToLineStringFeature(_ encodedData: String) throws -> String {
+ return try compressor.decodeToLineStringFeature(compressedData: encodedData);
+}
+
+/** Decode the provided encoded data string into a GeoJSON Feature containing a Polygon.
+ * @param encodedData An array of encoded data strings to decode. This is an array instead of a single string
+ * because polygons can consist of multiple rings of compressed data. The first entry will be treated as the
+ * outer ring and the remaining entries will be treated as inner rings. Each input ring can be wound either
+ * clockwise or counterclockwise; they will get rewound to be GeoJSON-compliant in the output. Each ring is
+ * expected to have a minimum of four coordinate pairs with valid lat/lng data, and the last coordinate pair
+ * must match the first to make an explicit ring.
+ * @returns A GeoJSON Feature containing a Polygon that represents the decoded data. The first entry in the
+ * output coordinates represents the outer ring and any remaining entries represent inner rings.
+ * @throws Error() if the encodedData contains invalid characters, < 4 coordinate pairs, first/last coordinates that
+ * aren't approximately equal, latitude values outside of [-90, 90], or longitude values outside of [-180, 180].
+ * @example
+ * An example of a decoded Polygon as a Feature:
+ * ```json
+ * {
+ * "type": "Feature",
+ * "properties": {},
+ * "geometry": {
+ * "type": "Polygon",
+ * "coordinates": [
+ * [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]], // outer ring
+ * [[2, 2], [2, 8], [8 , 8 ], [8 , 2], [2, 2]], // inner ring
+ * [[4, 4], [4, 6], [6 , 6 ], [6 , 4], [4, 4]] // inner ring
+ * ]
+ * }
+ * }
+ * ```
+ * The result of this method can be used with MapLibre's `addSource` to add a named data source or embedded directly
+ * with MapLibre's `addLayer` to both add and render the result:
+ * ```javascript
+ * var decodedGeoJSON = polylineDecoder.decodeToPolygonFeature(encodedIsolinePolygons);
+ * map.addLayer({
+ * id: 'isoline',
+ * type: 'fill',
+ * source: {
+ * type: 'geojson',
+ * data: decodedGeoJSON
+ * },
+ * layout: {},
+ * paint: {
+ * 'fill-color': '#FF0000',
+ * 'fill-opacity': 0.6
+ * }
+ * });
+ * ```
+ */
+func decodeToPolygonFeature(_ encodedData: Array<String>) throws -> String {
+ return try compressor.decodeToPolygonFeature(compressedData: encodedData);
+}
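As a Swift-side complement to the JavaScript MapLibre snippets in the doc comments above, here is a test-style sketch (the API in this diff is internal) that encodes 3D data with explicit CompressionParameters and round-trips it; the values are illustrative:

```swift
// Test-style sketch of the top-level API with 3D data and explicit
// CompressionParameters. Coordinate and precision values are illustrative.
import XCTest
@testable import Polyline

final class FlexiblePolyline3DSketch: XCTestCase {
    func testAltitudeRoundTrip() throws {
        Polyline.setCompressionAlgorithm(.FlexiblePolyline)

        let route: [[Double]] = [[5.0, 0.0, 200.0], [10.0, 5.0, 200.0], [10.0, 10.0, 205.0]]
        let parameters = CompressionParameters(
            precisionLngLat: 6,
            precisionThirdDimension: 1,
            thirdDimension: ThirdDimension.Altitude)

        let encoded = try Polyline.encodeFromLngLatArray(lngLatArray: route, parameters: parameters)
        let decoded = try Polyline.decodeToLngLatArray(encoded)
        XCTAssertEqual(decoded, route)
    }
}
```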
diff --git a/swift/Polyline/Sources/Polyline/PolylineTypes.swift b/swift/Polyline/Sources/Polyline/PolylineTypes.swift
new file mode 100644
index 0000000..0eb79b2
--- /dev/null
+++ b/swift/Polyline/Sources/Polyline/PolylineTypes.swift
@@ -0,0 +1,95 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+import Foundation
+
+/** Defines the default encoding precision for coordinates */
+let DefaultPrecision = 6;
+
+/** The version of flexible-polyline that's supported by this implementation */
+let FlexiblePolylineFormatVersion = 1;
+
+/** Defines the set of compression algorithms that are supported by this library. */
+enum CompressionAlgorithm {
+ /** Encoder/decoder for the [Flexible Polyline](https://github.com/heremaps/flexible-polyline) format. */
+ case FlexiblePolyline
+ /** Encoder/decoder for the [Encoded Polyline Algorithm Format](https://developers.google.com/maps/documentation/utilities/polylinealgorithm)
+ * with 5 bits of precision.
+ */
+ case Polyline5
+ /** Encoder/decoder for the [Encoded Polyline Algorithm Format](https://developers.google.com/maps/documentation/utilities/polylinealgorithm)
+ * with 6 bits of precision.
+ */
+ case Polyline6
+}
+
+/** Defines how to interpret a third dimension value if it exists. */
+enum ThirdDimension:Int {
+ /** No third dimension specified */
+ case None = 0
+ /** Third dimension is level */
+ case Level = 1
+ /** Third dimension is altitude (height above the Earth's surface) */
+ case Altitude = 2
+ /** Third dimension is elevation (height of the Earth's surface relative to the reference geoid) */
+ case Elevation = 3
+}
+
+/** The optional set of parameters for encoding a set of LngLat coordinates.
+ * Currently, only the FlexiblePolyline algorithm supports these parameters. The Polyline5 / Polyline6
+ * algorithms ignore them, as they don't support 3D data and we've defined them to use
+ * a fixed precision value.
+ */
+struct CompressionParameters {
+ /** The number of decimal places of precision to use for compressing longitude and latitude.
+ */
+ let precisionLngLat: Int;
+ /** The number of decimal places of precision to use for compressing the third dimension of data.
+ */
+ let precisionThirdDimension: Int;
+ /** The type of third dimension data being encoded - none, level, altitude, or elevation.
+ */
+ let thirdDimension: ThirdDimension;
+
+ init(precisionLngLat: Int = DefaultPrecision, precisionThirdDimension: Int = 0, thirdDimension: ThirdDimension = ThirdDimension.None) {
+ self.precisionLngLat = precisionLngLat;
+ self.precisionThirdDimension = precisionThirdDimension;
+ self.thirdDimension = thirdDimension;
+ }
+};
+
+
+enum DecodeError: Error {
+ // Empty input string is considered an error.
+ case emptyInput
+ // Invalid input, the encoded character doesn't exist in the decoding table.
+ case invalidEncodedCharacter
+ // Invalid encoding, the last block contained an extra 0x20 'continue' bit.
+ case extraContinueBit
+ // The decoded header has an unknown version number.
+ case invalidHeaderVersion
+ // The decoded coordinate has invalid lng/lat values.
+ case invalidCoordinateValue
+ // Decoding ended before all the dimensions for a coordinate were decoded.
+ case missingCoordinateDimension
+};
+
+
+enum EncodeError: Error {
+ // Invalid precision value, the valid range is 0 - 11.
+ case invalidPrecisionValue
+ // All the coordinates need to have the same number of dimensions.
+ case inconsistentCoordinateDimensions
+ // Latitude values need to be in [-90, 90] and longitude values need to be in [-180, 180]
+ case invalidCoordinateValue
+};
+
+
+enum GeoJsonError: Error {
+ // LineString coordinate arrays need at least 2 entries (start, end)
+ case invalidLineStringLength
+ // Polygon coordinate arrays need at least 4 entries (v0, v1, v2, v0)
+ case invalidPolygonLength
+ // Polygons need the first and last coordinate to match
+ case invalidPolygonClosure
+}
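The DecodeError cases above surface through Swift's throws/catch. A short test-style sketch of catching one of them; the test class name is hypothetical and the input string contains characters outside every decoding table:

```swift
// Test-style sketch of handling a DecodeError; the class name and input are
// illustrative, not part of this change.
import XCTest
@testable import Polyline

final class DecodeErrorHandlingSketch: XCTestCase {
    func testInvalidCharacterIsReported() {
        do {
            _ = try Polyline.decodeToLngLatArray("!#$%")
            XCTFail("Expected decoding to throw")
        } catch DecodeError.invalidEncodedCharacter {
            // Expected: the input contains characters with no decoding-table entry.
        } catch {
            XCTFail("Unexpected error: \(error)")
        }
    }
}
```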
diff --git a/swift/Polyline/Tests/PolylineTests/PolylineTests.swift b/swift/Polyline/Tests/PolylineTests/PolylineTests.swift
new file mode 100644
index 0000000..4bbf44d
--- /dev/null
+++ b/swift/Polyline/Tests/PolylineTests/PolylineTests.swift
@@ -0,0 +1,764 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: MIT-0
+
+import Foundation
+import XCTest
+@testable import Polyline
+
+// Simplified GeoJSON structures for validating the outputs
+
+struct LineString: Decodable {
+ let type: String;
+ let coordinates: [[Double]];
+}
+struct Polygon: Decodable {
+ let type: String;
+ let coordinates: [[[Double]]];
+}
+struct Properties: Decodable {
+ let precision: Int;
+ let thirdDimensionPrecision: Int?;
+ let thirdDimensionType: String?;
+}
+struct LineStringFeature: Decodable {
+ let type: String;
+ let geometry: LineString;
+ let properties: Properties;
+}
+struct PolygonFeature: Decodable {
+ let type: String;
+ let geometry: Polygon;
+ let properties: Properties;
+}
+
+// Tests to validate the polyline library
+
+final class PolylineTests: XCTestCase {
+
+ let algorithms : [CompressionAlgorithm] = [.FlexiblePolyline, .Polyline5, .Polyline6];
+
+ override func setUp() {
+ // Reset the compression algorithm back to the default for each unit test.
+ Polyline.setCompressionAlgorithm();
+ }
+
+ private func validateLineString(geojson: String, coords: [[Double]]) {
+ let geojsonData = geojson.data(using: .utf8)!;
+ let lineString:LineString = try! JSONDecoder().decode(LineString.self, from: geojsonData);
+
+ XCTAssertEqual(lineString.type, "LineString");
+ XCTAssertEqual(lineString.coordinates, coords);
+ }
+
+ private func validatePolygon(geojson: String, coords: [[[Double]]]) {
+ let geojsonData = geojson.data(using: .utf8)!;
+ let polygon:Polygon = try! JSONDecoder().decode(Polygon.self, from: geojsonData);
+
+ XCTAssertEqual(polygon.type, "Polygon");
+ XCTAssertEqual(polygon.coordinates, coords);
+ }
+
+ private func validateProperties(properties: Properties, parameters: CompressionParameters) {
+ XCTAssertEqual(properties.precision, parameters.precisionLngLat);
+ XCTAssertEqual(properties.thirdDimensionPrecision != nil, parameters.thirdDimension != ThirdDimension.None);
+ if (properties.thirdDimensionPrecision != nil) {
+ XCTAssertEqual(properties.thirdDimensionPrecision, parameters.precisionThirdDimension);
+ }
+ XCTAssertEqual(properties.thirdDimensionType != nil, parameters.thirdDimension != ThirdDimension.None);
+ if (properties.thirdDimensionType != nil) {
+ switch properties.thirdDimensionType {
+ case "level":
+ XCTAssertEqual(parameters.thirdDimension, ThirdDimension.Level);
+ case "altitude":
+ XCTAssertEqual(parameters.thirdDimension, ThirdDimension.Altitude);
+ case "elevation":
+ XCTAssertEqual(parameters.thirdDimension, ThirdDimension.Elevation);
+ default:
+ XCTFail("Unknown third dimension type");
+ }
+ XCTAssertEqual(properties.thirdDimensionPrecision, parameters.precisionThirdDimension);
+ }
+ }
+
+ private func validateLineStringFeature(geojson: String, coords: [[Double]], parameters: CompressionParameters) {
+ let geojsonData = geojson.data(using: .utf8)!;
+ let lineStringFeature:LineStringFeature = try! JSONDecoder().decode(LineStringFeature.self, from: geojsonData);
+
+ XCTAssertEqual(lineStringFeature.type, "Feature");
+ XCTAssertEqual(lineStringFeature.geometry.type, "LineString");
+ XCTAssertEqual(lineStringFeature.geometry.coordinates, coords);
+ validateProperties(properties: lineStringFeature.properties, parameters: parameters);
+ }
+
+ private func validatePolygonFeature(geojson: String, coords: [[[Double]]], parameters: CompressionParameters) {
+ let geojsonData = geojson.data(using: .utf8)!;
+ let polygonFeature:PolygonFeature = try! JSONDecoder().decode(PolygonFeature.self, from: geojsonData);
+
+ XCTAssertEqual(polygonFeature.type, "Feature");
+ XCTAssertEqual(polygonFeature.geometry.type, "Polygon");
+ XCTAssertEqual(polygonFeature.geometry.coordinates, coords);
+ validateProperties(properties: polygonFeature.properties, parameters: parameters);
+ }
+
+
+
+ func testDefaultsToFlexiblePolyline() {
+ XCTAssertEqual(Polyline.getCompressionAlgorithm(), .FlexiblePolyline);
+ }
+
+ func testSettingFlexiblePolyline() {
+ // Since we default to FlexiblePolyline, first set the algorithm to something other than FlexiblePolyline
+ Polyline.setCompressionAlgorithm(.Polyline5);
+ // Now set back to FlexiblePolyline
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ XCTAssertEqual(Polyline.getCompressionAlgorithm(), .FlexiblePolyline);
+ }
+
+ // Verify that all of the non-default algorithms can be set correctly
+ func testSettingNonDefaultAlgorithm() {
+ let nonDefaultAlgorithms: [CompressionAlgorithm] = [ .Polyline5, .Polyline6 ];
+
+ for algorithm in nonDefaultAlgorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+ XCTAssertEqual(Polyline.getCompressionAlgorithm(), algorithm);
+ }
+ }
+
+ func testDecodingEmptyDataThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+ XCTAssertThrowsError(try Polyline.decodeToLineString("")) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.emptyInput);
+ };
+ }
+ }
+
+ func testDecodingBadDataThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+ // The characters in the string below are invalid for each of the decoding algorithms.
+ // For polyline5/polyline6, only ?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ are valid.
+ // For flexiblePolyline, only ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_ are valid.
+ XCTAssertThrowsError(try Polyline.decodeToLineString("!#$%(*)")) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.invalidEncodedCharacter);
+ };
+
+ }
+ }
+
+ func testEncodingInputPointValuesAreValidated() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ // Longitude too low
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[-181.0, 5.0], [0.0, 0.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidCoordinateValue);
+ };
+
+ // Longitude too high
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[181.0, 5.0], [0.0, 0.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidCoordinateValue);
+ };
+
+ // Latitude too low
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, -91.0], [0.0, 0.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidCoordinateValue);
+ };
+
+ // Latitude too high
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 91.0], [0.0, 0.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidCoordinateValue);
+ };
+
+ }
+ }
+
+ func testEncodingMixedDimensionalityThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ // Mixing 2D and 3D throws error
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0], [10.0, 10.0, 10.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.inconsistentCoordinateDimensions);
+ };
+ // Mixing 3D and 2D throws error
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0, 5.0], [10.0, 10.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.inconsistentCoordinateDimensions);
+ };
+ }
+ }
+
+ func testEncodingUnsupportedDimensionsThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ // 1D throws error
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0], [10.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.inconsistentCoordinateDimensions);
+ };
+ // 4D throws error
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0, 5.0, 5.0], [10.0, 10.0, 10.0, 10.0]])) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.inconsistentCoordinateDimensions);
+ };
+ }
+ }
+
+ func testEncodingEmptyInputProducesEmptyResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ XCTAssertEqual(try Polyline.encodeFromLngLatArray(lngLatArray:[]), "");
+ }
+ }
+
+ func testDecodeToLineStringWithOnePositionThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0]]);
+ XCTAssertThrowsError(try Polyline.decodeToLineString(encodedLine)) { error in
+ XCTAssertEqual(error as! Polyline.GeoJsonError, Polyline.GeoJsonError.invalidLineStringLength);
+ };
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithUnderFourPositionsThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0], [10.0, 10.0], [5.0, 5.0]]);
+ XCTAssertThrowsError(try Polyline.decodeToPolygon([encodedLine])) { error in
+ XCTAssertEqual(error as! Polyline.GeoJsonError, Polyline.GeoJsonError.invalidPolygonLength);
+ };
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithMismatchedStartEndThrowsError() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: [[5.0, 5.0], [10.0, 10.0], [15.0, 15.0], [20.0, 20.0]]);
+ XCTAssertThrowsError(try Polyline.decodeToPolygon([encodedLine])) { error in
+ XCTAssertEqual(error as! Polyline.GeoJsonError, Polyline.GeoJsonError.invalidPolygonClosure);
+ };
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToLineStringProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[132.0, -67.0], [38.0, 62.0]];
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToLineString(encodedLine);
+
+ validateLineString(geojson:geojson, coords:coords);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToLineStringFeatureProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[132.0, -67.0], [38.0, 62.0]];
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToLineStringFeature(encodedLine);
+ validateLineStringFeature(geojson: geojson, coords: coords, parameters: CompressionParameters(
+ precisionLngLat:(algorithm == .Polyline5) ? 5 : DefaultPrecision));
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[0.0, 0.0], [10.0, 0.0], [5.0, 10.0], [0.0, 0.0]];
+ let encodedRing = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToPolygon([encodedRing]);
+ validatePolygon(geojson:geojson, coords:[coords]);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonFeatureProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[0.0, 0.0], [10.0, 0.0], [5.0, 10.0], [0.0, 0.0]];
+ let encodedRing = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToPolygonFeature([encodedRing]);
+ validatePolygonFeature(geojson: geojson, coords: [coords], parameters: CompressionParameters(
+ precisionLngLat:(algorithm == .Polyline5) ? 5 : DefaultPrecision)
+ );
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
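+ // The tests below verify ring winding order. Per the GeoJSON convention (RFC 7946), exterior
+ // rings should wind counterclockwise and interior rings (holes) clockwise, so decodeToPolygon
+ // is expected to normalize rings to that orientation regardless of how they were encoded.
+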
+ func testDecodeToPolygonWithCWOuterRingProducesCCWResult() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[0.0, 0.0], [0.0, 10.0], [10.0, 10.0], [10.0, 0.0], [0.0, 0.0]];
+ let encodedRing = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToPolygon([encodedRing]);
+ let ccwCoords = [[0.0, 0.0], [10.0, 0.0], [10.0, 10.0], [0.0, 10.0], [0.0, 0.0]];
+ validatePolygon(geojson:geojson, coords:[ccwCoords]);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithCCWOuterRingProducesCCWResult() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [[0.0, 0.0], [10.0, 0.0], [10.0, 10.0], [0.0, 10.0], [0.0, 0.0]];
+ let encodedRing = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToPolygon([encodedRing]);
+ validatePolygon(geojson:geojson, coords:[coords]);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithCWInnerRingsProducesCWResult() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let clockwiseCoords = [
+ [
+ [0.0, 0.0],
+ [10.0, 0.0],
+ [10.0, 10.0],
+ [0.0, 10.0],
+ [0.0, 0.0],
+ ], // CCW outer ring
+ [
+ [2.0, 2.0],
+ [2.0, 8.0],
+ [8.0, 8.0],
+ [8.0, 2.0],
+ [2.0, 2.0],
+ ], // CW inner ring
+ [
+ [4.0, 4.0],
+ [4.0, 6.0],
+ [6.0, 6.0],
+ [6.0, 4.0],
+ [4.0, 4.0],
+ ], // CW inner ring
+ ];
+ var encodedRings: [String] = [];
+ for ring in clockwiseCoords {
+ encodedRings.append(try Polyline.encodeFromLngLatArray(lngLatArray: ring));
+ }
+ let geojson = try Polyline.decodeToPolygon(encodedRings);
+ validatePolygon(geojson:geojson, coords:clockwiseCoords);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithCCWInnerRingsProducesCWResult() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let counterclockwiseCoords = [
+ [
+ [0.0, 0.0],
+ [10.0, 0.0],
+ [10.0, 10.0],
+ [0.0, 10.0],
+ [0.0, 0.0],
+ ], // CCW outer ring
+ [
+ [2.0, 2.0],
+ [8.0, 2.0],
+ [8.0, 8.0],
+ [2.0, 8.0],
+ [2.0, 2.0],
+ ], // CCW inner ring
+ [
+ [4.0, 4.0],
+ [6.0, 4.0],
+ [6.0, 6.0],
+ [4.0, 6.0],
+ [4.0, 4.0],
+ ], // CCW inner ring
+ ];
+ var encodedRings: [String] = [];
+ for ring in counterclockwiseCoords {
+ encodedRings.append(try Polyline.encodeFromLngLatArray(lngLatArray: ring));
+ }
+ let geojson = try Polyline.decodeToPolygon(encodedRings);
+ let expectedCoords = [
+ [
+ [0.0, 0.0],
+ [10.0, 0.0],
+ [10.0, 10.0],
+ [0.0, 10.0],
+ [0.0, 0.0],
+ ], // CCW outer ring
+ [
+ [2.0, 2.0],
+ [2.0, 8.0],
+ [8.0, 8.0],
+ [8.0, 2.0],
+ [2.0, 2.0],
+ ], // CW inner ring
+ [
+ [4.0, 4.0],
+ [4.0, 6.0],
+ [6.0, 6.0],
+ [6.0, 4.0],
+ [4.0, 4.0],
+ ], // CW inner ring
+ ];
+ validatePolygon(geojson:geojson, coords:expectedCoords);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToLineStringWithRangesOfInputsProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [
+ // A few different valid longitude values (positive, zero, negative)
+ [167.0, 5.0],
+ [0.0, 5.0],
+ [-167.0, 5.0],
+ // A few different valid latitude values (positive, zero, negative)
+ [5.0, 87.0],
+ [5.0, 0.0],
+ [5.0, -87.0],
+ // A few different high-precision values
+ [123.45678, 76.54321],
+ [-123.45678, -76.54321],
+ ];
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToLineString(encodedLine);
+
+ validateLineString(geojson:geojson, coords:coords);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testDecodeToPolygonWithRangesOfInputsProducesValidResults() {
+ for algorithm in algorithms {
+ Polyline.setCompressionAlgorithm(algorithm);
+
+ do {
+ let coords = [
+ // A few different valid longitude values (positive, zero, negative)
+ [167.0, 5.0],
+ [0.0, 5.0],
+ [-167.0, 5.0],
+ // A few different valid latitude values (positive, zero, negative)
+ [5.0, 87.0],
+ [5.0, 0.0],
+ [5.0, -87.0],
+ // A few different high-precision values
+ [123.45678, 76.54321],
+ [-123.45678, -76.54321],
+ // Close the polygon ring
+ [167.0, 5.0],
+ ];
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords);
+ let geojson = try Polyline.decodeToPolygon([encodedLine]);
+ validatePolygon(geojson:geojson, coords:[coords]);
+ }
+ catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ // The following tests use hard-coded compressed data because we want them to contain invalid values and our
+ // encoding method would prevent that. The compressed data was generated by calling encodeFromLngLatArray with the
+ // input validation temporarily disabled.
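+ // For instance, the out-of-range cases below could be regenerated (with the encoder's range
+ // checks temporarily commented out) via something like:
+ //   let encoded = try Polyline.encodeFromLngLatArray(lngLatArray: [[-181.0, 5.0], [0.0, 0.0]]);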
+
+ func testFlexiblePolylineDecodeInvalidHeaderThrowsError() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let invalidStrings = [
+ "AGgsmytFg0lxJ_rmytF_zlxJ", // Header version = 0
+ "CGgsmytFg0lxJ_rmytF_zlxJ", // Header version = 2
+ ];
+ for invalidString in invalidStrings {
+ XCTAssertThrowsError(try Polyline.decodeToLngLatArray(invalidString)) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.invalidHeaderVersion);
+ };
+ }
+ }
+
+ func testFlexiblePolylineDecodeInvalidValuesThrowsError() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let invalidStrings = [
+ "BGg0lxJ_zrn5K_zlxJg0rn5K", // [[-181, 5], [0, 0]] - longitude too low
+ "BGg0lxJg0rn5K_zlxJ_zrn5K", // [[181, 5], [0, 0]] - longitude too high
+ "BG_rmytFg0lxJgsmytF_zlxJ", // [[5, -91], [0, 0]] - latitude too low
+ "BGgsmytFg0lxJ_rmytF_zlxJ", // [[5, 91], [0, 0]] - latitude too high
+ ];
+ for invalidString in invalidStrings {
+ XCTAssertThrowsError(try Polyline.decodeToLngLatArray(invalidString)) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.invalidCoordinateValue);
+ };
+ }
+ }
+
+ func testPolyline5DecodeInvalidValuesThrowsError() {
+ Polyline.setCompressionAlgorithm(.Polyline5);
+ let invalidStrings = [
+ "_qo]~pvoa@~po]_qvoa@", // [[-181, 5], [0, 0]] - longitude too low
+ "_qo]_qvoa@~po]~pvoa@", // [[181, 5], [0, 0]] - longitude too high
+ "~lljP_qo]_mljP~po]", // [[5, -91], [0, 0]] - latitude too low
+ "_mljP_qo]~lljP~po]", // [[5, 91], [0, 0]] - latitude too high
+ ];
+ for invalidString in invalidStrings {
+ XCTAssertThrowsError(try Polyline.decodeToLngLatArray(invalidString)) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.invalidCoordinateValue);
+ };
+ }
+ }
+
+ func testPolyline6DecodeInvalidValuesThrowsError() {
+ Polyline.setCompressionAlgorithm(.Polyline6);
+ let invalidStrings = [
+ "_sdpH~rjfxI~rdpH_sjfxI", // [[-181, 5], [0, 0]] - longitude too low
+ "_sdpH_sjfxI~rdpH~rjfxI", // [[181, 5], [0, 0]] - longitude too high
+ "~jeqlD_sdpH_keqlD~rdpH", // [[5, -91], [0, 0]] - latitude too low
+ "_keqlD_sdpH~jeqlD~rdpH", // [[5, 91], [0, 0]] - latitude too high
+ ];
+ for invalidString in invalidStrings {
+ XCTAssertThrowsError(try Polyline.decodeToLngLatArray(invalidString)) { error in
+ XCTAssertEqual(error as! Polyline.DecodeError, Polyline.DecodeError.invalidCoordinateValue);
+ };
+ }
+ }
+
+ // FlexiblePolyline is the only supported format that can encode 3D data, so the following tests
+ // target that algorithm specifically to verify that 3D coordinates round-trip as expected.
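+ // The Flexible-Polyline header also carries the third-dimension type and precision, so the
+ // decoded *Feature results below can be validated against the same CompressionParameters.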
+
+ func testFlexiblePolylineLngLatArrayHandlesThirdDimensionTypes() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let coords = [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ];
+ for thirdDimension in [Polyline.ThirdDimension.Level, Polyline.ThirdDimension.Altitude, Polyline.ThirdDimension.Elevation] {
+ do {
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: CompressionParameters(
+ thirdDimension: thirdDimension
+ ));
+ let result = try Polyline.decodeToLngLatArray(encodedLine);
+ XCTAssertEqual(result, coords);
+ } catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testFlexiblePolylineLineStringHandlesThirdDimensionTypes() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let coords = [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ];
+ for thirdDimension in [Polyline.ThirdDimension.Level, Polyline.ThirdDimension.Altitude, Polyline.ThirdDimension.Elevation] {
+ do {
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: CompressionParameters(
+ thirdDimension: thirdDimension
+ ));
+ let geojson = try Polyline.decodeToLineString(encodedLine);
+
+ validateLineString(geojson:geojson, coords:coords);
+ } catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testFlexiblePolylineLineStringFeatureHandlesThirdDimensionTypes() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let coords = [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ];
+ for thirdDimension in [Polyline.ThirdDimension.Level, Polyline.ThirdDimension.Altitude, Polyline.ThirdDimension.Elevation] {
+ do {
+ let parameters = CompressionParameters(
+ thirdDimension: thirdDimension
+ );
+ let encodedLine = try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: parameters);
+ let geojson = try Polyline.decodeToLineStringFeature(encodedLine);
+ validateLineStringFeature(geojson:geojson, coords:coords, parameters:parameters);
+ } catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testFlexiblePolylinePolygonHandlesThirdDimensionTypes() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let ringCoords = [
+ [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ], // outer ring
+ [
+ [2.0, 2.0, 5.0],
+ [2.0, 8.0, 0.0],
+ [8.0, 8.0, -5.0],
+ [8.0, 2.0, 0.0],
+ [2.0, 2.0, 5.0],
+ ], // inner ring
+ ];
+ for thirdDimension in [Polyline.ThirdDimension.Level, Polyline.ThirdDimension.Altitude, Polyline.ThirdDimension.Elevation] {
+ do {
+ var encodedRings: [String] = [];
+ for ring in ringCoords {
+ encodedRings.append(
+ try Polyline.encodeFromLngLatArray(lngLatArray: ring, parameters:
+ CompressionParameters(thirdDimension: thirdDimension)
+ ));
+ }
+ let geojson = try Polyline.decodeToPolygon(encodedRings);
+ validatePolygon(geojson:geojson, coords:ringCoords);
+ } catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testFlexiblePolylinePolygonFeatureHandlesThirdDimensionTypes() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+ let ringCoords = [
+ [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ], // outer ring
+ [
+ [2.0, 2.0, 5.0],
+ [2.0, 8.0, 0.0],
+ [8.0, 8.0, -5.0],
+ [8.0, 2.0, 0.0],
+ [2.0, 2.0, 5.0],
+ ], // inner ring
+ ];
+ for thirdDimension in [Polyline.ThirdDimension.Level, Polyline.ThirdDimension.Altitude, Polyline.ThirdDimension.Elevation] {
+ do {
+ let parameters = CompressionParameters(thirdDimension: thirdDimension);
+ var encodedRings: [String] = [];
+ for ring in ringCoords {
+ encodedRings.append(
+ try Polyline.encodeFromLngLatArray(lngLatArray: ring, parameters:
+ parameters
+ ));
+ }
+ let geojson = try Polyline.decodeToPolygonFeature(encodedRings);
+ validatePolygonFeature(geojson:geojson, coords:ringCoords, parameters:parameters);
+ } catch {
+ XCTFail("Unexpected error");
+ }
+ }
+ }
+
+ func testPolylineErrorsOnThreeDimensions() {
+ let coords = [
+ [0.0, 0.0, 5.0],
+ [10.0, 0.0, 0.0],
+ [10.0, 10.0, -5.0],
+ [0.0, 10.0, 0.0],
+ [0.0, 0.0, 5.0],
+ ];
+ for algorithm in [Polyline.CompressionAlgorithm.Polyline5, Polyline.CompressionAlgorithm.Polyline6] {
+ Polyline.setCompressionAlgorithm(algorithm);
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: CompressionParameters(
+ thirdDimension: ThirdDimension.Altitude
+ ))) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.inconsistentCoordinateDimensions);
+ };
+ }
+ }
+
+ // Verify that FlexiblePolyline checks for valid encoding settings
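+ // (Flexible-Polyline stores each precision as a small unsigned field in its header, so negative
+ // precision values cannot be represented and should be rejected at encode time.)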
+
+ func testFlexiblePolylineEncodeThrowsErrorWithNegative2DPrecision() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+
+ let coords = [[0.0, 0.0, 5.0], [10.0, 0.0, 0.0]];
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: CompressionParameters(precisionLngLat: -5))) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidPrecisionValue);
+ };
+ }
+
+ func testFlexiblePolylineEncodeThrowsErrorWithNegative3DPrecision() {
+ Polyline.setCompressionAlgorithm(.FlexiblePolyline);
+
+ let coords = [[0.0, 0.0, 5.0], [10.0, 0.0, 0.0]];
+ XCTAssertThrowsError(try Polyline.encodeFromLngLatArray(lngLatArray: coords, parameters: CompressionParameters(precisionThirdDimension: -5))) { error in
+ XCTAssertEqual(error as! Polyline.EncodeError, Polyline.EncodeError.invalidPrecisionValue);
+ };
+ }
+}
+