Commit
Merge release/1.76.0 into main
daxmobile authored Feb 21, 2024
2 parents 883db58 + 3002b7d commit 422814e
Showing 59 changed files with 701 additions and 295 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/publish_dmg_release.yml
@@ -109,8 +109,8 @@ jobs:
- name: Upload to S3
id: upload
env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_RELEASE_S3 }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_RELEASE_S3 }}
AWS_DEFAULT_REGION: ${{ vars.AWS_DEFAULT_REGION }}
run: |
# Back up existing appcast2.xml
2 changes: 1 addition & 1 deletion Configuration/BuildNumber.xcconfig
@@ -1 +1 @@
CURRENT_PROJECT_VERSION = 122
CURRENT_PROJECT_VERSION = 123
@@ -290,4 +290,26 @@ extension InMemoryDataCache: DBPUICommunicationDelegate {

return mapper.maintenanceScanState(brokerProfileQueryData)
}

func getDataBrokers() async -> [DBPUIDataBroker] {
brokerProfileQueryData
// 1. We get all brokers (in this list brokers are repeated)
.map { $0.dataBroker }
// 2. We map the brokers to the UI model
.flatMap { dataBroker -> [DBPUIDataBroker] in
var result: [DBPUIDataBroker] = []
result.append(DBPUIDataBroker(name: dataBroker.name, url: dataBroker.url))

for mirrorSite in dataBroker.mirrorSites {
result.append(DBPUIDataBroker(name: mirrorSite.name, url: mirrorSite.url))
}
return result
}
// 3. We delete duplicates
.reduce(into: [DBPUIDataBroker]()) { (result, dataBroker) in
if !result.contains(where: { $0.url == dataBroker.url }) {
result.append(dataBroker)
}
}
}
}
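
The new getDataBrokers() flattens each broker together with its mirror sites into DBPUIDataBroker values, then drops duplicates by URL. A minimal standalone sketch of the dedup step, using hypothetical broker values rather than the real registry:

// Sketch only: hypothetical values, not real broker data.
struct Broker { let name: String; let url: String }

let flattened = [
    Broker(name: "Centeda", url: "centeda.com"),
    Broker(name: "Centeda (mirror)", url: "centeda.net"),
    Broker(name: "Centeda", url: "centeda.com")     // repeated entry from another profile query
]

let unique = flattened.reduce(into: [Broker]()) { result, broker in
    // Keep only the first broker seen for each URL.
    if !result.contains(where: { $0.url == broker.url }) {
        result.append(broker)
    }
}
// unique holds two entries: centeda.com and centeda.net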
@@ -49,6 +49,7 @@ struct DatabaseView: View {
@State private var isPopoverVisible = false
@State private var selectedData: String = ""
let data: [DataBrokerDatabaseBrowserData.Row]
let rowHeight: CGFloat = 40.0

var body: some View {
if data.count > 0 {
@@ -62,6 +63,11 @@
}
}

private func spacerHeight(_ geometry: GeometryProxy) -> CGFloat {
let result = geometry.size.height - CGFloat(data.count) * rowHeight
return max(0, result)
}

private func dataView() -> some View {
GeometryReader { geometry in
ScrollView([.horizontal, .vertical]) {
@@ -86,7 +92,8 @@
ForEach(row.data.keys.sorted(), id: \.self) { key in
VStack {
Text("\(row.data[key]?.description ?? "")")
.frame(maxWidth: 200, maxHeight: 50)
.frame(maxWidth: 200)
.frame(height: rowHeight)
.frame(minWidth: 60)
.onTapGesture {
selectedData = row.data[key]?.description ?? ""
@@ -100,7 +107,8 @@
}
}
}
Spacer(minLength: geometry.size.height)
Spacer()
.frame(height: spacerHeight(geometry))
}
.frame(minWidth: geometry.size.width, minHeight: 0, alignment: .topLeading)
}
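
The new spacerHeight(_:) keeps the scroll content filling the visible area: it subtracts the total height of the rows from the available height and clamps at zero once the rows overflow, replacing the old Spacer(minLength: geometry.size.height). A small illustration of the arithmetic with made-up sizes:

import CoreGraphics

// Hypothetical sizes; the view uses geometry.size.height and data.count instead.
let rowHeight: CGFloat = 40.0

func spacerHeight(available: CGFloat, rowCount: Int) -> CGFloat {
    max(0, available - CGFloat(rowCount) * rowHeight)
}

let short = spacerHeight(available: 600, rowCount: 5)         // 400: spacer fills the remaining space
let overflowing = spacerHeight(available: 600, rowCount: 20)  // 0: rows already overflow, no extra spacer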
@@ -103,12 +103,24 @@ struct DBPUIAddressAtIndex: Codable {
/// Message Object representing a data broker
struct DBPUIDataBroker: Codable, Hashable {
let name: String
let url: String
let date: Double?

init(name: String, url: String, date: Double? = nil) {
self.name = name
self.url = url
self.date = date
}

func hash(into hasher: inout Hasher) {
hasher.combine(name)
}
}

struct DBPUIDataBrokerList: DBPUISendableMessage {
let dataBrokers: [DBPUIDataBroker]
}

/// Message Object representing a requested change to the user profile's birth year
struct DBPUIBirthYear: Codable {
let year: Int
@@ -123,6 +135,7 @@ struct DBPUIDataBrokerProfileMatch: Codable {
let addresses: [DBPUIUserProfileAddress]
let alternativeNames: [String]
let relatives: [String]
let date: Double? // Used in some methods to set the removedDate or found date
}

/// Protocol to represent a message that can be passed from the host to the UI
@@ -139,6 +152,10 @@ struct DBPUIScanAndOptOutMaintenanceState: DBPUISendableMessage {
struct DBPUIOptOutMatch: DBPUISendableMessage {
let dataBroker: DBPUIDataBroker
let matches: Int
let name: String
let alternativeNames: [String]
let addresses: [DBPUIUserProfileAddress]
let date: Double
}

/// Data representing the initial scan progress
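
DBPUIDataBroker now carries a url and an optional date, with the initializer defaulting date to nil and hash(into:) combining only the name. A short usage sketch with hypothetical values:

// Hypothetical values; real brokers come from the JSON registry.
let broker = DBPUIDataBroker(name: "Centeda", url: "centeda.com")                        // date defaults to nil
let removed = DBPUIDataBroker(name: "Centeda", url: "centeda.com", date: 1_708_473_600)  // e.g. a removal timestamp

// Hashing uses only the name, so both values share a hash bucket,
// while the synthesized == still compares all stored properties.
let sameBroker = (broker == removed)   // false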
@@ -32,13 +32,46 @@ extension Int {

struct MirrorSite: Codable, Sendable {
let name: String
let url: String
let addedAt: Date
let removedAt: Date?

enum CodingKeys: CodingKey {
case name
case url
case addedAt
case removedAt
}

init(name: String, url: String, addedAt: Date, removedAt: Date? = nil) {
self.name = name
self.url = url
self.addedAt = addedAt
self.removedAt = removedAt
}

init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
name = try container.decode(String.self, forKey: .name)

// Older versions of the JSON file did not have a URL property.
// When decoding those cases, we fall back to the name, since the name was the URL.
do {
url = try container.decode(String.self, forKey: .url)
} catch {
url = name
}

addedAt = try container.decode(Date.self, forKey: .addedAt)
removedAt = try? container.decode(Date.self, forKey: .removedAt)

}
}
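
The custom decoder keeps older broker files working: when a mirror-site entry has no url key, the name is reused as the URL. A hedged sketch of decoding such a legacy payload, assuming a seconds-based date strategy (the real strategy may differ):

import Foundation

// Hypothetical legacy payload: no "url" key, so the decoder falls back to the name.
let legacyJSON = Data("""
{ "name": "centeda.net", "addedAt": 1677736800 }
""".utf8)

let decoder = JSONDecoder()
decoder.dateDecodingStrategy = .secondsSince1970 // assumption for this sketch

do {
    let mirror = try decoder.decode(MirrorSite.self, from: legacyJSON)
    // mirror.url == "centeda.net" (copied from the name); mirror.removedAt is nil
} catch {
    print("decoding failed: \(error)")
}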

struct DataBroker: Codable, Sendable {
let id: Int64?
let name: String
let url: String
let steps: [Step]
let version: String
let schedulingConfig: DataBrokerScheduleConfig
@@ -51,6 +84,7 @@ struct DataBroker: Codable, Sendable {

enum CodingKeys: CodingKey {
case name
case url
case steps
case version
case schedulingConfig
@@ -60,6 +94,7 @@

init(id: Int64? = nil,
name: String,
url: String,
steps: [Step],
version: String,
schedulingConfig: DataBrokerScheduleConfig,
@@ -68,6 +103,13 @@
) {
self.id = id
self.name = name

if url.isEmpty {
self.url = name
} else {
self.url = url
}

self.steps = steps
self.version = version
self.schedulingConfig = schedulingConfig
@@ -78,6 +120,15 @@
init(from decoder: Decoder) throws {
let container = try decoder.container(keyedBy: CodingKeys.self)
name = try container.decode(String.self, forKey: .name)

// Older versions of the JSON file did not have a URL property.
// When decoding those cases, we fall back to the name, since the name was the URL.
do {
url = try container.decode(String.self, forKey: .url)
} catch {
url = name
}

version = try container.decode(String.self, forKey: .version)
steps = try container.decode([Step].self, forKey: .steps)
schedulingConfig = try container.decode(DataBrokerScheduleConfig.self, forKey: .schedulingConfig)
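
DataBroker applies the same compatibility rule twice: the memberwise initializer substitutes the name when an empty url is passed, and the decoder substitutes the name when the url key is missing from older JSON files. A sketch of the initializer-side rule in isolation, with hypothetical inputs:

// Sketch of the fallback rule only; the real initializer also takes steps,
// version, schedulingConfig, and mirrorSites.
func resolvedURL(name: String, url: String) -> String {
    url.isEmpty ? name : url
}

let fallback = resolvedURL(name: "verecor.com", url: "")          // "verecor.com" (falls back to the name)
let explicit = resolvedURL(name: "Centeda", url: "centeda.com")   // "centeda.com"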
@@ -99,7 +99,7 @@ extension DataBrokerOperation {
if action.needsEmail {
do {
stageCalculator?.setStage(.emailGenerate)
extractedProfile?.email = try await emailService.getEmail(dataBrokerName: query.dataBroker.name)
extractedProfile?.email = try await emailService.getEmail(dataBrokerURL: query.dataBroker.url)
stageCalculator?.fireOptOutEmailGenerate()
} catch {
await onError(error: DataBrokerProtectionError.emailError(error as? EmailError))
@@ -152,7 +152,7 @@ public struct DataBrokerProtectionBrokerUpdater {
// 2. If it does exist, we check the version number; if the version is newer, we update it
// 3. If it does not exist, we add it, and we create the scan operations related to it
private func update(_ broker: DataBroker) throws {
guard let savedBroker = try vault.fetchBroker(with: broker.name) else {
guard let savedBroker = try vault.fetchBroker(with: broker.url) else {
// The broker does not exist in the current storage. We need to add it.
try add(broker)
return
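
update(_:) now looks brokers up by url rather than name, so a broker renamed in the registry (for example "centeda.com" becoming "Centeda") still matches its stored record. A hedged sketch of the surrounding flow; the version check is assumed, since it sits outside the visible hunk:

// vault.fetchBroker(with:) and add(_:) appear in the diff above;
// the version comparison shown here is an assumption for this sketch.
private func update(_ broker: DataBroker) throws {
    guard let savedBroker = try vault.fetchBroker(with: broker.url) else {
        // Broker not stored yet: add it and create its scan operations.
        try add(broker)
        return
    }

    // Assumed shape of the version check: act only when the incoming
    // definition is newer than the stored one.
    if broker.version.compare(savedBroker.version, options: .numeric) == .orderedDescending {
        // ... persist the newer broker definition (not part of this hunk)
    }
}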
@@ -1,21 +1,22 @@
{
"name": "advancedbackgroundchecks.com",
"version": "0.1.0",
"name": "AdvancedBackgroundChecks",
"url": "advancedbackgroundchecks.com",
"version": "0.1.4",
"parent": "peoplefinders.com",
"addedDatetime": 1678078800000,
"addedDatetime": 1678082400000,
"steps": [
{
"stepType": "scan",
"scanType": "templatedUrl",
"actions": [
{
"actionType": "navigate",
"id": "ef8031e6-5e61-4183-b57e-7df156c7129a",
"id": "c73ba931-9e01-4d37-9e15-2fd7a14eefa3",
"url": "https://www.advancedbackgroundchecks.com/names/${firstName}-${lastName}_${city}-${state}_age_${age}"
},
{
"actionType": "extract",
"id": "f3ed744c-6cfc-4a99-b46e-6095587eadfc",
"id": "94003082-0a9d-4418-ac88-68595c7f4953",
"selector": ".card-block",
"profile": {
"name": {
@@ -1,6 +1,7 @@
{
"name": "backgroundcheck.run",
"version": "0.1.1",
"url": "backgroundcheck.run",
"version": "0.1.4",
"parent": "verecor.com",
"addedDatetime": 1677736800000,
"steps": [
@@ -10,12 +11,12 @@
"actions": [
{
"actionType": "navigate",
"id": "aa12b430-8e5d-4c64-bb77-2961f19a1bc8",
"id": "5f90e39f-cb94-4b8d-94ed-48ba0060dc08",
"url": "https://backgroundcheck.run/profile/search?fname=${firstName}&lname=${lastName}&state=${state}&city=${city}"
},
{
"actionType": "extract",
"id": "75fd2e16-d84a-4bbe-9cf1-79c6d1cc4dec",
"id": "3225fa15-4e00-4e6a-bfc7-a85dfb504c86",
"selector": ".b-pfl-list",
"profile": {
"name": {
@@ -1,16 +1,17 @@
{
"name": "centeda.com",
"version": "0.1.1",
"name": "Centeda",
"url": "centeda.com",
"version": "0.1.4",
"parent": "verecor.com",
"addedDatetime": 1677733200000,
"addedDatetime": 1677736800000,
"steps": [
{
"stepType": "scan",
"scanType": "templatedUrl",
"actions": [
{
"actionType": "navigate",
"id": "25990359-3d58-45de-bdfd-d524b1946e57",
"id": "2f6639c0-201f-4d5e-8467-ae0ba457b409",
"url": "https://centeda.com/profile/search?fname=${firstName}&lname=${lastName}&state=${state}&city=${city}&fage=${age|ageRange}",
"ageRange": [
"18-30",
@@ -24,7 +25,7 @@
},
{
"actionType": "extract",
"id": "7108af78-dbbf-47ec-8bb9-e44be505993e",
"id": "e2e236b0-515b-43b3-9154-0432ed9b7566",
"selector": ".search-item",
"profile": {
"name": {
@@ -63,4 +64,4 @@
"confirmOptOutScan": 72,
"maintenanceScan": 240
}
}
}
@@ -1,6 +1,7 @@
{
"name": "clubset.com",
"version": "0.1.1",
"name": "Clubset",
"url": "clubset.com",
"version": "0.1.4",
"parent": "verecor.com",
"addedDatetime": 1702965600000,
"steps": [
@@ -10,7 +11,7 @@
"actions": [
{
"actionType": "navigate",
"id": "917f5d40-2011-4fe5-9ef6-136d6bfaea35",
"id": "5c559c67-c13c-4055-a318-6ba35d62a2cf",
"url": "https://clubset.com/profile/search?fname=${firstName}&lname=${lastName}&state=${state|upcase}&city=${city|capitalize}&fage=${age|ageRange}",
"ageRange": [
"18-30",
@@ -24,7 +25,7 @@
},
{
"actionType": "extract",
"id": "06e37215-ef34-4971-bf86-e5a03dfe46e8",
"id": "866bdfc5-069e-4734-9ce0-a19976fa796b",
"selector": ".card",
"profile": {
"name": {
@@ -1,21 +1,22 @@
{
"name": "clustrmaps.com",
"version": "0.1.1",
"name": "ClustrMaps",
"url": "clustrmaps.com",
"version": "0.1.4",
"parent": "neighbor.report",
"addedDatetime": 1692590400000,
"addedDatetime": 1692594000000,
"steps": [
{
"stepType": "scan",
"scanType": "templatedUrl",
"actions": [
{
"actionType": "navigate",
"id": "a39655de-5c23-477d-9887-1d34966a1069",
"id": "e6929e37-4764-450a-be2a-73479f11842a",
"url": "https://clustrmaps.com/persons/${firstName}-${lastName}/${state|stateFull|capitalize}/${city|hyphenated}"
},
{
"actionType": "extract",
"id": "4e3a628e-3634-4a2b-b632-4fbb8ce0b52b",
"id": "06f39df7-89c2-40da-b288-cdf3ed0e4bfd",
"selector": ".//div[@itemprop='Person']",
"profile": {
"name": {
@@ -55,4 +56,4 @@
"confirmOptOutScan": 72,
"maintenanceScan": 240
}
}
}