Add data model and structure for Swift graph executor; rework create products to be more general

Michael Pivato
2024-05-09 22:50:35 +09:30
parent 51fc216c82
commit 98d38d47a3
17 changed files with 432 additions and 50 deletions

.gitignore

@@ -1,3 +1,8 @@
/target
.DS_Store
*.xcuserdatad
.venv
*.csv
*.h
*.py
.idea

FastCoster.xcodeproj/project.pbxproj

@@ -12,6 +12,14 @@
5A1986FB2996502C00FA0471 /* FileButtonSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A1986FA2996502C00FA0471 /* FileButtonSelector.swift */; };
5A450751298CE6D500E3D402 /* CsvDocument.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A450750298CE6D500E3D402 /* CsvDocument.swift */; };
5A45075B298D01EF00E3D402 /* libcoster_rs.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 5A45075A298D01EF00E3D402 /* libcoster_rs.a */; };
5A53D5742BE4B4FB00563893 /* FileNodeView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D5732BE4B4FB00563893 /* FileNodeView.swift */; };
5A53D5772BE4B98300563893 /* SwiftCSV in Frameworks */ = {isa = PBXBuildFile; productRef = 5A53D5762BE4B98300563893 /* SwiftCSV */; };
5A53D5792BE4C0C300563893 /* CsvEditor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D5782BE4C0C300563893 /* CsvEditor.swift */; };
5A53D57B2BE4C1D400563893 /* OutputFilesView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D57A2BE4C1D400563893 /* OutputFilesView.swift */; };
5A53D5822BE507AD00563893 /* ChartEditor.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D5812BE507AD00563893 /* ChartEditor.swift */; };
5A53D5842BE507FF00563893 /* ChartView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D5832BE507FF00563893 /* ChartView.swift */; };
5A53D5892BE5182C00563893 /* Tasks.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D5882BE5182C00563893 /* Tasks.swift */; };
5A53D58B2BE518CA00563893 /* Graph.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5A53D58A2BE518CA00563893 /* Graph.swift */; };
5ADD9F2D298A713300F998F5 /* FastCosterApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5ADD9F2C298A713300F998F5 /* FastCosterApp.swift */; };
5ADD9F2F298A713300F998F5 /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5ADD9F2E298A713300F998F5 /* ContentView.swift */; };
5ADD9F31298A713400F998F5 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 5ADD9F30298A713400F998F5 /* Assets.xcassets */; };
@@ -48,6 +56,13 @@
5A450755298CFFE400E3D402 /* create-lib.sh */ = {isa = PBXFileReference; lastKnownFileType = text.script.sh; path = "create-lib.sh"; sourceTree = "<group>"; };
5A450756298D00AE00E3D402 /* remove-lib.sh */ = {isa = PBXFileReference; lastKnownFileType = text.script.sh; path = "remove-lib.sh"; sourceTree = "<group>"; };
5A45075A298D01EF00E3D402 /* libcoster_rs.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; name = libcoster_rs.a; path = "../costerrs/target/aarch64-apple-ios/release/libcoster_rs.a"; sourceTree = "<group>"; };
5A53D5732BE4B4FB00563893 /* FileNodeView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileNodeView.swift; sourceTree = "<group>"; };
5A53D5782BE4C0C300563893 /* CsvEditor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CsvEditor.swift; sourceTree = "<group>"; };
5A53D57A2BE4C1D400563893 /* OutputFilesView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OutputFilesView.swift; sourceTree = "<group>"; };
5A53D5812BE507AD00563893 /* ChartEditor.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChartEditor.swift; sourceTree = "<group>"; };
5A53D5832BE507FF00563893 /* ChartView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChartView.swift; sourceTree = "<group>"; };
5A53D5882BE5182C00563893 /* Tasks.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Tasks.swift; sourceTree = "<group>"; };
5A53D58A2BE518CA00563893 /* Graph.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Graph.swift; sourceTree = "<group>"; };
5ADD9F29298A713300F998F5 /* FastCoster.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = FastCoster.app; sourceTree = BUILT_PRODUCTS_DIR; };
5ADD9F2C298A713300F998F5 /* FastCosterApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FastCosterApp.swift; sourceTree = "<group>"; };
5ADD9F2E298A713300F998F5 /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = "<group>"; };
@@ -67,6 +82,7 @@
buildActionMask = 2147483647;
files = (
5A45075B298D01EF00E3D402 /* libcoster_rs.a in Frameworks */,
5A53D5772BE4B98300563893 /* SwiftCSV in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -106,6 +122,24 @@
name = Frameworks;
sourceTree = "<group>";
};
5A53D5802BE4C26A00563893 /* Charts */ = {
isa = PBXGroup;
children = (
5A53D5812BE507AD00563893 /* ChartEditor.swift */,
5A53D5832BE507FF00563893 /* ChartView.swift */,
);
path = Charts;
sourceTree = "<group>";
};
5A53D5852BE50C7B00563893 /* Model */ = {
isa = PBXGroup;
children = (
5A53D5882BE5182C00563893 /* Tasks.swift */,
5A53D58A2BE518CA00563893 /* Graph.swift */,
);
path = Model;
sourceTree = "<group>";
};
5ADD9F20298A713300F998F5 = {
isa = PBXGroup;
children = (
@@ -131,6 +165,8 @@
5ADD9F2B298A713300F998F5 /* FastCoster */ = {
isa = PBXGroup;
children = (
5A53D5852BE50C7B00563893 /* Model */,
5A53D5802BE4C26A00563893 /* Charts */,
5ADD9F2C298A713300F998F5 /* FastCosterApp.swift */,
5ADD9F2E298A713300F998F5 /* ContentView.swift */,
5ADD9F30298A713400F998F5 /* Assets.xcassets */,
@@ -140,6 +176,9 @@
5A1986F62996436500FA0471 /* OverheadAllocation.swift */,
5A1986F82996436D00FA0471 /* MoveMoney.swift */,
5A1986FA2996502C00FA0471 /* FileButtonSelector.swift */,
5A53D5732BE4B4FB00563893 /* FileNodeView.swift */,
5A53D5782BE4C0C300563893 /* CsvEditor.swift */,
5A53D57A2BE4C1D400563893 /* OutputFilesView.swift */,
);
path = FastCoster;
sourceTree = "<group>";
@@ -187,6 +226,9 @@
dependencies = (
);
name = FastCoster;
packageProductDependencies = (
5A53D5762BE4B98300563893 /* SwiftCSV */,
);
productName = FastCoster;
productReference = 5ADD9F29298A713300F998F5 /* FastCoster.app */;
productType = "com.apple.product-type.application";
@@ -259,6 +301,9 @@
Base,
);
mainGroup = 5ADD9F20298A713300F998F5;
packageReferences = (
5A53D5752BE4B98300563893 /* XCRemoteSwiftPackageReference "SwiftCSV" */,
);
productRefGroup = 5ADD9F2A298A713300F998F5 /* Products */;
projectDirPath = "";
projectRoot = "";
@@ -344,11 +389,18 @@
buildActionMask = 2147483647;
files = (
5A1986FB2996502C00FA0471 /* FileButtonSelector.swift in Sources */,
5A53D58B2BE518CA00563893 /* Graph.swift in Sources */,
5ADD9F2F298A713300F998F5 /* ContentView.swift in Sources */,
5A1986F92996436D00FA0471 /* MoveMoney.swift in Sources */,
5A53D57B2BE4C1D400563893 /* OutputFilesView.swift in Sources */,
5ADD9F2D298A713300F998F5 /* FastCosterApp.swift in Sources */,
5A450751298CE6D500E3D402 /* CsvDocument.swift in Sources */,
5A53D5822BE507AD00563893 /* ChartEditor.swift in Sources */,
5A53D5842BE507FF00563893 /* ChartView.swift in Sources */,
5A53D5792BE4C0C300563893 /* CsvEditor.swift in Sources */,
5A53D5892BE5182C00563893 /* Tasks.swift in Sources */,
5A1986F72996436500FA0471 /* OverheadAllocation.swift in Sources */,
5A53D5742BE4B4FB00563893 /* FileNodeView.swift in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -524,7 +576,7 @@
"LIBRARY_SEARCH_PATHS[arch=*]" = "${DERIVED_FILES_DIR}"; "LIBRARY_SEARCH_PATHS[arch=*]" = "${DERIVED_FILES_DIR}";
MACOSX_DEPLOYMENT_TARGET = 13.1; MACOSX_DEPLOYMENT_TARGET = 13.1;
MARKETING_VERSION = 1.0; MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.Vato.FastCoster; PRODUCT_BUNDLE_IDENTIFIER = dev.michaelpivato.FastCoster;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto; SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
@@ -564,7 +616,7 @@
"LIBRARY_SEARCH_PATHS[arch=*]" = "${DERIVED_FILES_DIR}"; "LIBRARY_SEARCH_PATHS[arch=*]" = "${DERIVED_FILES_DIR}";
MACOSX_DEPLOYMENT_TARGET = 13.1; MACOSX_DEPLOYMENT_TARGET = 13.1;
MARKETING_VERSION = 1.0; MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = com.Vato.FastCoster; PRODUCT_BUNDLE_IDENTIFIER = dev.michaelpivato.FastCoster;
PRODUCT_NAME = "$(TARGET_NAME)"; PRODUCT_NAME = "$(TARGET_NAME)";
SDKROOT = auto; SDKROOT = auto;
SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx"; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator macosx";
@@ -705,6 +757,25 @@
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCRemoteSwiftPackageReference section */
5A53D5752BE4B98300563893 /* XCRemoteSwiftPackageReference "SwiftCSV" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/swiftcsv/SwiftCSV.git";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 0.9.1;
};
};
/* End XCRemoteSwiftPackageReference section */
/* Begin XCSwiftPackageProductDependency section */
5A53D5762BE4B98300563893 /* SwiftCSV */ = {
isa = XCSwiftPackageProductDependency;
package = 5A53D5752BE4B98300563893 /* XCRemoteSwiftPackageReference "SwiftCSV" */;
productName = SwiftCSV;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = 5ADD9F21298A713300F998F5 /* Project object */;
}

FastCoster/Charts/ChartEditor.swift

@@ -0,0 +1,18 @@
//
// ChartEditor.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import SwiftUI
struct ChartEditor: View {
var body: some View {
Text(/*@START_MENU_TOKEN@*/"Hello, World!"/*@END_MENU_TOKEN@*/)
}
}
#Preview {
ChartEditor()
}

FastCoster/Charts/ChartView.swift

@@ -0,0 +1,19 @@
//
// ChartView.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import SwiftUI
struct ChartView: View {
// View the chart for the given file and configuration: https://developer.apple.com/documentation/Charts
var body: some View {
Text(/*@START_MENU_TOKEN@*/"Hello, World!"/*@END_MENU_TOKEN@*/)
}
}
#Preview {
ChartView()
}
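For reference, a minimal sketch of the direction the linked Charts documentation points in; the CostPoint model, mark choice, and field names are assumptions for illustration, not part of this commit:

import SwiftUI
import Charts

// Hypothetical data point; the real model isn't in this commit.
struct CostPoint: Identifiable {
    let id = UUID()
    let label: String
    let total: Double
}

struct CostChart: View {
    let points: [CostPoint]

    var body: some View {
        // One bar per row; the Charts framework handles axes and scaling.
        Chart(points) { point in
            BarMark(
                x: .value("Label", point.label),
                y: .value("Total", point.total)
            )
        }
    }
}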

FastCoster/ContentView.swift

@@ -11,6 +11,7 @@ enum ProcessType: String, Hashable {
case MoveMoney = "Move Money"
case OverheadAllocation = "Overhead Allocation"
// TODO: This needs to be the list of graphs
static let values = [MoveMoney, OverheadAllocation]
}
@@ -37,6 +38,7 @@ struct ContentView: View {
}
}
}
// TODO: Button to add a new graph
}

FastCoster/CsvEditor.swift

@@ -0,0 +1,20 @@
//
// CsvEditor.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import SwiftUI
struct CsvEditor: View {
// A table to view data in a file: https://developer.apple.com/documentation/SwiftUI/Table
// It's fine to load it all into memory to begin with; we'll probably want to change that later, though.
var body: some View {
Text(/*@START_MENU_TOKEN@*/"Hello, World!"/*@END_MENU_TOKEN@*/)
}
}
#Preview {
CsvEditor()
}
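A rough sketch of the Table-based editor the comment describes, assuming a hypothetical fixed row shape; real parsing would come from the SwiftCSV package this commit adds:

import SwiftUI

// Hypothetical row type; columns are assumptions for illustration.
struct CsvRow: Identifiable {
    let id = UUID()
    let account: String
    let costCentre: String
    let amount: String
}

struct CsvTable: View {
    // All rows held in memory, matching the note above.
    let rows: [CsvRow]

    var body: some View {
        Table(rows) {
            TableColumn("Account", value: \.account)
            TableColumn("Cost Centre", value: \.costCentre)
            TableColumn("Amount", value: \.amount)
        }
    }
}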

FastCoster/FileNodeView.swift

@@ -0,0 +1,33 @@
//
// FileNode.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import SwiftUI
struct FileNodeView: View {
@State private var showPicker = false
@State private var selectedFileUrl: URL?
var body: some View {
// Should basically show a file selector.
Button {
showPicker.toggle()
} label: {
Text("Select File")
}.fileImporter(isPresented: $showPicker, allowedContentTypes: [.commaSeparatedText]) { result in
switch result {
case .success(let fileUrl):
selectedFileUrl = fileUrl
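// Note (assumption, not in this commit): in a sandboxed app, reading this URL
// later may require calling startAccessingSecurityScopedResource() on it first.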
case .failure(let error):
print(error)
}
}.padding()
}
}
#Preview {
FileNodeView()
}

FastCoster/Model/Graph.swift

@@ -0,0 +1,23 @@
//
// Graph.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import Foundation
// JSON for saving/loading configuration: https://www.avanderlee.com/swift/json-parsing-decoding/
struct Node: Codable {
var id: Int
var info: NodeInfo
var dependentNodeIds: [Int]
func hasDependentNodes() -> Bool {
return !dependentNodeIds.isEmpty
}
}
struct Graph: Codable {
var name: String
var nodes: [Node]
}
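As a companion to this model, one way dependentNodeIds could drive the graph executor is a Kahn-style topological sort. A sketch only, assuming dependentNodeIds lists the nodes a node depends on; the function below is not part of this commit:

// Returns node ids ordered so every node comes after its dependencies,
// or nil if the graph contains a cycle.
func executionOrder(for graph: Graph) -> [Int]? {
    var remainingDeps = [Int: Set<Int>]()
    var dependents = [Int: [Int]]()
    for node in graph.nodes {
        remainingDeps[node.id] = Set(node.dependentNodeIds)
        for dep in node.dependentNodeIds {
            dependents[dep, default: []].append(node.id)
        }
    }
    // Start from nodes with no unmet dependencies.
    var ready = remainingDeps.filter { $0.value.isEmpty }.map(\.key)
    var order = [Int]()
    while let id = ready.popLast() {
        order.append(id)
        for next in dependents[id, default: []] {
            remainingDeps[next]?.remove(id)
            if remainingDeps[next]?.isEmpty == true { ready.append(next) }
        }
    }
    return order.count == graph.nodes.count ? order : nil
}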

FastCoster/Model/Tasks.swift

@@ -0,0 +1,118 @@
//
// InputFile.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import Foundation
struct NodeInfo: Codable {
var name: String
var outputFiles: [String]
var configuration: NodeConfiguration
}
// Need to check if enums with associated data actually work with JSON serialisation/deserialisation;
// otherwise we can look into binary serialisation/deserialisation instead
enum NodeConfiguration: Codable {
case FileNode
case MoveMoneyNode(MoveMoneyNode)
case MergeNode(MergeNode)
case DeriveNode(DeriveNode)
}
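// On the serialisation question above: since Swift 5.5, SE-0295 synthesizes
// Codable for enums with associated values, so this should round-trip as-is.
// A sketch only; the sample values below are assumptions, not part of this commit:
func demoEnumCoding() throws {
    let config = NodeConfiguration.MoveMoneyNode(MoveMoneyNode(
        departmentsPath: "departments.csv",
        accountsPath: "accounts.csv",
        glPath: "gl.csv",
        rules: []))
    let data = try JSONEncoder().encode(config)
    // Synthesized keys use the case name; unlabeled payloads are keyed "_0", "_1", ...
    // e.g. {"MoveMoneyNode":{"_0":{"departmentsPath":"departments.csv",...,"rules":[]}}}
    print(String(data: data, encoding: .utf8) ?? "")
}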
enum MoveMoneyAmountType: String, Codable {
case Percent, Amount
}
struct MoveMoneyRule: Codable {
let fromAccount: String
let fromCC: String
let toAccount: String
let toCC: String
let value: Double
let type: MoveMoneyAmountType
}
struct MoveMoneyNode: Codable {
var departmentsPath: String
var accountsPath: String
var glPath: String
var rules: [MoveMoneyRule]
}
enum JoinType: Codable {
case Left, Inner, Right
}
struct MergeJoin: Codable {
var type: JoinType
var leftColumnName: String
var rightColumnName: String
}
struct MergeNode: Codable {
var inputFiles: [String]
var joins: [MergeJoin]
}
enum DeriveColumnType: Codable {
case Column(String)
case Constant(String)
}
struct MapOperation: Codable {
var mappedValue: String
}
enum DatePart: Codable {
case Year, Month, Week, Day, Hour, Minute, Second
}
enum SplitType: Codable {
case DateTime(String, DatePart)
case Numeric(String, Int)
}
enum MatchComparisonType: Codable {
case Equal, GreaterThan, LessThan
}
enum DeriveOperation: Codable {
case Concat([DeriveColumnType])
case Add([DeriveColumnType])
case Multiply([DeriveColumnType])
case Subtract(DeriveColumnType, DeriveColumnType)
case Divide(DeriveColumnType, DeriveColumnType)
case Map(String, [MapOperation])
case Split(String, SplitType)
}
struct DeriveFilter: Codable {
var columnName: String
var comparator: MatchComparisonType
var matchValue: String
}
struct DeriveRule: Codable {
// Should this actually be an array though? I think it's fine?
var operations: [DeriveOperation]
// Filter to only specific values if required, if empty every value is considered a match
var filters: [DeriveFilter]
}
struct DeriveNode: Codable {
var rules: [DeriveRule]
}
// Example json serialisation
func tryJson() {
do {
let json = try JSONEncoder().encode(NodeInfo(name: "", outputFiles: [], configuration: NodeConfiguration.FileNode))
let decoded = try JSONDecoder().decode(NodeInfo.self, from: json)
print(String(data: json, encoding: .utf8) ?? "", decoded)
} catch {
print("JSON round-trip failed: \(error)")
}
}

FastCoster/OutputFilesView.swift

@@ -0,0 +1,19 @@
//
// OutputFilesView.swift
// FastCoster
//
// Created by Michael Pivato on 3/5/2024.
//
import SwiftUI
struct OutputFilesView: View {
// List of files, with links to open a file editor to edit the linked files
var body: some View {
Text(/*@START_MENU_TOKEN@*/"Hello, World!"/*@END_MENU_TOKEN@*/)
}
}
#Preview {
OutputFilesView()
}
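A rough sketch of the file list described in the comment; the row source and navigation destination are assumptions:

import SwiftUI

struct OutputFileList: View {
    // Paths produced by upstream nodes; hypothetical for illustration.
    let filePaths: [String]

    var body: some View {
        // One row per output file, each linking through to the CSV editor.
        List(filePaths, id: \.self) { path in
            NavigationLink(path) {
                CsvEditor()
            }
        }
    }
}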

FastCoster/OverheadAllocation.swift

@@ -8,6 +8,7 @@
import SwiftUI
struct OverheadAllocation: View {
// TODO: Refactor to take inputs from another task instead
@State private var lines: String?
@State private var accounts: String?
@State private var areas: String?

View File

@@ -14,6 +14,6 @@ async fn main() -> anyhow::Result<()> {
.connect(&connection_string)
.await?;
-upload_to_db::upload_file_bulk(&pool, &"".to_owned(), &"".to_owned(), None, "".to_owned()).await?;
+// upload_to_db::upload_file_bulk(&pool, &"".to_owned(), &"".to_owned(), None, "".to_owned()).await?;
Ok(())
}

View File

@@ -59,6 +59,33 @@ pub extern "C" fn move_money_from_text(
// This looks like exactly what I'm doing too: https://mozilla.github.io/firefox-browser-architecture/experiments/2017-09-06-rust-on-ios.htmlcar
}
#[no_mangle]
pub extern "C" fn move_money_from_file(
rules_file: *const c_char,
lines: *const c_char,
accounts: *const c_char,
cost_centres: *const c_char,
output_path: *const c_char,
use_numeric_accounts: bool,
) {
let mut output_writer = csv::Writer::from_writer(vec![]);
let safe_rules = unwrap_c_char(rules_file);
let safe_lines = unwrap_c_char(lines);
let safe_accounts = unwrap_c_char(accounts);
let safe_cost_centres = unwrap_c_char(cost_centres);
move_money_2()
// move_money(
// ,
// &mut csv::Reader::from_reader(safe_lines.to_str().unwrap()),
// &mut csv::Reader::from_reader(safe_accounts.to_bytes()),
// &mut csv::Reader::from_reader(safe_cost_centres.to_bytes()),
// &mut output_writer,
// use_numeric_accounts,
// false,
// )
// .expect("Failed to move money");
}
#[no_mangle]
pub unsafe extern "C" fn move_money_from_text_free(s: *mut c_char) {
unsafe {

View File

@@ -242,7 +242,7 @@ fn main() -> anyhow::Result<()> {
date_order_column: None,
},
);
-coster_rs::create_products::create_products_polars(definitions, inputs, output)
+coster_rs::create_products::create_products_polars(definitions, vec![], output)
}
}
}

View File

@@ -39,14 +39,14 @@ struct Product {
pub struct InputFile {
pub file_path: PathBuf,
-pub joins: HashMap<SourceType, String>,
+pub joins: HashMap<PathBuf, String>,
// if not specified, then don't allow change in type builds, as there's no way to detect changes over time
pub date_order_column: Option<String>,
}
pub fn create_products_polars(
definitions_path: PathBuf,
-inputs: HashMap<SourceType, InputFile>,
+inputs: Vec<InputFile>,
output_path: PathBuf,
) -> anyhow::Result<()> {
let definitions = read_definitions(&mut csv::Reader::from_path(definitions_path)?)?;
@@ -59,7 +59,7 @@ pub fn create_products_polars(
pub fn build_polars(
definition: &Definition,
-inputs: &HashMap<SourceType, InputFile>,
+inputs: &Vec<InputFile>,
output_path: &PathBuf,
) -> anyhow::Result<()> {
// 1. Apply filters to limit encounters
@@ -81,8 +81,7 @@ pub fn build_polars(
})
.reduce(|prev, next| prev.and(next));
-let input_file = inputs
-.get(&definition.source_type)
+let input_file = inputs.iter().find(|input| input.file_path == definition.source)
.ok_or(anyhow!("Failed to find valid file"))?;
let mut reader = LazyCsvReader::new(&input_file.file_path)
.has_header(true)
@@ -98,9 +97,9 @@ pub fn build_polars(
}
for source_type in required_files {
// TODO: Better error messages
-if source_type != &definition.source_type {
+if source_type != &definition.source {
-let source_file = inputs
-.get(&source_type)
+let source_file = inputs.iter()
+.find(|input| &input.file_path == source_type)
.ok_or(anyhow!("Input file was not specified for source type"))?;
// TODO: Alias the joined columns so they don't potentially clash with the current column
let join_reader = LazyCsvReader::new(source_file.file_path.clone()).finish()?;
@@ -110,27 +109,32 @@ pub fn build_polars(
.ok_or(anyhow!("Failed to get left join column"))?; .ok_or(anyhow!("Failed to get left join column"))?;
let right_column = source_file let right_column = source_file
.joins .joins
.get(&definition.source_type) .get(&definition.source)
.ok_or(anyhow!("Failed to get right join column"))?; .ok_or(anyhow!("Failed to get right join column"))?;
reader = reader.inner_join(join_reader, col(&left_column), col(&right_column)); reader = reader.inner_join(join_reader, col(&left_column), col(&right_column));
} }
} }
// TODO: Also work out how to expand rows, so that transfers can have stuff like daily or change in x expanded into multiple rows
// Since it's related to time it is probably related to this: https://docs.pola.rs/user-guide/transformations/time-series/parsing/
// I'm guessing upsampling is what I'm looking for: https://docs.pola.rs/user-guide/transformations/time-series/resampling/#upsampling-to-a-higher-frequency
// Can use different strategies to break the time period down, range can be calculated by using start/end datetime
// Wonder if this can be done more generally (e.g. splitting up based on a number?)
// Note: This must occur before creating the components, since we'll need to create one for every upsampled row
let mut built_expression = lit("");
// Create component columns
for component in &definition.components {
match component {
Component::Constant(constant) => {
built_expression = built_expression + lit(constant.clone())
}
// TODO: Do we need to worry about the source type? Might be clashing column names we need to think about earlier then address here?
// TODO: What I really want to do is not use source type, instead I want to be referring to a file, which we translate from the sourcetype
// to an actual filename. I don't want to be limited by a concept of 'sourcetype' at all, instead the definition should treat everything
// the same, and just translate the imported csv format to the necessary files and columns in files that are expected to be input.
Component::Field(source_type, column) => {
built_expression = built_expression + col(&column)
-} // TODO: Also work out how to expand rows, so that transfers can have stuff like daily or change in x expanded into multiple rows
-// Since it's related to time it is probably related to this: https://docs.pola.rs/user-guide/transformations/time-series/parsing/
-// I'm guessing upsampling is what I'm looking for: https://docs.pola.rs/user-guide/transformations/time-series/resampling/#upsampling-to-a-higher-frequency
-// Can use different strategies to break the time period down, range can be calculated by using start/end datetime
-// Wonder if this can be done more generally (e.g. splitting up based on a number?)
+}
}
}

View File

@@ -1,4 +1,4 @@
-use std::{collections::HashMap, io::Read};
+use std::{collections::HashMap, io::Read, path::PathBuf};
use anyhow::bail;
use chrono::NaiveDateTime;
@@ -6,7 +6,7 @@ use chrono::NaiveDateTime;
#[derive(Hash, PartialEq, PartialOrd)]
pub struct Filter {
pub filter_type: FilterType,
-pub file: SourceType,
+pub file: PathBuf,
pub field: String,
pub value: String,
}
@@ -16,7 +16,8 @@ pub enum SourceType {
CodingDiagnosis,
CodingProcedure,
Encounter,
-Incident,
+// TODO: Incident isn't used right now
+// Incident,
Patient,
Revenue,
Service,
@@ -31,7 +32,6 @@ impl TryFrom<&String> for SourceType {
"CD" => Ok(SourceType::CodingDiagnosis), "CD" => Ok(SourceType::CodingDiagnosis),
"CP" => Ok(SourceType::CodingProcedure), "CP" => Ok(SourceType::CodingProcedure),
"E" => Ok(SourceType::Encounter), "E" => Ok(SourceType::Encounter),
"I" => Ok(SourceType::Incident),
"P" => Ok(SourceType::Patient), "P" => Ok(SourceType::Patient),
"R" => Ok(SourceType::Revenue), "R" => Ok(SourceType::Revenue),
"S" => Ok(SourceType::Service), "S" => Ok(SourceType::Service),
@@ -62,6 +62,18 @@ impl SourceType {
_ => bail!("Invalid ComponentSourceType found: {}", value),
}
}
fn to_file_path(&self) -> String {
match self {
SourceType::CodingDiagnosis => "coding_diagnoses.csv".to_owned(),
SourceType::CodingProcedure => "coding_procedures.csv".to_owned(),
SourceType::Encounter => "encounters.csv".to_owned(),
SourceType::Patient => "patients.csv".to_owned(),
SourceType::Revenue => "revenues.csv".to_owned(),
SourceType::Service => "services.csv".to_owned(),
SourceType::Transfer => "transfers.csv".to_owned(),
}
}
}
#[derive(Hash, PartialEq, PartialOrd)]
@@ -104,7 +116,7 @@ pub enum ExtraType {
pub enum Component {
Constant(String),
// File, column_name
-Field(SourceType, String),
+Field(PathBuf, String),
}
// Frequency per type:
@@ -233,7 +245,7 @@ pub struct Definition {
pub name: String,
pub components: Vec<Component>,
pub filters: Vec<Filter>,
-pub source_type: SourceType,
+pub source: PathBuf,
pub frequency: Frequency,
pub quantity: BuiltQuantity,
pub duration_fallback: DurationFallback,
@@ -284,7 +296,7 @@ where
name: record.get("Name").unwrap().to_owned(),
components: vec![],
filters: vec![],
-source_type: build_from,
+source: build_from.to_file_path().into(),
frequency,
quantity: built_quantity,
// TODO: Figure this out
@@ -307,7 +319,7 @@ where
// TODO: extra/classification types need to append Extra:/Classification: to the start of the field
field: record.get("FilterField").unwrap().clone(),
value: record.get("FilterValue").unwrap().clone(),
-file: source_type,
+file: source_type.to_file_path().into(),
}
};
let all_filters = &mut all_definitions
@@ -327,9 +339,7 @@ where
source => {
let component_source_type = SourceType::from_component_source_type(source)?;
Component::Field(
-// TODO: Figure this out, should be determined from the source type
-component_source_type,
-// TODO: Field probably needs to be enumed to match onto the correct column in input files
+component_source_type.to_file_path().into(),
record.get("ComponentValueOrField").unwrap().to_owned(),
)
}
@@ -349,7 +359,7 @@ where
field: record.get("ConstraintColumn").unwrap().to_owned(),
filter_type,
value: record.get("ConstraintValue").unwrap().to_owned(),
-file: source_type,
+file: source_type.to_file_path().into(),
}
};
let all_filters = &mut all_definitions

View File

@@ -1,7 +1,11 @@
use std::{collections::HashMap, io::Read};
use csv::Reader;
-use sqlx::{Mssql, Pool, QueryBuilder};
+use sqlx::{query, query_builder, Any, Mssql, Pool, QueryBuilder};
// Note: right now this is set to mssql only. sqlx 0.7 is required to use the Any
// type (0.6 and earlier hit a query_builder lifetime issue), but sqlx >= 0.7
// currently doesn't support mssql.
// Upload data in a file to a db table, with an optional post-script to run,
// such as to move data from the upload table into other tables
@@ -9,7 +13,7 @@ use sqlx::{Mssql, Pool, QueryBuilder};
// TODO: Add fallback insert when bulk insert fails (e.g. due to
// permission errors)
pub async fn upload_file_bulk(
-pool: &Pool<sqlx::Any>,
+pool: &Pool<sqlx::Mssql>,
file_name: &String,
table_name: &String,
// Mappings from column in file -> column in db
@@ -19,34 +23,42 @@ pub async fn upload_file_bulk(
// TODO: Test if the table already exists. If it doesn't, try creating the table
// First try a bulk insert command
-let result = match pool.any_kind() {
-sqlx::any::AnyKind::Mssql => {
-sqlx::query(&format!("BULK INSERT {} FROM {}", table_name, file_name))
-.execute(pool)
-.await
-}
-};
-let mut rows_affected = match &result {
-Result::Ok(result) => result.rows_affected(),
-// TODO: Log error
-Err(error) => 0_u64,
-};
+// let result = match pool.any_kind() {
+//     sqlx::any::AnyKind::Mssql => {
+let result = sqlx::query(&format!("BULK INSERT {} FROM {}", table_name, file_name))
+.execute(pool)
+.await?;
+// }
+// };
+let mut rows_affected = result.rows_affected();
+// let mut rows_affected = match &result {
+//     Result::Ok(result) => result.rows_affected(),
+//     // TODO: Log error
+//     Err(error) => 0_u64,
+// };
// TODO: Adjust for various dbmss
-if rows_affected == 0 {
+if let Err(_) = result {
let rows: Vec<HashMap<String, String>> = vec![];
let BIND_LIMIT: usize = 65535;
// TODO: Use csv to read from file
// TODO: When bulk insert fails, Fall back to sql batched insert
// TODO: Columns to insert... needs some kind of mapping from file column name <-> db column
let mut query_builder = QueryBuilder::new(format!("INSERT INTO {}({}) ", table_name, ""));
-query_builder.push_values(rows, |mut b, row| {
+// TODO: Iterate over all values in file, not the limit
+query_builder.push_values(&rows[0..BIND_LIMIT], |mut b, row| {
b.push_bind(row.get("s"));
});
-let mut query = query_builder.build();
+let mut query_builder = query_builder;
// TODO: Looks like this issue: https://github.com/launchbadge/sqlx/issues/1978
// Turns out we need v0.7 for this to not bug out, but mssql is only supported in versions before v0.7, so right now we can't use sqlx for this unless we explicitly specify mssql only, not Any, as the db type...
let query = query_builder.build();
let result = query.execute(pool).await?;
rows_affected = result.rows_affected();
}