Start reading of product definitions

This commit is contained in:
Piv
2023-02-20 19:38:25 +10:30
parent 3d6042d929
commit 7bce9578df

@@ -18,6 +18,7 @@ enum Component {
    Field(String),
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] // Copy so build_from can be read out of a Definition without moving it
enum BuildFrom {
    Service,
    Transfer,
@@ -28,12 +29,35 @@ enum BuildFrom {
    LinkedDataset,
}
impl From<&String> for BuildFrom {
    fn from(string: &String) -> Self {
        match string.as_str() {
            "S" => BuildFrom::Service,
            "E" => BuildFrom::Encounter,
            "CP" => BuildFrom::CodingProcedure,
            "CD" => BuildFrom::CodingDiagnosis,
            "T" => BuildFrom::Transfer,
            "BS" => BuildFrom::LinkedDataset,
            // The match has to be exhaustive; treat unknown codes as a hard error for now
            other => panic!("unrecognised BuildFrom code: {}", other),
        }
    }
}
enum Frequency {
    OnePerSource,
    DailyOrChangeInWard,
    Daily,
}
impl From<&String> for Frequency {
    fn from(frequency: &String) -> Self {
        match frequency.as_str() {
            "S" => Frequency::OnePerSource,
            "DOCW" => Frequency::DailyOrChangeInWard,
            "D" => Frequency::Daily,
            other => panic!("unrecognised Frequency code: {}", other),
        }
    }
}
enum Quantity {
    Constant(f64),
    Extra(String),
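
Because `From` cannot fail, the two conversions above have to panic (or silently pick a default) when they meet a code they don't recognise. If bad codes in the definitions file should surface as ordinary errors instead, `TryFrom` is the usual shape for that; a minimal sketch for `BuildFrom`, assuming the same codes and variant names:

use std::convert::TryFrom;

impl TryFrom<&str> for BuildFrom {
    type Error = String;

    // Fallible conversion: unknown codes become an Err instead of a panic
    fn try_from(code: &str) -> Result<Self, Self::Error> {
        match code {
            "S" => Ok(BuildFrom::Service),
            "E" => Ok(BuildFrom::Encounter),
            "CP" => Ok(BuildFrom::CodingProcedure),
            "CD" => Ok(BuildFrom::CodingDiagnosis),
            "T" => Ok(BuildFrom::Transfer),
            "BS" => Ok(BuildFrom::LinkedDataset),
            other => Err(format!("unrecognised BuildFrom code: {}", other)),
        }
    }
}

The call site would then be something like BuildFrom::try_from(record.get("BuildFrom").unwrap().as_str()), which returns a Result the read loop can propagate or log rather than aborting the whole run.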
@@ -99,7 +123,7 @@ where
    // TODO: Looks kind of bad, any other way around it? I'd rather not have to depend on crossbeam as well
    O: Write + Send + 'static,
{
    let mut all_definitions: HashMap<String, Definition> = HashMap::new();
    // Partition the rules by the build from type, so that we'll run all the rules at once for a particular file, which should be much faster
    // than opening files and scanning one at a time. Could also do batches in files
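
One thing to note about keying the partition by `BuildFrom`: a plain `HashMap<BuildFrom, Definition>` holds a single definition per source type, so if two definitions share the same `BuildFrom` the second silently replaces the first. Grouping into a `Vec` per key is one way to express the partition described in the comment above; a minimal sketch, assuming `Definition` and `BuildFrom` as in this commit and `BuildFrom` deriving `Copy`:

use std::collections::HashMap;

// Group definitions by the file type they are built from, so each source file
// is opened once and every rule for it runs over the same scan.
fn partition_by_build_from(
    definitions: impl IntoIterator<Item = Definition>,
) -> HashMap<BuildFrom, Vec<Definition>> {
    let mut partitioned: HashMap<BuildFrom, Vec<Definition>> = HashMap::new();
    for definition in definitions {
        partitioned
            .entry(definition.build_from)
            .or_insert_with(Vec::new)
            .push(definition);
    }
    partitioned
}

The read loop below could keep collecting into the name-keyed map (the Filter/Component/Constraint rows presumably need to look their definition up by name) and then call partition_by_build_from(all_definitions.into_values()) in place of the per-key collect at the end.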
@@ -108,7 +132,35 @@ where
    for record in definitions.deserialize::<HashMap<String, String>>() {
        let record = record?;
        // Get the type, then switch based on that, as that's how we determine whether we've got a definition/filter/component/constraint (definition should always come first)
        let record_type = record.get("Type").unwrap();
        match record_type.as_str() {
"Definition" => {
let build_quantity =
all_definitions.insert(
record.get("Name").unwrap().to_owned(),
Definition {
name: record.get("Name").unwrap().to_owned(),
components: vec![],
filters: vec![],
constraints: vec![],
build_from: BuildFrom::from(record.get("BuildFrom").unwrap()),
frequency: Frequency::from(record.get("Frequency").unwrap()),
quantity: ,
duration_fallback: (),
},
);
            }
"Filter" => {}
"Component" => {}
"Constraint" => {}
_ => continue,
}
}
    let mut mapped_definitions: HashMap<BuildFrom, Definition> = all_definitions
        .into_values()
        .map(|value| (value.build_from, value))
        .collect();
    // Then read through each file type line by line if there are definitions for that type, and process all records (read into memory the batch size)
    // Probably output to a separate thread (or maybe some kind of limited queue?) to write to disk. Try on same thread to start, then if it's too slow
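
For the separate writer thread and limited queue mentioned above (and the earlier TODO about not wanting a crossbeam dependency), the standard library's bounded channel already covers it: `std::sync::mpsc::sync_channel` blocks the sender once the queue is full, which caps memory without another crate. A minimal sketch against the existing `O: Write + Send + 'static` bound, assuming the output records can be rendered to `String` (the real output type isn't settled in this commit):

use std::io::Write;
use std::sync::mpsc::{sync_channel, SyncSender};
use std::thread;

// Writer thread fed by a bounded channel; senders block once `capacity`
// records are queued, so the reader can never run arbitrarily far ahead.
fn spawn_writer<O>(
    mut output: O,
    capacity: usize,
) -> (SyncSender<String>, thread::JoinHandle<std::io::Result<()>>)
where
    O: Write + Send + 'static,
{
    let (sender, receiver) = sync_channel::<String>(capacity);
    let handle = thread::spawn(move || {
        // Drains until every sender has been dropped, then flushes and exits
        for line in receiver {
            writeln!(output, "{}", line)?;
        }
        output.flush()
    });
    (sender, handle)
}

The processing loop would send each finished record, drop the sender when it's done so the writer thread drains and stops, and then join the handle to surface any I/O error.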