Start reading of product definitions
This commit is contained in:
@@ -18,6 +18,7 @@ enum Component {
|
||||
Field(String),
|
||||
}
|
||||
|
||||
/// The source record type a product definition is built from.
///
/// Used as a `HashMap` key (hence `Eq + Hash`) and moved out of a
/// `Definition` by value when partitioning rules by type (hence `Copy`).
///
/// NOTE(review): three variants were elided by the diff view; they are
/// reconstructed here from the `From<&String>` impl's match arms
/// ("E", "CP", "CD") — confirm their declaration order against the
/// full file before relying on `Ord`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum BuildFrom {
    Service,
    Transfer,
    Encounter,
    CodingProcedure,
    CodingDiagnosis,
    LinkedDataset,
}
|
||||
|
||||
impl From<&String> for BuildFrom {
|
||||
fn from(string: &String) -> Self {
|
||||
match string.as_str() {
|
||||
"S" => BuildFrom::Service,
|
||||
"E" => BuildFrom::Encounter,
|
||||
"CP" => BuildFrom::CodingProcedure,
|
||||
"CD" => BuildFrom::CodingDiagnosis,
|
||||
"T" => BuildFrom::Transfer,
|
||||
"BS" => BuildFrom::LinkedDataset,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// How often a product line is generated for a source record.
///
/// Derives match the sibling `BuildFrom` enum for consistency
/// (`PartialEq, Eq, PartialOrd, Ord, Hash`), plus `Debug` for
/// diagnostics and `Clone, Copy` since the variants carry no data —
/// all purely additive, backward-compatible trait impls.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
enum Frequency {
    /// Emit once per source record ("S").
    OnePerSource,
    /// Emit daily, or whenever the ward changes ("DOCW").
    DailyOrChangeInWard,
    /// Emit once per day ("D").
    Daily,
}
|
||||
|
||||
impl From<&String> for Frequency {
|
||||
fn from(frequency: &String) -> Self {
|
||||
match frequency.as_str() {
|
||||
"S" => Frequency::OnePerSource,
|
||||
"DOCW" => Frequency::DailyOrChangeInWard,
|
||||
"D" => Frequency::Daily,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum Quantity {
|
||||
Constant(f64),
|
||||
Extra(String),
|
||||
@@ -99,7 +123,7 @@ where
|
||||
// TODO: Looks kind of bad, any other way around it? I'd rather not have to depend on crossbeam as well
|
||||
O: Write + Send + 'static,
|
||||
{
|
||||
let mapped_definitions: HashMap<BuildFrom, Definition> = HashMap::new();
|
||||
let mut all_definitions: HashMap<String, Definition> = HashMap::new();
|
||||
// Partition the rules by the build from type, so that we'll run all the rules at once for a particular file, which should be much faster
|
||||
// then opening files and scanning one at a time. Could also do batches in files
|
||||
|
||||
@@ -108,8 +132,36 @@ where
|
||||
for record in definitions.deserialize::<HashMap<String, String>>() {
|
||||
let record = record?;
|
||||
// Get the type, then switch based on that, as that's how we determine whether we've got a definition/filter/component/constraint (definition should always come first)
|
||||
let recordType = record.get("Type").unwrap();
|
||||
match recordType.as_str() {
|
||||
"Definition" => {
|
||||
let build_quantity =
|
||||
all_definitions.insert(
|
||||
record.get("Name").unwrap().to_owned(),
|
||||
Definition {
|
||||
name: record.get("Name").unwrap().to_owned(),
|
||||
components: vec![],
|
||||
filters: vec![],
|
||||
constraints: vec![],
|
||||
build_from: BuildFrom::from(record.get("BuildFrom").unwrap()),
|
||||
frequency: Frequency::from(record.get("Frequency").unwrap()),
|
||||
quantity: ,
|
||||
duration_fallback: (),
|
||||
},
|
||||
);
|
||||
}
|
||||
"Filter" => {}
|
||||
"Component" => {}
|
||||
"Constraint" => {}
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
|
||||
let mut mapped_definitions = all_definitions
|
||||
.into_values()
|
||||
.map(|value| (value.build_from, value))
|
||||
.collect();
|
||||
|
||||
// Then read through each file type line by line if there are definitions for that type, and process all records (read into memory the batch size)
|
||||
// Probably output to a separate thread (or maybe some kind of limited queue?) to write to disk. Try on same thread to start, then if it's too slow
|
||||
// or I want to experiment, split to a separate thread. Probably want to use sync_sender to limit number of messages to a max size in case
|
||||
|
||||
Reference in New Issue
Block a user