now we have an iterator - albeit somewhat slow

Yehowshua Immanuel 2022-05-18 22:57:42 -04:00
parent de08a60f17
commit 0497015783
3 changed files with 27 additions and 45 deletions


@@ -8,3 +8,4 @@ edition = "2021"
 [dependencies]
 num = "0.4"
 clap = { version = "3.1.8", features = ["derive"] }
+next-gen = "0.1.1"


@@ -19,22 +19,11 @@ The first build of the program may take some time.
 ``cargo run --release test-vcd-files/aldec/SPI_Write.vcd``
-## TODO
-- [ ] We need a way to merge lines.
+# TODO
+- [x] We need a way to merge lines.
 - [ ] We need to start regression testing the parser over all files
 - [ ] Take a look at GTKWave parser to compare efficiency.
 - [ ] Send survey to community channel.
-### April 14
-- [ ] store timestamps to struct
-- [ ] Get file loading status
-- [ ] Get all signal scopes
-### April 15
-- [ ] Re-factor to support hooks in the initial file ingest
-- [ ] Modularize
-### April 15
-- [ ] Build tree per signal.
-- [ ] Each signal also comes with a value change buffer to
-      avoid frequent disk readouts.
-# VCD Spec Questions
-- [ ] I'm pretty sure that only one statement per line is allowed.
+### May 18
+- [ ] move while loop into word yielding iterator


@@ -2,6 +2,7 @@ use std::io::prelude::*;
 use std::io;
 use std::fs::File;
 use std::collections::BTreeMap;
+use ::next_gen::prelude::*;
 use num::*;
 use clap::Parser;
@@ -13,24 +14,6 @@ struct Cli {
     path: std::path::PathBuf,
 }
-
-struct Timestamp{
-    file_offset: u64,
-    timestamp: BigInt
-}
-
-struct Cursor{
-    line: u64,
-    col : u64
-}
-
-enum Tokens {
-    Date,
-    End,
-    String,
-    Version,
-    Time,
-}
 
 struct Signal {
     name : String,
     timeline : BTreeMap<BigInt, BigInt>,
@@ -38,23 +21,21 @@ struct Signal {
     parent_index : usize
 }
-fn main() -> std::io::Result<()> {
-    let args = Cli::parse();
-    let space = " ".as_bytes()[0];
-    let file = File::open(&args.path)?;
+#[generator(yield(String))]
+fn yield_words(file : File) {
     let mut reader = io::BufReader::new(file);
     let mut buffer = String::new();
     let mut word_count = 0u64;
-    let mut do_break = false;
+    let mut EOF = false;
     let line_chunk_size = 25;
-    while {!do_break} {
+    while {!EOF} {
         for _ in 0..line_chunk_size {
             let bytes_read = reader.read_line(&mut buffer).unwrap();
             if bytes_read == 0 {
-                do_break = true;
+                EOF = true;
                 break
             }
         }
@@ -62,13 +43,24 @@ fn main() -> std::io::Result<()> {
         let words = buffer.split_ascii_whitespace();
         for word in words {
-            word_count += 1;
+            yield_!(word.to_string());
         }
         buffer.clear();
     }
-    dbg!(word_count);
+}
+
+fn main() -> std::io::Result<()> {
+    let args = Cli::parse();
+    let file = File::open(&args.path)?;
+
+    let mut word_count = 0;
+    mk_gen!(let mut generator = yield_words(file));
+    for word in generator {
+        word_count += 1;
+    }
     Ok(())
 }
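
For readers unfamiliar with the `next-gen` crate, here is a minimal, self-contained sketch of the pattern the new code uses: `#[generator(yield(...))]` turns a function into a suspendable generator, `yield_!` hands one value back to the caller, and `mk_gen!` pins an instance on the stack so it can be driven as an ordinary iterator. The example only mirrors the macro usage visible in the diff above; `yield_numbers` is a hypothetical stand-in for `yield_words` and is not part of this repository.

```rust
// Minimal sketch of the next-gen generator-as-iterator pattern
// (illustrative only; `yield_numbers` does not exist in this repo).
use ::next_gen::prelude::*;

// Declares a generator that yields String items, mirroring
// `#[generator(yield(String))]` on `yield_words` in the diff.
#[generator(yield(String))]
fn yield_numbers(up_to: u32) {
    for n in 0..up_to {
        // Suspend here and hand one item to the consumer.
        yield_!(n.to_string());
    }
}

fn main() {
    // Pin the generator on the stack, then consume it like an iterator,
    // just as the new `main` does with `yield_words(file)` above.
    mk_gen!(let mut generator = yield_numbers(3));
    for s in generator {
        println!("{}", s);
    }
}
```

One plausible reason the commit message calls the iterator "somewhat slow" is that every word is copied into a fresh `String` before being yielded (`yield_!(word.to_string())`), on top of the chunked line reads into `buffer`.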