Start working on a cache solution. Keep the HTML so I can test parsing in a more controlled way.

This commit is contained in:
2018-12-18 13:05:00 +01:00
parent f6e9a0daa6
commit 97103a9775
3 changed files with 40 additions and 1 deletion

View File

@@ -1,9 +1,10 @@
use std::env;
use directories::ProjectDirs;
use scraper::{Html, Selector};
// Probe: interface for a lookup source that can search for a phone number.
// NOTE(review): this is commit-diff residue — the two `fn search` lines below
// are the removed (old) and added (new) signatures of the same method from the
// diff; as plain source this duplicate declaration would not compile.
trait Probe {
fn search(&mut self, number: &str);
fn search(&mut self, _: &str);
}
// https://www.hitta.se/vem-ringde/{}
@@ -63,7 +64,33 @@ impl Probe for KonsumentInfo {
fn search(&mut self, _: &str) {}
}
// Shared per-run state for the cache helpers; currently just the
// platform-specific project directories (used to locate the cache dir).
struct Context {
dirs: ProjectDirs,
}
/// Constructors and cache accessors for [`Context`]. The cache I/O is still
/// stubbed out in this commit ("start working on a cache solution").
impl Context {
    /// Builds a `Context` holding the platform-specific project directories.
    ///
    /// # Panics
    /// Panics when no valid home directory can be determined, since the
    /// cache location cannot be derived without one. (`expect` with a
    /// message replaces the bare `unwrap` so the failure mode is explicit.)
    fn new() -> Context {
        Context {
            dirs: ProjectDirs::from("com", "logaritmisk", "whoareyou")
                .expect("no valid home directory found; cannot locate cache dir"),
        }
    }

    /// Looks up `key` in the cache. Stub: always reports a miss (`None`).
    /// `_key` is underscore-prefixed to silence the unused-parameter warning
    /// until the lookup is implemented.
    fn cache_get(&mut self, _key: &str) -> Option<Vec<u8>> {
        // TODO: read the cached bytes from `self.dirs.cache_dir()`.
        None
    }

    /// Stores an entry under `key`. Stub: does nothing yet — note it does
    /// not even take a value parameter in this commit.
    fn cache_set(&mut self, _key: &str) {
        // TODO: write the value into `self.dirs.cache_dir()`.
    }
}
fn main() {
if let Some(proj_dirs) = ProjectDirs::from("com", "logaritmisk", "whoareyou") {
println!("{:?}", proj_dirs.cache_dir());
}
std::process::exit(0);
let number = env::args()
.nth(1)
.expect("must specify a number to search for");