use std::marker::PhantomData;
use std::path::PathBuf;
use num::{one, zero, ToPrimitive};
use super::Position;
use super::PrimInt;
use super::WithHyperAstPositionConverter;
use crate::position::building;
use crate::types::{
self, Children, HyperAST, HyperType, IterableChildren, LabelStore, Labeled, NodeStore,
TypeStore, WithChildren, WithSerialization,
};
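/// Converts offsets over the tree *without* space nodes (`no_spaces`) into the
/// corresponding child offsets in the tree *with* spaces, walking down from `root`.
///
/// Directory nodes are assumed to have no space children, so their offsets are
/// passed through unchanged. Returns the with-spaces offsets as a 1-tuple.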
pub fn path_with_spaces<'store, HAST, It: Iterator>(
root: HAST::IdN,
no_spaces: &mut It,
stores: &'store HAST,
) -> (Vec<It::Item>,)
where
It::Item: Clone + PrimInt,
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren<ChildIdx = It::Item>,
{
let mut x = root;
let mut path_ids = vec![];
let mut with_spaces = vec![];
let mut path = vec![];
for mut o in &mut *no_spaces {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() || t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
}
let mut with_s_idx = zero();
if let Some(cs) = b.children() {
let cs = cs.clone();
if !t.is_directory() {
for y in cs.iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
if o == zero() {
break;
}
o = o - one();
}
with_s_idx = with_s_idx + one();
}
} else {
with_s_idx = o;
}
if let Some(a) = cs.get(with_s_idx) {
x = a.clone();
with_spaces.push(with_s_idx);
path_ids.push(x.clone());
} else {
// The resolved index is out of bounds; stop here.
dbg!();
break;
}
} else {
// The node has no children, but offsets remain; stop here.
dbg!();
break;
}
}
if let Some(x) = no_spaces.next() {
dbg!(x);
panic!("no_spaces iterator was not fully consumed")
}
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() || t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
}
path_ids.reverse();
(with_spaces,)
}
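// Converter-based variant of `path_with_spaces`, tagged with the `NoSpace`
// node filter; the method is still a stub.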
impl<'store, 'src, Idx: PrimInt, HAST>
WithHyperAstPositionConverter<
'store,
'src,
Filtered<super::offsets::Offsets<Idx>, node_filters::NoSpace>,
HAST,
>
{
pub fn path_with_spaces<It: Iterator>(
root: HAST::IdN,
no_spaces: &mut It,
stores: &'store HAST,
) -> Filtered<Vec<It::Item>, node_filters::Full>
where
It::Item: Clone + PrimInt,
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren<ChildIdx = It::Item>,
{
// TODO: not implemented yet.
todo!()
}
}
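/// Intended to compute a global position from no-space offsets using only a
/// node store and per-node statistics; currently a stub.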
pub fn global_pos_with_spaces<'store, T, NS, It: Iterator>(
root: T::TreeId,
no_spaces: &mut It,
node_store: &'store NS,
) -> (Vec<It::Item>,)
where
It::Item: Clone + PrimInt,
T::TreeId: Clone,
NS: 'store + types::NodeStore<T::TreeId, R<'store> = T>,
T: types::Tree<ChildIdx = It::Item> + types::WithStats,
{
// TODO: not implemented yet.
todo!()
}
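/// Thin wrapper around [`compute_position_and_nodes_with_no_spaces`] that keeps
/// only one node id from the resolved path alongside the position and the
/// no-space offsets.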
pub fn compute_position_with_no_spaces<'store, HAST, It: Iterator>(
root: HAST::IdN,
offsets: &mut It,
stores: &'store HAST,
) -> (Position, HAST::IdN, Vec<It::Item>)
where
It::Item: Clone + PrimInt,
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren<ChildIdx = It::Item>,
{
let (pos, mut path_ids, no_spaces) =
compute_position_and_nodes_with_no_spaces(root, offsets, stores);
(pos, path_ids.pop().expect("resolved path should not be empty"), no_spaces)
}
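/// Resolves a path of child offsets (over the tree *with* spaces) from `root`,
/// returning the file/byte [`Position`] of the reached node, the node ids along
/// the path (deepest first), and the equivalent offsets with space nodes
/// filtered out.
///
/// Minimal usage sketch, assuming hypothetical `root`, `offsets` and `stores`
/// values satisfying the bounds:
///
/// ```ignore
/// let mut it = offsets.iter().copied();
/// let (pos, nodes, no_spaces) =
///     compute_position_and_nodes_with_no_spaces(root, &mut it, &stores);
/// ```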
pub fn compute_position_and_nodes_with_no_spaces<'store, HAST, It>(
root: HAST::IdN,
offsets: &mut It,
stores: &'store HAST,
) -> (Position, Vec<HAST::IdN>, Vec<It::Item>)
where
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren<ChildIdx = It::Item>,
It: Iterator,
It::Item: Clone + PrimInt,
{
let mut offset = 0;
let mut x = root;
let mut path_ids = vec![];
let mut no_spaces = vec![];
let mut path = vec![];
for o in &mut *offsets {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() || t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
}
let mut no_s_idx = zero();
if let Some(cs) = b.children() {
let cs = cs.clone();
if !t.is_directory() {
for y in cs.before(o.clone()).iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
no_s_idx = no_s_idx + one();
}
offset += b.try_bytes_len().unwrap().to_usize().unwrap();
}
} else {
no_s_idx = o;
}
if let Some(a) = cs.get(o) {
x = a.clone();
no_spaces.push(no_s_idx);
path_ids.push(x.clone());
} else {
// The resolved index is out of bounds; stop here.
dbg!();
break;
}
} else {
// The node has no children, but offsets remain; stop here.
dbg!();
break;
}
}
assert!(offsets.next().is_none());
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() || t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
}
let len = if !t.is_directory() {
b.try_bytes_len().unwrap().to_usize().unwrap()
} else {
0
};
let file = PathBuf::from_iter(path.iter());
path_ids.reverse();
(Position::new(file, offset, len), path_ids, no_spaces)
}
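/// Marker types distinguishing whether offsets refer to the full tree (spaces
/// included) or to the tree with space nodes filtered out.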
mod node_filters {
pub struct NoSpace;
pub struct Full;
}
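/// Wraps a value together with a [`node_filters`] marker recording which kind
/// of offsets it carries.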
pub struct Filtered<T, F>(T, std::marker::PhantomData<F>);
impl<T, F> From<T> for Filtered<T, F> {
fn from(value: T) -> Self {
Self(value, std::marker::PhantomData)
}
}
type PathNoSpace<IdN, Idx> =
Filtered<super::offsets::RootedOffsets<IdN, Idx>, node_filters::NoSpace>;
type SpFull<IdN, Idx> =
Filtered<super::offsets_and_nodes::StructuralPosition<IdN, Idx>, node_filters::NoSpace>;
type FileAndOffsetFull =
Filtered<super::file_and_offset::Position<PathBuf, usize>, node_filters::Full>;
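// Position conversions driven by a rooted pre-order offset accessor over the
// full (space-containing) tree. The `compute_*` methods below are successive
// reworkings of the same top-down traversal; some are still work in progress.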
impl<'store, 'src, HAST, S> WithHyperAstPositionConverter<'store, 'src, S, HAST>
where
HAST: HyperAST<'store>,
S: super::position_accessors::WithPreOrderOffsets<Idx = HAST::Idx>,
S: super::position_accessors::RootedPosition<HAST::IdN>,
S: super::node_filter_traits::Full,
{
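/// Top-down traversal computing the file/byte-offset position of the addressed
/// node; building the structural (offsets + nodes) counterpart is still a TODO.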
pub fn compute_multi_position_with_no_spaces(
&self,
) -> (FileAndOffsetFull, SpFull<HAST::IdN, HAST::Idx>)
where
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren,
{
let stores = self.stores;
let mut x = self.src.root();
let mut offset = 0;
let mut path_ids = vec![];
let mut no_spaces = vec![];
let mut path = vec![];
let mut offsets_iter = self.src.iter();
loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() || t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
}
let (cs, o) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(o)) => (cs, o),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => break,
};
let cs = cs.clone();
let mut no_s_idx = zero();
if !t.is_directory() {
for y in cs.before(o.clone()).iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
no_s_idx = no_s_idx + one();
}
offset += b.try_bytes_len().unwrap().to_usize().unwrap();
}
} else {
no_s_idx = o;
}
if let Some(a) = cs.get(o) {
x = a.clone();
no_spaces.push(no_s_idx);
path_ids.push(x.clone());
} else {
// The resolved index is out of bounds; stop here.
dbg!();
break;
}
}
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
let len = if !t.is_directory() {
b.try_bytes_len().unwrap().to_usize().unwrap()
} else {
0
};
let file = PathBuf::from_iter(path.iter());
path_ids.reverse();
no_spaces.reverse();
// TODO: also build the structural (offsets + nodes) position.
let o_and_n = todo!();
(Position::new(file, offset, len).into(), o_and_n)
}
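/// Variant of the traversal above that splits the walk into a directory phase
/// and an in-file phase and threads the file/offset part through
/// [`FileAndOffsetPositionBuilder`]; still incomplete.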
fn compute_multi_position_with_no_spaces2(
&self,
) -> (FileAndOffsetFull, SpFull<HAST::IdN, HAST::Idx>)
where
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren,
for<'b> <HAST::T as WithChildren>::Children<'b>: Clone,
{
let stores = self.stores;
let mut x = self.src.root();
let mut offsets_iter = self.src.iter();
let mut file_path = PathBuf::default();
let mut offset = 0;
let mut path_ids = vec![];
let mut no_spaces = vec![];
let mut path = vec![];
let mut pos_builder: FileAndOffsetPositionBuilder<_, usize> = {
let (b, t) = loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
if t.is_directory() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
file_path.push(l);
} else {
break (b, t);
}
let (cs, o) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(o)) => (cs.clone(), o),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => return todo!(),
};
let a = cs.get(o).expect("no child at path");
no_spaces.push(o);
path_ids.push(a.clone());
x = a.clone();
};
if t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
path.push(l);
file_path.push(l);
}
file_path.into()
};
let (b, t) = loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
assert!(!t.is_directory());
let (cs, o) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(o)) => (cs.clone(), o),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => break (b, t),
};
// `t` can never be a directory here (asserted above), so walk the children
// preceding `o` and accumulate their byte lengths.
let mut no_s_idx = zero();
for y in cs.before(o.clone()).iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
no_s_idx = no_s_idx + one();
}
let len = b.try_bytes_len().unwrap().to_usize().unwrap();
offset += len;
pos_builder.inc_offset(len);
}
let a = cs.get(o).expect("no child at path");
no_spaces.push(no_s_idx);
path_ids.push(a.clone());
x = a.clone();
};
let len = if !t.is_directory() {
b.try_bytes_len().unwrap().to_usize().unwrap()
} else {
0
};
let file = PathBuf::from_iter(path.iter());
path_ids.reverse();
no_spaces.reverse();
// TODO: also build the structural (offsets + nodes) position.
let o_and_n = todo!();
(Position::new(file, offset, len).into(), o_and_n)
}
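/// Same traversal expressed against the [`TopDownPosBuilder`] abstraction, so
/// the produced representation is chosen by the caller through `B`.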
fn compute_multi_position_with_no_spaces3<B>(&self) -> B::Prepared
where
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren,
for<'b> <HAST::T as WithChildren>::Children<'b>: Clone,
B: TopDownPosBuilder<HAST::IdN, HAST::Idx, usize, NoSpacePrepareParams<HAST::Idx>>
+ Default,
{
let mut builder: B = Default::default();
let stores = self.stores;
let mut x = self.src.root();
let mut offsets_iter = self.src.iter();
let mut builder = {
loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
let l = if t.is_directory() {
stores.label_store().resolve(b.get_label_unchecked())
} else if t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
break builder.seal_path(l);
} else {
break builder.seal_without_path();
};
let (cs, idx) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(o)) => (cs.clone(), o),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => return builder.finish(x),
};
builder.push(x, idx, l, ());
let a = cs.get(idx).expect("no child at path");
x = a.clone();
}
};
let (b, t) = loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
assert!(!t.is_directory());
let (cs, idx) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(idx)) => (cs.clone(), idx),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => break (b, t),
};
let mut no_s_idx = zero();
let mut byte_offset = 0;
for y in cs.before(idx.clone()).iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
no_s_idx = no_s_idx + one();
}
let len = b.try_bytes_len().unwrap().to_usize().unwrap();
byte_offset += len;
}
builder.push(x, idx, byte_offset, (no_s_idx,));
let a = cs.get(idx).expect("no child at path");
x = a.clone();
};
let len = if !t.is_directory() {
b.try_bytes_len().unwrap()
} else {
0
};
let len = num::cast(len).unwrap();
builder.finish(x, len, ())
}
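/// Same traversal expressed with the staged builders from
/// [`building::top_down`]: directory names are pushed first, the builder is
/// sealed on the file name, then per-level byte offsets and child indices are
/// pushed until the addressed node is reached.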
pub fn compute_no_spaces<O, B>(&self) -> O
where
HAST::IdN: Clone,
HAST: HyperAST<'store>,
HAST::T: WithSerialization + WithChildren,
B: building::top_down::ReceiveDir<HAST::IdN, HAST::Idx, O>
+ building::top_down::CreateBuilder,
B::SB1<O>: building::top_down::ReceiveInFile<HAST::IdN, HAST::Idx, usize, O>,
{
let mut builder: B = building::top_down::CreateBuilder::create();
let stores = self.stores;
let mut x = self.src.root();
let mut offsets_iter = self.src.iter();
use building::{top_down::ReceiveIdx, Transition};
let mut builder: B::SB1<O> = {
loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
let l = if t.is_directory() {
stores.label_store().resolve(b.get_label_unchecked())
} else if t.is_file() {
let l = stores.label_store().resolve(b.get_label_unchecked());
break builder.set_file_name(l);
} else {
break builder.transit();
};
let (cs, idx) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(o)) => (cs.clone(), o),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => return builder.set_node(x),
};
use building::top_down::ReceiveDirName;
builder = builder.push(x).push(idx).push(l);
let a = cs.get(idx).expect("no child at path");
x = a.clone();
}
};
let (b, t) = loop {
let b = stores.node_store().resolve(&x);
let t = stores.type_store().resolve_type(&b);
assert!(!t.is_directory());
let (cs, idx) = match (b.children(), offsets_iter.next()) {
(Some(cs), Some(idx)) => (cs.clone(), idx),
(None, Some(_)) => panic!("no children left, but offsets remain"),
_ => break (b, t),
};
let mut no_s_idx = zero();
let mut byte_offset = 0;
for y in cs.before(idx.clone()).iter_children() {
let b = stores.node_store().resolve(y);
if !stores.type_store().resolve_type(&b).is_spaces() {
no_s_idx = no_s_idx + one();
}
let len = b.try_bytes_len().unwrap().to_usize().unwrap();
byte_offset += len;
}
use building::top_down::{ReceiveIdxNoSpace, ReceiveOffset, ReceiveParent};
builder = builder.push(x).push(idx).push(byte_offset).push(no_s_idx);
let a = cs.get(idx).expect("no child at path");
x = a.clone();
};
let len = if !t.is_directory() {
b.try_bytes_len().unwrap()
} else {
0
};
let len = num::cast(len).unwrap();
use building::top_down::SetNode;
use building::SetLen;
builder.set(len).set_node(x)
}
}
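/// Extra per-step data threaded through the builders when no-space offsets are
/// also collected: one filtered index per in-file child (`C`), nothing for
/// directory steps (`F`) or at the end (`L`).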
#[derive(Default)]
pub struct NoSpacePrepareParams<Idx>(PhantomData<Idx>);
impl<Idx> AdditionalPrepareParams for NoSpacePrepareParams<Idx> {
type C = (Idx,);
type L = ();
}
impl<Idx> AdditionalPrepareFileParams for NoSpacePrepareParams<Idx> {
type F = ();
}
pub trait AdditionalPrepareParams {
type C;
type L;
}
impl AdditionalPrepareParams for () {
type C = ();
type L = ();
}
impl AdditionalPrepareFileParams for () {
type F = ();
}
pub trait AdditionalPrepareFileParams: AdditionalPrepareParams {
type F;
}
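/// Builder fed top-down while a path is resolved: directory steps first, then,
/// once the file is reached (`seal_path`), in-file steps carrying byte offsets.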
pub trait TopDownPosBuilder<IdN, Idx, IdO, Additional: AdditionalPrepareFileParams = ()> {
type Prepared;
type SealedFile: SealedFileTopDownPosBuilder<
IdN,
Idx,
IdO,
Additional,
Prepared = Self::Prepared,
>;
fn seal_path(self, file_name: &str) -> Self::SealedFile;
fn seal_without_path(self) -> Self::SealedFile;
fn push(&mut self, parent: IdN, idx: Idx, dir_name: &str, additional: Additional::F);
fn finish(self, node: IdN) -> Self::Prepared;
}
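/// Continuation of [`TopDownPosBuilder`] once the file name has been fixed.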
pub trait SealedFileTopDownPosBuilder<IdN, Idx, IdO, Params: AdditionalPrepareParams = ()> {
type Prepared;
fn push(&mut self, parent: IdN, idx: Idx, offset: IdO, params: Params::C);
fn finish(self, node: IdN, len: Idx, additional: Params::L) -> Self::Prepared;
}
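/// Accumulates parents, child offsets, filtered offsets, the file path and the
/// byte range while a path is resolved; finished into a
/// [`TopDownPositionBuilder`].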
struct TopDownPositionPreparer<IdN, Idx, IdO> {
parents: Vec<IdN>,
offsets: Vec<Idx>,
filtered_offsets: Vec<Idx>,
file: PathBuf,
range: Option<std::ops::Range<IdO>>,
}
impl<IdN, Idx: PrimInt, IdO: PrimInt + Default>
TopDownPosBuilder<IdN, Idx, IdO, NoSpacePrepareParams<Idx>>
for TopDownPositionPreparer<IdN, Idx, IdO>
{
type Prepared = TopDownPositionBuilder<IdN, Idx, IdO>;
type SealedFile = TopDownPositionPreparer<IdN, Idx, IdO>;
fn seal_path(mut self, file_name: &str) -> Self::SealedFile {
self.file.push(file_name);
// Entering the file: initialize the byte range that in-file pushes will advance.
self.range = Some(Default::default());
self
}
fn seal_without_path(mut self) -> Self::SealedFile {
self.range = Some(Default::default());
self
}
fn push(&mut self, parent: IdN, idx: Idx, dir_name: &str, _additional: ()) {
self.parents.push(parent);
self.offsets.push(idx);
self.file.push(dir_name);
}
fn finish(self, node: IdN) -> Self::Prepared {
debug_assert!(self.range.is_none());
TopDownPositionBuilder {
parents: self.parents,
offsets: self.offsets,
file: self.file,
range: Some(Default::default()),
node,
}
}
}
impl<IdN, Idx: PrimInt, IdO: PrimInt>
SealedFileTopDownPosBuilder<IdN, Idx, IdO, NoSpacePrepareParams<Idx>>
for TopDownPositionPreparer<IdN, Idx, IdO>
{
type Prepared = TopDownPositionBuilder<IdN, Idx, IdO>;
fn push(&mut self, parent: IdN, idx: Idx, offset: IdO, (no_s_idx,): (Idx,)) {
self.parents.push(parent);
self.offsets.push(idx);
// Advance by the byte offset accumulated at this level, not by the child index.
self.range.as_mut().unwrap().start += offset;
self.filtered_offsets.push(no_s_idx);
}
fn finish(self, node: IdN, len: Idx, _additional: ()) -> Self::Prepared {
let mut range = self.range.unwrap();
// The node spans `len` bytes starting at the accumulated byte offset.
range.end = range.start + num::cast(len).unwrap();
TopDownPositionBuilder {
parents: self.parents,
offsets: self.offsets,
file: self.file,
range: Some(range),
node,
}
}
}
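/// Fully resolved top-down position (parents, offsets, file and byte range),
/// intended to be consumed elsewhere in the parent module.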
pub(super) struct TopDownPositionBuilder<IdN, Idx, IdO> {
pub(super) parents: Vec<IdN>,
pub(super) offsets: Vec<Idx>,
pub(super) file: PathBuf,
pub(super) range: Option<std::ops::Range<IdO>>,
pub(super) node: IdN,
}
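/// Bottom-up counterparts of the top-down builders: offsets are pushed from the
/// node towards the root, and directory names only after `seal_offset`.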
trait BottomUpPosBuilder<IdN, Idx> {
type F0;
type S: SealedOffsetBottomUpPosBuilder<IdN, Idx>;
fn seal_offset(self) -> Self::S;
fn push(&mut self, node: IdN, offset: Idx);
fn finish(self, root: IdN) -> Self::F0;
}
trait SealedOffsetBottomUpPosBuilder<IdN, Idx> {
type F;
fn push(&mut self, node: IdN, offset: Idx, name: &str);
fn finish(self, root: IdN) -> Self::F;
}
impl<F, T: num::Zero> From<F> for FileAndOffsetPositionBuilder<F, T> {
fn from(value: F) -> Self {
Self {
path: value,
offset: num::zero(),
}
}
}
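/// Incrementally builds a `file_and_offset::Position`: start from a file path,
/// accumulate byte offsets, then `build` with the final length.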
struct FileAndOffsetPositionBuilder<F, T> {
path: F,
offset: T,
}
impl<F, T: PrimInt> FileAndOffsetPositionBuilder<F, T> {
fn inc_offset(&mut self, o: T) -> &mut Self {
self.offset += o;
self
}
fn build(self, len: T) -> super::file_and_offset::Position<F, T> {
super::file_and_offset::Position::new(self.path, self.offset, len)
}
}