;+
; Restore the tokenizer to a position previously saved with the
; `savePos` method.
;
; :Params:
;   memento : in, required, type=structure
;     state structure returned by `savePos`; fields mirror the
;     tokenizer's internal position (lineNumber, tokens, tokenLength,
;     tokenCounter, line)
;-
pro mgfftokenizer::restorePos, memento
compile_opt strictarr
self.lineNumber = memento.lineNumber
; tokens/tokenLength are heap variables, so assign through the pointers
*self.tokens = memento.tokens
*self.tokenLength = memento.tokenLength
self.tokenCounter = memento.tokenCounter
self.line = memento.line
end
;+
; Capture the tokenizer's current position so it can be restored later
; with the `restorePos` method.
;
; :Returns:
;   anonymous structure holding the current line number, token start
;   positions, token lengths, token counter, and line text
;-
function mgfftokenizer::savePos
  compile_opt strictarr

  ; snapshot all mutable position state; pointer contents are copied
  ; by value so later tokenizing does not disturb the saved state
  state = create_struct('lineNumber', self.lineNumber, $
                        'tokens', *self.tokens, $
                        'tokenLength', *self.tokenLength, $
                        'tokenCounter', self.tokenCounter, $
                        'line', self.line)

  return, state
end
;+
; Return the text of the line the tokenizer is currently positioned on.
;
; :Returns:
;   string
;
; :Keywords:
;   number : out, optional, type=long
;     set to a named variable to receive the current line number;
;     reported 1-based (internal lineNumber is 0-based)
;-
function mgfftokenizer::getCurrentLine, number=number
compile_opt strictarr
number = self.lineNumber + 1L
return, self.line
end
;+
; Return the next token of the file, advancing the token counter (and,
; via `done`, the line) as a side effect.
;
; :Returns:
;   string token, or '' when the tokenizer is exhausted
;
; :Keywords:
;   pre_delim : out, optional, type=string
;     set to a named variable to receive the delimiter text immediately
;     preceding the returned token on its line ('' for a token that
;     starts the line at column 0)
;   post_delim : out, optional, type=string
;     set to a named variable to receive the delimiter text immediately
;     following the returned token on its line ('' when the token ends
;     the line)
;   newline : out, optional, type=boolean
;     set to a named variable to receive 1B if the returned token is the
;     first token of a line other than the first line, 0B otherwise
;-
function mgfftokenizer::next, pre_delim=pre_delim, post_delim=post_delim, $
                              newline=newline
  compile_opt strictarr

  newline = 0B

  ; done() also advances to the next line when the current one is
  ; exhausted, so it must be called before indexing into the token arrays
  if (self->done()) then begin
    pre_delim = ''
    post_delim = ''
    return, ''
  endif

  token_start = (*self.tokens)[self.tokenCounter]
  token_length = (*self.tokenLength)[self.tokenCounter]
  token = strmid(self.line, token_start, token_length)

  ; first token of any line after the first signals a line break
  newline = self.tokenCounter eq 0L && self.lineNumber ne 0L

  if (arg_present(pre_delim)) then begin
    if (self.tokenCounter eq 0) then begin
      pre_delim = ''
      ; leading delimiter text before the first token, if any
      if ((*self.tokens)[0] ne 0) then begin
        pre_delim = strmid(self.line, 0, (*self.tokens)[0])
      endif
    endif else begin
      ; text between the end of the previous token and this token
      delim_start = (*self.tokens)[self.tokenCounter - 1L] $
                      + (*self.tokenLength)[self.tokenCounter - 1L]
      delim_length = (*self.tokens)[self.tokenCounter] - delim_start
      pre_delim = strmid(self.line, delim_start, delim_length)
    endelse
  endif

  if (arg_present(post_delim)) then begin
    if (self.tokenCounter eq n_elements(*self.tokens) - 1) then begin
      ; last token on the line: trailing delimiter runs to end-of-line
      post_delim = ''
      delim_start $
        = (*self.tokens)[self.tokenCounter] $
            + (*self.tokenLength)[self.tokenCounter]
      ; BUG FIX: condition was "lt strlen(self.line) - 1", which dropped
      ; a trailing delimiter of exactly one character (e.g. a line ending
      ; in a single space); any position strictly before end-of-line has
      ; remaining text
      if (delim_start lt strlen(self.line)) then begin
        post_delim = strmid(self.line, delim_start)
      endif
    endif else begin
      ; text between the end of this token and the start of the next
      delim_start = (*self.tokens)[self.tokenCounter] $
                      + (*self.tokenLength)[self.tokenCounter]
      delim_length = (*self.tokens)[self.tokenCounter + 1L] - delim_start
      post_delim = strmid(self.line, delim_start, delim_length)
    endelse
  endif

  ++self.tokenCounter

  return, token
end
;+
; Report whether the tokenizer is exhausted; as a side effect, advance
; to the next line (re-tokenizing it) when the current line has no
; tokens left and more lines remain.
;
; :Returns:
;   1B if no tokens remain in the input, 0B otherwise
;-
function mgfftokenizer::done
  compile_opt strictarr

  ; tokens still pending on the current line
  if (self.tokenCounter lt n_elements(*self.tokens)) then return, 0B

  ; current line exhausted and no further lines available
  if (self.lineNumber ge self.nlines - 1L) then return, 1B

  ; move to the next line and split it into tokens
  self.lineNumber += 1L
  self.line = (*self.data)[self.lineNumber]
  *self.tokens = strsplit(self.line, self.pattern, /regex, $
                          length=tokenLengths)
  *self.tokenLength = tokenLengths
  self.tokenCounter = 0L

  return, 0B
end
;+
; Rewind the tokenizer to the beginning of its input, discarding any
; tokenization of the current line.
;-
pro mgfftokenizer::reset
compile_opt strictarr
; discard old token heap variables and allocate fresh, undefined ones
ptr_free, self.tokens, self.tokenLength
; -1 matches the pre-first-line state set up by init
self.lineNumber = -1L
self.tokenCounter = 0L
self.tokens = ptr_new(/allocate_heap)
self.tokenLength = ptr_new(/allocate_heap)
; prime the tokenizer: done() loads and tokenizes the first line as a
; side effect; its return value is intentionally discarded
check = self->done()
end
;+
; Free resources of the tokenizer; called automatically by OBJ_DESTROY.
;-
pro mgfftokenizer::cleanup
compile_opt strictarr
; release all heap variables owned by this object
ptr_free, self.tokens, self.tokenLength, self.data
end
;+
; Create a tokenizer for a given file or string array.
;
; :Returns:
;   1 for success (errors are reported via MESSAGE)
;
; :Params:
;   filename : in, required, type=string/strarr
;     filename of the file to tokenize, or — with STRING_ARRAY set — the
;     text itself, one line per array element
;
; :Keywords:
;   pattern : in, optional, type=string, default='[[:space:]]'
;     regular expression used to split lines into tokens
;   string_array : in, optional, type=boolean
;     set to treat `filename` as the content to tokenize rather than a
;     path to read
;-
function mgfftokenizer::init, filename, pattern=pattern, $
                              string_array=stringArray
  compile_opt strictarr
  on_error, 2

  if (n_params() ne 1) then message, 'filename parameter required'

  ; default: split on any whitespace
  self.pattern = n_elements(pattern) eq 0 ? '[[:space:]]' : pattern

  if (keyword_set(stringArray)) then begin
    ; "filename" already holds the lines to tokenize
    self.nlines = n_elements(filename)
    self.data = ptr_new(filename)
  endif else begin
    if (~file_test(filename)) then message, 'file not found: ' + filename

    self.nlines = file_lines(filename)
    if (self.nlines gt 0) then begin
      contents = strarr(self.nlines)
      openr, lun, filename, /get_lun
      readf, lun, contents
      free_lun, lun
      self.data = ptr_new(contents)
    endif
  endelse

  ; allocate-but-undefined heap variables; done() fills them in when the
  ; first line is tokenized
  self.tokens = ptr_new(/allocate_heap)
  self.tokenLength = ptr_new(/allocate_heap)
  self.tokenCounter = 0L
  ; -1 means "before the first line"
  self.lineNumber = -1L

  return, 1
end
;+
; Define the MGffTokenizer class structure.
;
; :Fields:
;   data
;     pointer to string array of the input lines
;   pattern
;     regular expression used to split lines into tokens
;   lineNumber
;     0-based index of the current line (-1 before the first line)
;   nlines
;     total number of lines in the input
;   line
;     text of the current line
;   tokens
;     pointer to array of token start positions on the current line
;   tokenLength
;     pointer to array of token lengths on the current line
;   tokenCounter
;     index of the next token to return on the current line
;-
pro mgfftokenizer__define
compile_opt strictarr
define = { MGffTokenizer, $
data: ptr_new(), $
pattern: '', $
lineNumber: 0L, $
nlines: 0L, $
line: '', $
tokens: ptr_new(), $
tokenLength: ptr_new(), $
tokenCounter: 0L $
}
end