Commit
Merge pull request #3 from tlnagy/tn/1.0-fixes
update for Julia 1.0
tlnagy authored Oct 13, 2018
2 parents 55905fc + 2f16a2c commit adc97eb
Showing 8 changed files with 30 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -4,7 +4,7 @@ os:
- linux
- osx
julia:
- - 0.6
+ - 1.0
- nightly
notifications:
email: false
2 changes: 0 additions & 2 deletions README.md
@@ -10,8 +10,6 @@ Add FileIO.jl integration for FCS files
```julia
julia> using FileIO

- julia> using FCSFiles
-
julia> flowrun = load("example.fcs")
FCS.FlowSample{Float32}
Machine: LSRFortessa
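With the FCS format expected to be in FileIO's central registry (see the src/FCSFiles.jl change below), the README example no longer needs `using FCSFiles`; FileIO pulls the package in on demand. A minimal usage sketch under that assumption; the file name is illustrative:

```julia
using FileIO

# FileIO recognises the .fcs extension, imports FCSFiles behind the scenes,
# and dispatches to its loader.
flowrun = load("example.fcs")
```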
2 changes: 1 addition & 1 deletion REQUIRE
@@ -1,2 +1,2 @@
- julia 0.6
+ julia 0.7
FileIO
6 changes: 1 addition & 5 deletions src/FCSFiles.jl
@@ -8,11 +8,7 @@ include("parse.jl")

export FlowSample

- try
- add_format(format"FCS", "FCS", [".fcs"])
- end
-
- function FileIO.load(f::File{format"FCS"})
+ function load(f::File{format"FCS"})
open(f) do io
offsets = parse_header(io)

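The module no longer registers the format itself (the `try add_format ... end` guard is gone) and defines a plain `load` instead of extending `FileIO.load`: FileIO is expected to look up the loader module for the FCS format in its own registry and call that module's `load` method. A rough sketch of the hand-off, assuming such a registry entry exists; the file name is illustrative:

```julia
using FileIO

f = query("example.fcs")   # => File{format"FCS"} once the extension/magic bytes are matched
# load(f) then imports the registered loader module (here FCSFiles) and calls its
# unexported load(::File{format"FCS"}) method shown in this diff.
```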
27 changes: 17 additions & 10 deletions src/parse.jl
@@ -1,16 +1,20 @@
function parse_header(io)
seekstart(io)
- version = String(read(io, UInt8, 6))
+ rawversion = Array{UInt8}(undef, 6)
+ read!(io, rawversion)
+ version = String(rawversion)
if "$version" != "FCS3.0" && version != "FCS3.1"
warn("$version files are not guaranteed to work")
end
seek(io, 10)
# start, end positions of TEXT, DATA, and ANALYSIS sections
- offsets = Array{Int64}(6)
+ offsets = Array{Int64}(undef, 6)
for i in 1:6
# offsets are encoded as ASCII strings
- raw_str = String(read(io, UInt8, 8))
- offsets[i] = parse(Int, strip(join(raw_str)))
+ raw_str = Array{UInt8}(undef, 8)
+ read!(io, raw_str)
+ offsets_str = String(raw_str)
+ offsets[i] = parse(Int, strip(join(offsets_str)))
end

# DATA offsets are larger than 99,999,999bytes
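The pattern that repeats through this file is the Julia 1.0 replacement for `read(io, T, n)`: allocate an uninitialized buffer with `undef` and fill it in place with `read!`. A standalone illustration of the idiom (not FCSFiles code; the buffer contents are made up):

```julia
io = IOBuffer("FCS3.1    ")

# Julia 0.6:  version = String(read(io, UInt8, 6))
buf = Array{UInt8}(undef, 6)   # uninitialized 6-byte buffer
read!(io, buf)                 # fill it from the stream
version = String(buf)          # "FCS3.1" (String(::Vector{UInt8}) takes ownership of buf)
```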
@@ -24,14 +28,16 @@ end
function parse_text(io, start_text::Int, end_text::Int)
seek(io, start_text)
# TODO: Check for supplemental TEXT file
- raw_text = String(read(io, UInt8, end_text - start_text + 1))
+ raw_btext = Array{UInt8}(undef, end_text - start_text + 1)
+ read!(io, raw_btext)
+ raw_text = String(raw_btext)
delimiter = raw_text[1]

text_mappings = Dict{String, String}()
# initialize iterator
- prev, state = next(raw_text, start(raw_text))
- while !done(raw_text, state)
- i, state = next(raw_text, state)
+ iter_result = iterate(raw_text)
+ while iter_result !== nothing
+ i, state = iter_result

# found a new key, value pair
if i == '$'
@@ -42,7 +48,7 @@ function parse_text(io, start_text::Int, end_text::Int)
# FCS keywords are case insensitive so force them uppercase
text_mappings["\$"*uppercase(key)] = value
end
- prev = i
+ iter_result = iterate(raw_text, state)
end
text_mappings
end
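This hunk is the mechanical translation from the Julia 0.6 iteration protocol (`start`/`next`/`done`) to the single `iterate` function of 0.7+, which returns either an `(item, state)` tuple or `nothing`. A standalone sketch of the loop shape; the string stands in for `raw_text`:

```julia
raw_text = "|KEY|value|"

iter_result = iterate(raw_text)                    # was: prev, state = next(raw_text, start(raw_text))
while iter_result !== nothing                      # was: while !done(raw_text, state)
    c, state = iter_result                         # was: i, state = next(raw_text, state)
    # ... inspect the character c ...
    global iter_result = iterate(raw_text, state)  # explicit advance at the end of the body
end
```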
@@ -56,7 +62,8 @@ function parse_data(io,
# Add support for data types other than float
(text_mappings["\$DATATYPE"] != "F") && error("Non float32 support not implemented yet. Please see github issues for this project.")

- flat_data = read(io, Float32, (end_data - start_data + 1) ÷ 4)
+ flat_data = Array{Float32}(undef, (end_data - start_data + 1) ÷ 4)
+ read!(io, flat_data)
endian_func = get_endian_func(text_mappings)
map!(endian_func, flat_data, flat_data)

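The event data gets the same `read!` treatment: size the `Float32` vector from the DATA segment length, fill it in place, then fix the byte order with `map!` (the package's `get_endian_func` presumably returns `ntoh` or `ltoh` depending on `$BYTEORD`). A self-contained sketch with made-up values:

```julia
bytes = collect(reinterpret(UInt8, Float32[1.0f0, 2.5f0, -3.0f0]))  # fake DATA segment
io = IOBuffer(bytes)

flat_data = Array{Float32}(undef, length(bytes) ÷ 4)
read!(io, flat_data)              # replaces flat_data = read(io, Float32, n) from 0.6
map!(ltoh, flat_data, flat_data)  # identity on little-endian hosts; ntoh would handle big-endian data
```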
7 changes: 3 additions & 4 deletions src/type.jl
@@ -1,4 +1,4 @@
- immutable FlowSample{T}
+ struct FlowSample{T}
data::Dict{String, Vector{T}}
params::Dict{String, String}
end
@@ -38,6 +38,5 @@ Base.haskey(f::FlowSample, x) = haskey(f.data, x)
Base.getindex(f::FlowSample, key) = f.data[key]
Base.keys(f::FlowSample) = keys(f.data)
Base.values(f::FlowSample) = values(f.data)
- Base.start(iter::FlowSample) = start(iter.data)
- Base.next(iter::FlowSample, state) = next(iter.data, state)
- Base.done(iter::FlowSample, state) = done(iter.data, state)
+ Base.iterate(iter::FlowSample) = Base.iterate(iter.data)
+ Base.iterate(iter::FlowSample, state) = Base.iterate(iter.data, state)
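Swapping the three `Base.start`/`Base.next`/`Base.done` forwards for two `Base.iterate` methods is all it takes to keep `FlowSample` iterable on 1.0, since everything delegates to the wrapped `Dict`. A toy version of the same forwarding pattern (`Wrapper` is illustrative, not part of the package):

```julia
struct Wrapper{T}
    data::Dict{String, Vector{T}}
end

Base.iterate(w::Wrapper) = iterate(w.data)                # first step
Base.iterate(w::Wrapper, state) = iterate(w.data, state)  # subsequent steps

w = Wrapper(Dict("FSC-A" => [1.0, 2.0], "SSC-A" => [3.0, 4.0]))
for (name, values) in w   # works because both iterate methods delegate to the Dict
    println(name, " => ", values)
end
```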
10 changes: 6 additions & 4 deletions src/utils.jl
@@ -29,8 +29,9 @@ and the state of the iterator.
function grab_word(iter, state, delimiter::Char)
word = Char[]
prev = ' '
- while !done(iter, state)
- i, state = next(iter, state)
+ iter_result = iterate(iter, state)
+ while iter_result !== nothing
+ i, state = iter_result

# only add character if the current and previous are both
# delimiters (i.e. escaped) or neither are
@@ -40,6 +41,7 @@ function grab_word(iter, state, delimiter::Char)
else
break
end
+ iter_result = iterate(iter, state)
end
join(word), state
end
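The easy-to-miss part of this conversion is the second hunk: with `iterate` the advance step must be written explicitly at the end of the loop body, while `break` still leaves the loop before advancing. A generic sketch of the resulting shape (`first_field` is illustrative, not the real `grab_word`):

```julia
function first_field(iter, delimiter::Char)
    word = Char[]
    iter_result = iterate(iter)
    while iter_result !== nothing
        c, state = iter_result
        c == delimiter && break             # stop at the delimiter, as grab_word does
        push!(word, c)
        iter_result = iterate(iter, state)  # the explicit advance added in the second hunk
    end
    return join(word)
end

first_field("FCS|3.1", '|')   # "FCS"
```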
@@ -53,7 +55,7 @@ returned by `parse_text`
"""
function verify_text(text_mappings::Dict{String, String})
# get all parameterized keywords $P1N, $P2N, etc
- is_param = [contains(keyword, "n") for keyword in required_keywords]
+ is_param = [occursin("n", keyword) for keyword in required_keywords]

# verify that all non-parameterized keywords are present in the mapping
for non_param in required_keywords[.~is_param]
@@ -69,7 +71,7 @@ function verify_text(text_mappings::Dict{String, String})

for params in required_keywords[is_param]
for i in 1:n_params
- if !haskey(text_mappings, replace(params, "n", i))
+ if !haskey(text_mappings, replace(params, "n"=>i))
error("FCS file is corrupted. It is missing required keyword $non_param in its TEXT section")
end
end
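Two small API renames round out `verify_text`: `contains(haystack, needle)` became `occursin(needle, haystack)` (note the swapped argument order), and string `replace` now takes a `pattern => replacement` pair. Illustrated on a made-up keyword template:

```julia
kw = raw"$PnN"        # a parameterized-keyword template

occursin("n", kw)     # true; Julia 0.6 spelled this contains(kw, "n")
replace(kw, "n" => 1) # "$P1N"; Julia 0.6 spelled this replace(kw, "n", 1)
```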
2 changes: 1 addition & 1 deletion test/runtests.jl
@@ -1,6 +1,6 @@
using FCSFiles
using FileIO
- using Base.Test
+ using Test

flowrun = load("testdata/BD-FACS-Aria-II.fcs")

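The test harness only needs the stdlib rename: `Base.Test` became the standalone `Test` module in 0.7. A minimal check that the new import behaves the same (the assertion is a placeholder, not from the package's tests):

```julia
using Test        # was: using Base.Test

@test 1 + 1 == 2  # @test and @testset are unchanged, only the module moved
```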
