diff --git a/docs/src/custom_behaviour.md b/docs/src/custom_behaviour.md
index f3a2d8a..85ef75c 100644
--- a/docs/src/custom_behaviour.md
+++ b/docs/src/custom_behaviour.md
@@ -5,7 +5,7 @@ value is sent to:
   - `::AbstractVector{<:Real}` -> [Histogram backend](https://www.tensorflow.org/guide/tensorboard_histograms) as a vector
   - `::StatsBase.Histogram` -> [Histogram backend](https://www.tensorflow.org/guide/tensorboard_histograms)
-  - `(bin_edges, weights)::Tuple{AbstractVector,AbstractVector}` where `length(bin_edges)==length(weights)+1`, is interpreted as an histogram. (*Will be deprecated.* Please use `TBHistogram(edges, weights)` for this).
+  - `::Real` -> Scalar backend
   - `::AbstractArray{<:Colorant}` -> [Image backend](https://www.tensorflow.org/tensorboard/r2/image_summaries)
   - `::Any` -> Text Backend
diff --git a/docs/src/extending_behaviour.md b/docs/src/extending_behaviour.md
index a9537a5..b28cabb 100644
--- a/docs/src/extending_behaviour.md
+++ b/docs/src/extending_behaviour.md
@@ -12,7 +12,7 @@ At the end of this step, every pair in `objects` will be logged to a specific
 backend, according to the following rules:
   - `::AbstractVector{<:Real}` -> [Histogram backend](https://www.tensorflow.org/guide/tensorboard_histograms) as a vector
-  - `::Tuple{AbstractVector,AbstractVector}` [Histogram backend](https://www.tensorflow.org/guide/tensorboard_histograms) as an histogram
+  - `::Real` -> Scalar backend
   - `::AbstractArray{<:Colorant}` -> [Image backend](https://www.tensorflow.org/tensorboard/r2/image_summaries)
   - `::Any` -> Text Backend
diff --git a/docs/src/index.md b/docs/src/index.md
index bcb5215..d589f96 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -111,7 +111,7 @@ at [Reading back TensorBoard data](@ref)
 We also support logging custom types from a the following third-party libraries:
   - [Plots.jl](https://github.com/JuliaPlots/Plots.jl): the `Plots.Plot` type will be rendered to PNG at the resolution specified by the object and logged as an image
   - [PyPlot.jl](https://github.com/JuliaPy/PyPlot.jl): the `PyPlot.Figure` type will be rendered to PNG at the resolution specified by the object and logged as an image
-  - [Gadfly.jl](https://github.com/GiovineItalia/Gadfly.jl) type will be rendered to PNG at the resolution specified by the object and logged as an image. `Cairo` and `Fontconfig` packages must be imported for this functionality to work as it is required by `Gadfly`.
+  - [Gadfly.jl](https://github.com/GiovineItalia/Gadfly.jl): the `Gadfly.Plot` type will be rendered to PNG at the resolution specified by the object and logged as an image. `Cairo` and `Fontconfig` packages must be imported for this functionality to work as it is required by `Gadfly`.
   - [Tracker.jl](https://github.com/FluxML/Tracker.jl): the `TrackedReal` and `TrackedArray` types will be logged as vector data
   - [ValueHistories.jl](https://github.com/JuliaML/ValueHistories.jl): the `MVHistory` type is used to store the deserialized content of .proto files.
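The docs changes above describe the complete type-to-backend dispatch of the logging interface. As a quick orientation, here is a minimal sketch of those rules in use (assuming `TensorBoardLogger`, `StatsBase` and `Logging` are available; the tag names and values are arbitrary):

```julia
using TensorBoardLogger, Logging
using StatsBase: fit, Histogram

logger = TBLogger("content/log", tb_overwrite)

with_logger(logger) do
    @info "dispatch" loss=0.5                                    # ::Real                   -> Scalar backend
    @info "dispatch" samples=randn(100)                          # ::AbstractVector{<:Real} -> Histogram backend
    @info "dispatch" hist=fit(Histogram, randn(1000), -3:0.1:3)  # ::StatsBase.Histogram    -> Histogram backend
    @info "dispatch" note="free-form text"                       # ::Any                    -> Text backend
end
```

`AbstractArray{<:Colorant}` values follow the same pattern and end up in the image backend.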
diff --git a/examples/Histograms.jl b/examples/Histograms.jl
index 5f98c41..88545ce 100644
--- a/examples/Histograms.jl
+++ b/examples/Histograms.jl
@@ -10,9 +10,10 @@ with_logger(logger) do
         x0 = 0.5+i/30; s0 = 0.5/(i/20);
         edges = collect(-5:0.1:5)
         centers = collect(edges[1:end-1] .+0.05)
-        histvals = [exp(-((c-x0)/s0)^2) for c = centers]
+        histvals = s0 * randn(length(centers)) .+ x0
         data_tuple = (edges, histvals)
-        @info "histogram/loggerinterface" autobin=rand(10).+0.1*i manualbin=data_tuple
+        @info "histogram/loggerinterface" autobin=s0 .* randn(100) .+ x0
+        @info "histogram/loggerinterface" manualbin=data_tuple
     end
 end
@@ -21,8 +22,8 @@ for i in 1:100
     x0 = 0.5+i/30; s0 = 0.5/(i/20);
     edges = collect(-5:0.1:5)
     centers = collect(edges[1:end-1] .+0.05)
-    histvals = [exp(-((c-x0)/s0)^2) for c = centers]
+    histvals = s0 * randn(length(centers)) .+ x0
     data_tuple = (edges, histvals)
-    log_histogram(logger, "histogram/explicitinterface/autobin", rand(10).+0.1*i, step = i) #automatic bins
+    log_histogram(logger, "histogram/explicitinterface/autobin", s0 .* randn(100) .+ x0, step = i) #automatic bins
     log_histogram(logger, "histogram/explicitinterface/manualbin", data_tuple, step = i) #manual bins
 end
diff --git a/src/Deserialization/histograms.jl b/src/Deserialization/histograms.jl
index 3126524..dd00b9a 100644
--- a/src/Deserialization/histograms.jl
+++ b/src/Deserialization/histograms.jl
@@ -1,16 +1,4 @@
 function deserialize_histogram_summary(summary::Summary_Value)
-    # custom deserialization
-    if hasproperty(summary, :metadata)
-        histo = summary.value.value
-        if summary.metadata.plugin_data.plugin_name == TB_PLUGIN_JLARRAY_NAME
-            val = reshape(histo.bucket,
-                          reinterpret(Int,
-                                      summary.metadata.plugin_data.content)...)
-
-            return val
-        end
-    end
-
     # deserialize histogramproto
     hist_proto = summary.value.value
     bin_edges = similar(hist_proto.bucket_limit, length(hist_proto.bucket_limit)+1)
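With the `TB_PLUGIN_JLARRAY_NAME` branch removed, everything coming back from the histogram backend is a plain `HistogramProto`, whose `bucket_limit` field holds the upper bin edges and `bucket` the weights (as `histogram_summary` below shows). A rough sketch of that mapping back to a `StatsBase.Histogram`; the helper name is made up for illustration and is not part of the package:

```julia
using StatsBase: Histogram

# Illustrative helper (not TensorBoardLogger API): rebuild a 1-D StatsBase
# histogram from the fields of a deserialized HistogramProto.
function histogramproto_to_histogram(hmin::Real, bucket_limit::AbstractVector, bucket::AbstractVector)
    edges = vcat(hmin, bucket_limit)   # length(edges) == length(bucket) + 1
    return Histogram(edges, bucket)
end
```

Since arrays are no longer written through the custom Julia-array plugin (see `logger_dispatch.jl` further down), this generic path is the only one needed.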
""" function log_vector(logger::TBLogger, name::AbstractString, data::AbstractVector; step=nothing) - summ = SummaryCollection(histogram_summary(name, collect(0:length(data)),data)) + hist = Histogram(collect(0:length(data)), data) + summ = SummaryCollection(histogram_summary(name, hist)) write_event(logger.file, make_event(logger, summ, step=step)) end -function histogram_summary(name::AbstractString, edges::AbstractVector{<:Number}, hist_vals::AbstractVector{<:Number}) - @assert length(edges) == length(hist_vals)+1 - return histogram_summary(name, Histogram(edges, hist_vals)) -end - function histogram_summary(name::AbstractString, hist::Histogram{T,1}) where T edges = first(hist.edges) hist_vals = hist.weights @@ -59,25 +55,3 @@ function histogram_summary(name::AbstractString, hist::Histogram{T,1}) where T hist_vals) return Summary_Value(name, name, nothing, OneOf(:histo, hp)) end - -# Writes to an Histogram summary the flattened version of the array. -# Also stores the shape of the array as a field in a plugin, which allows to -# reconstruct the original shape when read back into Julia -function histogram_arr_summary(name::AbstractString, tensor::AbstractArray) - - smpd = SummaryMetadata_PluginData(TB_PLUGIN_JLARRAY_NAME, reinterpret(UInt8, collect(size(tensor)))) - sm = SummaryMetadata(smpd, name, "", DataClass.DATA_CLASS_TENSOR) - - num = length(tensor) - edges = collect(0:num) - histsum = sum(tensor) - histsumsqr = sum(tensor.^2) - hp = HistogramProto(minimum(edges), maximum(edges), - num, - histsum, - histsumsqr, - edges[2:end], - vec(tensor)) - - return Summary_Value(name, name, sm, OneOf(:histo, hp)) -end diff --git a/src/logger_dispatch.jl b/src/logger_dispatch.jl index a6b3c77..8dd1da9 100644 --- a/src/logger_dispatch.jl +++ b/src/logger_dispatch.jl @@ -79,18 +79,7 @@ summary_impl(name, value::Any) = text_summary(name, value) preprocess(name, hist::Histogram{<:Any,1}, data) = push!(data, name=>hist) summary_impl(name, hist::Histogram) = histogram_summary(name, hist) -# TODO: maybe deprecate? tuple means histogram (only if bins/weights match) -function preprocess(name, (bins,weights)::Tuple{AbstractVector,AbstractVector}, data) - # if ... 
diff --git a/src/logger_dispatch.jl b/src/logger_dispatch.jl
index a6b3c77..8dd1da9 100644
--- a/src/logger_dispatch.jl
+++ b/src/logger_dispatch.jl
@@ -79,18 +79,7 @@ summary_impl(name, value::Any) = text_summary(name, value)
 preprocess(name, hist::Histogram{<:Any,1}, data) = push!(data, name=>hist)
 summary_impl(name, hist::Histogram) = histogram_summary(name, hist)

-# TODO: maybe deprecate? tuple means histogram (only if bins/weights match)
-function preprocess(name, (bins,weights)::Tuple{AbstractVector,AbstractVector}, data)
-    # if ... this is an histogram
-    if length(bins) == length(weights)+1
-        return preprocess(name, Histogram(bins,weights), data)
-    end
-    preprocess(name*"/1", bins, data)
-    preprocess(name*"/2", weights, data)
-end
-
-preprocess(name, val::AbstractArray{<:Real}, data) = push!(data, name=>val)
-summary_impl(name, val::AbstractArray{<:Real}) = histogram_arr_summary(name, val)
+preprocess(name, val::AbstractArray{<:Real}, data) = return preprocess(name, fit(Histogram, collect(vec(val))), data)

 # Split complex numbers into real/complex pairs
 preprocess(name, val::AbstractArray{<:Complex}, data) = push!(data, name*"/re"=>real.(val), name*"/im"=>imag.(val))
diff --git a/test/runtests.jl b/test/runtests.jl
index f674e37..ca969bf 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -5,6 +5,7 @@ using TestImages
 using ImageCore
 using FileIO
 using LightGraphs
+using StatsBase: fit, Histogram

 ENV["DATADEPS_ALWAYS_ACCEPT"] = true
 ENV["GKSwstype"] = "100"
@@ -70,15 +71,15 @@ end
     centers = collect(edges[1:end-1] .+0.05)
     histvals = [exp(-((c-x0)/s0)^2) for c=centers]
     data_tuple = (edges, histvals)
-
-    ss = TensorBoardLogger.histogram_summary("test", edges, histvals)
+    hist = fit(Histogram, histvals, edges)
+    ss = TensorBoardLogger.histogram_summary("test", hist)
     @test isa(ss, TensorBoardLogger.Summary_Value)
     @test ss.tag == "test"
     @test isa(ss.value.value, TensorBoardLogger.HistogramProto)
     @test ss.value.value.min == minimum(edges)
     @test ss.value.value.max == maximum(edges)
     @test all(ss.value.value.bucket_limit .== edges[2:end])
-    @test all(ss.value.value.bucket .== histvals)
+    @test ss.value.value.bucket == hist.weights

     log_histogram(logger, "hist/cust", data_tuple, step=step)
     log_histogram(logger, "hist/cust", rand(100), step=step)
@@ -93,7 +94,7 @@ end
     vals = rand(10)
     @test data == preprocess("test1", vals, data)
     @test first(data[1]) == "test1"
-    @test last(data[1]) == vals
+    @test last(data[1]) == fit(Histogram, collect(vec(vals)))

     vals = rand(ComplexF32, 10)
     preprocess("test2", vals, data)
@@ -104,7 +105,7 @@ end
     vals = rand(10, 10)
     preprocess("test2", vals, data)
-    @test last(data[4]) == vals
+    @test last(data[4]) == fit(Histogram, collect(vec(vals)))
 end
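On the `@info` side, `preprocess` now turns any `AbstractArray{<:Real}` into a fitted `StatsBase.Histogram` before dispatch, which is exactly what the updated tests assert. A small sketch mirroring them (the tag name is arbitrary):

```julia
using TensorBoardLogger
using StatsBase: fit, Histogram

data = Pair{String,Any}[]
vals = rand(10, 10)
TensorBoardLogger.preprocess("weights", vals, data)

@assert first(data[1]) == "weights"                          # tag is unchanged
@assert last(data[1]) == fit(Histogram, collect(vec(vals)))  # value arrives as a binned Histogram
```

Combined with the deserialization change above, arrays logged through the standard interface now round-trip as histograms instead of being reshaped back into Julia arrays.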