Commit 6c0d38ec authored by Peter Jansweijer's avatar Peter Jansweijer

repaired data_vs_fit_err_clean in case of outliers, repaired single file analysis, added extra note in results.txt
parent fd4c19d6
@@ -494,10 +494,12 @@ def analyze_plot(insitu_file, analyse_single, x, y, name, tolerance, use_itu_cha
     x_clean = x # in [nm]!
     y_clean = y
     t_clean = t
+    data_vs_fit_err_clean = data_vs_fit_err
     for i in sorted(outlier_idx, reverse=True):
         x_clean = numpy.delete(x_clean, i)
         y_clean = numpy.delete(y_clean, i)
         t_clean = numpy.delete(t_clean, i)
+        data_vs_fit_err_clean = numpy.delete(data_vs_fit_err_clean, i)
     # Tackle the case when all datapoints are regarded as outlier due to narrow tolerance
     if len(x_clean) == 0:
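For context, this hunk keeps the fit-residual array in step with the wavelength and temperature arrays while outliers are removed. Below is a minimal sketch of the same clean-up, assuming numpy arrays and an outlier_idx index list as in the diff; the example data are hypothetical:

import numpy

# Hypothetical example data; the real script builds these arrays inside analyze_plot().
x = numpy.array([1550.0, 1550.8, 1551.6, 1552.4])      # wavelengths [nm]
data_vs_fit_err = numpy.array([0.2, 5.1, -0.1, 0.3])   # fit residuals [ps]
outlier_idx = [1]                                       # indices flagged as outliers

# Delete in reverse index order so earlier deletions do not shift the remaining indices.
x_clean = x
data_vs_fit_err_clean = data_vs_fit_err
for i in sorted(outlier_idx, reverse=True):
    x_clean = numpy.delete(x_clean, i)
    data_vs_fit_err_clean = numpy.delete(data_vs_fit_err_clean, i)

# A boolean mask gives the same result in one step.
mask = numpy.ones(len(x), dtype=bool)
mask[outlier_idx] = False
assert numpy.array_equal(data_vs_fit_err_clean, data_vs_fit_err[mask])

Deleting in reverse-sorted index order is what keeps the remaining indices valid; a boolean mask is an equivalent one-shot alternative.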
@@ -593,8 +595,8 @@ def analyze_plot(insitu_file, analyse_single, x, y, name, tolerance, use_itu_cha
     ax2.set_ylabel('data versus fit err [ps]')
     # Calcualte standard deviation of data to fit error and format string
-    stdev_str = "{0:.3f}".format(data_vs_fit_err.std(ddof=1))
-    lns3 = ax2.plot(x_clean, data_vs_fit_err, color='grey', label='data vs. 5-term Sellmeier fit error\n StDev: ' + stdev_str + '[ps]')
+    stdev_str = "{0:.3f}".format(data_vs_fit_err_clean.std(ddof=1))
+    lns3 = ax2.plot(x_clean, data_vs_fit_err_clean, color='grey', label='data vs. 5-term Sellmeier fit error\n StDev: ' + stdev_str + '[ps]')
     lns=lns+lns1+lns2+lns3
     labels=[l.get_label() for l in lns]
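This hunk makes both the quoted standard deviation and the plotted error curve use the cleaned residuals, so the curve matches x_clean in length and the StDev excludes outliers. A minimal, self-contained sketch of that calculation, with a hypothetical residual array:

import numpy

# Hypothetical cleaned residuals [ps]; the script obtains these after outlier removal.
data_vs_fit_err_clean = numpy.array([0.2, -0.1, 0.3, 0.1])

# Sample standard deviation (ddof=1), formatted to three decimals as in the diff.
stdev_str = "{0:.3f}".format(data_vs_fit_err_clean.std(ddof=1))
label = 'data vs. 5-term Sellmeier fit error\n StDev: ' + stdev_str + '[ps]'
print(label)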
@@ -975,8 +977,9 @@ if __name__ == "__main__":
         t = crtt_array["temp"]
         tangent_array, temperature_array, result_line = analyze_plot(insitu_file, analyse_single, x, y, name, tolerance, use_itu_channels, ref_name, fixed_itu_channel, tangent_array, temperature_array)
-        numb_of_meas = numb_of_meas + 1
-        result_file.write(result_line)
+        if not(analyse_single):
+            numb_of_meas = numb_of_meas + 1
+            result_file.write(result_line)
     # After scanning all files in a directory (i.e. not a single file)
     # => add statistics:
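Here the per-file bookkeeping is only done when a whole directory is scanned, which is the "repaired single file analysis" part of the commit message. A minimal sketch of that guard, with analyse_single, result_line and the result file stubbed out as assumptions:

# Hypothetical stand-ins for the script's state.
analyse_single = False   # True when only a single in-situ file is analysed
numb_of_meas = 0
result_lines = []        # stands in for result_file.write()

for result_line in ["file_1: tangent ...\n", "file_2: tangent ...\n"]:
    # Per-file results only count towards the directory statistics
    # when a whole directory is being scanned.
    if not analyse_single:
        numb_of_meas = numb_of_meas + 1
        result_lines.append(result_line)

print(numb_of_meas, "measurements collected")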
@@ -987,6 +990,8 @@ if __name__ == "__main__":
     result_file.write("Mean: " + str(tangent_array.mean()) +"\n")
     result_file.write("StDev: " + str(tangent_array.std(ddof=1)) +"\n")
     result_file.write("StErr: " + str(tangent_array.std(ddof=1)/(numb_of_meas**0.5)) +"\n")
+    result_file.write("Note: Individual files analysed. Statistics over individual results.\n")
+    result_file.write(" Averaged output is average of individual crtt measurements treated as single measurement leading to average tangent.\n")
     # Finally average all measurement files and analyse the average is if it was a single file
     x, arr_crtt,arr_temp = average_insitu_files(files)
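The last hunk adds the note announced in the commit message next to the existing statistics lines, which report the standard error of the mean as the sample standard deviation divided by the square root of the number of measurements. A minimal, self-contained sketch of that summary, with a hypothetical tangent_array and output file name:

import numpy

# Hypothetical per-file tangent results gathered during a directory scan.
tangent_array = numpy.array([1.02, 0.98, 1.01, 1.00])
numb_of_meas = len(tangent_array)

with open("results_example.txt", "w") as result_file:
    result_file.write("Mean: " + str(tangent_array.mean()) + "\n")
    result_file.write("StDev: " + str(tangent_array.std(ddof=1)) + "\n")
    # Standard error of the mean = sample standard deviation / sqrt(number of measurements).
    result_file.write("StErr: " + str(tangent_array.std(ddof=1) / (numb_of_meas ** 0.5)) + "\n")
    result_file.write("Note: Individual files analysed. Statistics over individual results.\n")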